mirror of
https://github.com/x1xhlol/system-prompts-and-models-of-ai-tools.git
synced 2026-02-04 14:00:49 +00:00
nhj
more
This commit is contained in:
470
unified-ai-platform/node_modules/ssri/index.js
generated
vendored
Normal file
470
unified-ai-platform/node_modules/ssri/index.js
generated
vendored
Normal file
@@ -0,0 +1,470 @@
|
||||
'use strict'
|
||||
|
||||
const crypto = require('crypto')
|
||||
const MiniPass = require('minipass')
|
||||
|
||||
// Algorithms the SRI spec admits in strict mode.
const SPEC_ALGORITHMS = ['sha256', 'sha384', 'sha512']

// TODO: this should really be a hardcoded list of algorithms we support,
// rather than [a-z0-9].
const BASE64_REGEX = /^[a-z0-9+/]+(?:=?=?)$/i
// Loose parse: "<algorithm>-<digest>" with any trailing "?..." kept as options.
const SRI_REGEX = /^([a-z0-9]+)-([^?]+)([?\S*]*)$/
// Strict parse per the SRI grammar: base64 digest of 44-88 characters plus an
// optional "?"-prefixed option string of printable ASCII.
const STRICT_SRI_REGEX = /^([a-z0-9]+)-([A-Za-z0-9+/=]{44,88})(\?[\x21-\x7E]*)?$/
// VCHAR from RFC 5234: printable ASCII (0x21-0x7E).
const VCHAR_REGEX = /^[\x21-\x7E]+$/
|
||||
|
||||
// Defaults merged under user options by ssriOpts().
const defaultOpts = {
  algorithms: ['sha512'],            // algorithms used when generating integrity
  error: false,                      // checkData: throw on failure instead of returning false
  options: [],                       // option strings appended as "?opt" to each hash
  pickAlgorithm: getPrioritizedHash, // tie-breaker when several algorithms are present
  sep: ' ',                          // separator used by Integrity#toString
  single: false,                     // parse into a single Hash instead of an Integrity
  strict: false                      // enforce the SRI spec grammar strictly
}
|
||||
|
||||
// Merge caller-supplied options over the library defaults.
function ssriOpts (opts) {
  return Object.assign({}, defaultOpts, opts)
}
|
||||
|
||||
// Render an option list as a "?"-joined suffix; '' when empty or absent.
const getOptString = options => {
  if (!options || !options.length) {
    return ''
  }
  return `?${options.join('?')}`
}
|
||||
|
||||
const _onEnd = Symbol('_onEnd')
const _getOptions = Symbol('_getOptions')

// Pass-through stream that hashes everything written to it. On 'end' it emits
// 'size' and 'integrity' (the computed Integrity); when opts.integrity was
// supplied it also verifies the data, emitting 'error' with code EINTEGRITY
// (digest mismatch) or EBADSIZE (size mismatch), or 'verified' on success.
class IntegrityStream extends MiniPass {
  constructor (opts) {
    super()
    this.size = 0
    this.opts = opts

    // may be overridden later, but set now for class consistency
    this[_getOptions]()

    // options used for calculating stream. can't be changed.
    const { algorithms = defaultOpts.algorithms } = opts
    // Always hash with the verification algorithm too, deduplicated.
    this.algorithms = Array.from(
      new Set(algorithms.concat(this.algorithm ? [this.algorithm] : []))
    )
    this.hashes = this.algorithms.map(crypto.createHash)
  }

  // (Re)read verification state — integrity, size, options — from this.opts.
  [_getOptions] () {
    const {
      integrity,
      size,
      options
    } = { ...defaultOpts, ...this.opts }

    // For verification
    this.sri = integrity ? parse(integrity, this.opts) : null
    this.expectedSize = size
    this.goodSri = this.sri ? !!Object.keys(this.sri).length : false
    this.algorithm = this.goodSri ? this.sri.pickAlgorithm(this.opts) : null
    this.digests = this.goodSri ? this.sri[this.algorithm] : null
    this.optString = getOptString(options)
  }

  emit (ev, data) {
    // Intercept 'end' so verification runs before listeners observe it.
    if (ev === 'end') this[_onEnd]()
    return super.emit(ev, data)
  }

  write (data) {
    this.size += data.length
    this.hashes.forEach(h => h.update(data))
    return super.write(data)
  }

  [_onEnd] () {
    // opts.integrity may have been assigned after construction; refresh.
    if (!this.goodSri) {
      this[_getOptions]()
    }
    // Integrity computed from everything written to the stream.
    const newSri = parse(this.hashes.map((h, i) => {
      return `${this.algorithms[i]}-${h.digest('base64')}${this.optString}`
    }).join(' '), this.opts)
    // Integrity verification mode
    const match = this.goodSri && newSri.match(this.sri, this.opts)
    if (typeof this.expectedSize === 'number' && this.size !== this.expectedSize) {
      const err = new Error(`stream size mismatch when checking ${this.sri}.\n Wanted: ${this.expectedSize}\n Found: ${this.size}`)
      err.code = 'EBADSIZE'
      err.found = this.size
      err.expected = this.expectedSize
      err.sri = this.sri
      this.emit('error', err)
    } else if (this.sri && !match) {
      const err = new Error(`${this.sri} integrity checksum failed when using ${this.algorithm}: wanted ${this.digests} but got ${newSri}. (${this.size} bytes)`)
      err.code = 'EINTEGRITY'
      err.found = newSri
      err.expected = this.digests
      err.algorithm = this.algorithm
      err.sri = this.sri
      this.emit('error', err)
    } else {
      this.emit('size', this.size)
      this.emit('integrity', newSri)
      // 'verified' carries the matching Hash entry.
      match && this.emit('verified', match)
    }
  }
}
|
||||
|
||||
// One parsed integrity entry: "<algorithm>-<base64 digest>(?option)*".
// A Hash that fails to parse keeps empty-string algorithm/digest.
class Hash {
  get isHash () { return true }
  constructor (hash, opts) {
    opts = ssriOpts(opts)
    const strict = !!opts.strict
    this.source = hash.trim()

    // set default values so that we make V8 happy to
    // always see a familiar object template.
    this.digest = ''
    this.algorithm = ''
    this.options = []

    // 3.1. Integrity metadata (called "Hash" by ssri)
    // https://w3c.github.io/webappsec-subresource-integrity/#integrity-metadata-description
    const match = this.source.match(
      strict
        ? STRICT_SRI_REGEX
        : SRI_REGEX
    )
    // Unparseable input leaves the empty defaults in place.
    if (!match) { return }
    // Strict mode only admits spec-approved algorithms.
    if (strict && !SPEC_ALGORITHMS.some(a => a === match[1])) { return }
    this.algorithm = match[1]
    this.digest = match[2]

    // Trailing "?a?b" option string becomes ['a', 'b'].
    const rawOpts = match[3]
    if (rawOpts) {
      this.options = rawOpts.slice(1).split('?')
    }
  }

  // Digest re-encoded as hex ('' for an unparsed Hash).
  hexDigest () {
    return this.digest && Buffer.from(this.digest, 'base64').toString('hex')
  }

  toJSON () {
    return this.toString()
  }

  // Render back to SRI string form; in strict mode entries that do not
  // satisfy the spec grammar render as ''.
  toString (opts) {
    opts = ssriOpts(opts)
    if (opts.strict) {
      // Strict mode enforces the standard as close to the foot of the
      // letter as it can.
      if (!(
        // The spec has very restricted productions for algorithms.
        // https://www.w3.org/TR/CSP2/#source-list-syntax
        SPEC_ALGORITHMS.some(x => x === this.algorithm) &&
        // Usually, if someone insists on using a "different" base64, we
        // leave it as-is, since there's multiple standards, and the
        // specified is not a URL-safe variant.
        // https://www.w3.org/TR/CSP2/#base64_value
        this.digest.match(BASE64_REGEX) &&
        // Option syntax is strictly visual chars.
        // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-option-expression
        // https://tools.ietf.org/html/rfc5234#appendix-B.1
        this.options.every(opt => opt.match(VCHAR_REGEX))
      )) {
        return ''
      }
    }
    const options = this.options && this.options.length
      ? `?${this.options.join('?')}`
      : ''
    return `${this.algorithm}-${this.digest}${options}`
  }
}
|
||||
|
||||
// A parsed SRI value: maps algorithm name -> array of Hash entries.
// Note: the hash data lives directly on `this`, keyed by algorithm, which is
// why these methods iterate Object.keys(this).
class Integrity {
  get isIntegrity () { return true }
  toJSON () {
    return this.toString()
  }

  isEmpty () {
    return Object.keys(this).length === 0
  }

  toString (opts) {
    opts = ssriOpts(opts)
    let sep = opts.sep || ' '
    if (opts.strict) {
      // Entries must be separated by whitespace, according to spec.
      sep = sep.replace(/\S+/g, ' ')
    }
    // Entries that fail strict rendering come back '' and are filtered out.
    return Object.keys(this).map(k => {
      return this[k].map(hash => {
        return Hash.prototype.toString.call(hash, opts)
      }).filter(x => x.length).join(sep)
    }).filter(x => x.length).join(sep)
  }

  // Append another integrity value (string or object); returns a new
  // parsed Integrity.
  concat (integrity, opts) {
    opts = ssriOpts(opts)
    const other = typeof integrity === 'string'
      ? integrity
      : stringify(integrity, opts)
    return parse(`${this.toString(opts)} ${other}`, opts)
  }

  // Hex digest of the single preferred entry (via opts.single re-parse).
  hexDigest () {
    return parse(this, { single: true }).hexDigest()
  }

  // add additional hashes to an integrity value, but prevent
  // *changing* an existing integrity hash.
  merge (integrity, opts) {
    opts = ssriOpts(opts)
    const other = parse(integrity, opts)
    for (const algo in other) {
      if (this[algo]) {
        // For algorithms both sides share, at least one digest must agree.
        if (!this[algo].find(hash =>
          other[algo].find(otherhash =>
            hash.digest === otherhash.digest))) {
          throw new Error('hashes do not match, cannot update integrity')
        }
      } else {
        this[algo] = other[algo]
      }
    }
  }

  // Returns the matching Hash (from `this`) when `integrity` shares a digest
  // under the preferred algorithm, else false.
  match (integrity, opts) {
    opts = ssriOpts(opts)
    const other = parse(integrity, opts)
    const algo = other.pickAlgorithm(opts)
    return (
      this[algo] &&
      other[algo] &&
      this[algo].find(hash =>
        other[algo].find(otherhash =>
          hash.digest === otherhash.digest
        )
      )
    ) || false
  }

  // Pick the preferred algorithm among the keys via opts.pickAlgorithm.
  // NOTE(review): reduce() without an initial value throws a TypeError on an
  // empty Integrity — callers here appear to guard with isEmpty()/key-count
  // checks first; confirm before calling on arbitrary values.
  pickAlgorithm (opts) {
    opts = ssriOpts(opts)
    const pickAlgorithm = opts.pickAlgorithm
    const keys = Object.keys(this)
    return keys.reduce((acc, algo) => {
      return pickAlgorithm(acc, algo) || acc
    })
  }
}
|
||||
|
||||
module.exports.parse = parse
// Parse a string, Hash-like, or Integrity-like value. Returns an Integrity
// (or a single Hash when opts.single), or null for falsy input.
function parse (sri, opts) {
  if (!sri) {
    return null
  }
  opts = ssriOpts(opts)
  if (typeof sri === 'string') {
    return _parse(sri, opts)
  }
  if (sri.algorithm && sri.digest) {
    // A single Hash-like object: wrap it in an Integrity, then re-parse.
    const fullSri = new Integrity()
    fullSri[sri.algorithm] = [sri]
    return _parse(stringify(fullSri, opts), opts)
  }
  // Assume an Integrity-like object; round-trip through its string form.
  return _parse(stringify(sri, opts), opts)
}
|
||||
|
||||
// Parse an SRI string into an Integrity, or a single Hash when opts.single.
// Returns null when no whitespace-separated entry yields a usable
// algorithm + digest pair.
function _parse (integrity, opts) {
  // 3.4.3. Parse metadata
  // https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
  if (opts.single) {
    return new Hash(integrity, opts)
  }
  const hashes = integrity.trim().split(/\s+/).reduce((acc, string) => {
    const hash = new Hash(string, opts)
    // Entries that fail to parse are silently dropped.
    if (hash.algorithm && hash.digest) {
      const algo = hash.algorithm
      if (!acc[algo]) { acc[algo] = [] }
      acc[algo].push(hash)
    }
    return acc
  }, new Integrity())
  return hashes.isEmpty() ? null : hashes
}
|
||||
|
||||
module.exports.stringify = stringify
// Render a Hash-like, string, or Integrity-like value as an SRI string.
function stringify (obj, opts) {
  opts = ssriOpts(opts)
  if (obj.algorithm && obj.digest) {
    return Hash.prototype.toString.call(obj, opts)
  }
  if (typeof obj === 'string') {
    // Normalize strings by parsing and re-stringifying.
    return stringify(parse(obj, opts), opts)
  }
  return Integrity.prototype.toString.call(obj, opts)
}
|
||||
|
||||
module.exports.fromHex = fromHex
// Build an Integrity from a hex-encoded digest and an algorithm name.
function fromHex (hexDigest, algorithm, opts) {
  opts = ssriOpts(opts)
  const optString = getOptString(opts.options)
  const b64 = Buffer.from(hexDigest, 'hex').toString('base64')
  return parse(`${algorithm}-${b64}${optString}`, opts)
}
|
||||
|
||||
module.exports.fromData = fromData
// Hash an in-memory buffer/string with each configured algorithm and
// return the resulting Integrity.
function fromData (data, opts) {
  opts = ssriOpts(opts)
  const algorithms = opts.algorithms
  const optString = getOptString(opts.options)
  return algorithms.reduce((acc, algo) => {
    const digest = crypto.createHash(algo).update(data).digest('base64')
    const hash = new Hash(
      `${algo}-${digest}${optString}`,
      opts
    )
    /* istanbul ignore else - it would be VERY strange if the string we
     * just calculated with an algo did not have an algo or digest.
     */
    if (hash.algorithm && hash.digest) {
      const algo = hash.algorithm
      if (!acc[algo]) { acc[algo] = [] }
      acc[algo].push(hash)
    }
    return acc
  }, new Integrity())
}
|
||||
|
||||
module.exports.fromStream = fromStream
// Hash a readable stream; resolves with the computed Integrity.
function fromStream (stream, opts) {
  opts = ssriOpts(opts)
  const istream = integrityStream(opts)
  return new Promise((resolve, reject) => {
    stream.pipe(istream)
    stream.on('error', reject)
    istream.on('error', reject)
    // Captured from the 'integrity' event, resolved on 'end'.
    let sri
    istream.on('integrity', s => { sri = s })
    istream.on('end', () => resolve(sri))
    // Keep the stream flowing so 'end' actually fires.
    istream.on('data', () => {})
  })
}
|
||||
|
||||
module.exports.checkData = checkData
// Verify in-memory data against an SRI value. Returns the matching Hash on
// success and false on failure — unless opts.error is set, in which case
// failures throw (EINTEGRITY, or EBADSIZE on a size mismatch).
function checkData (data, sri, opts) {
  opts = ssriOpts(opts)
  sri = parse(sri, opts)
  if (!sri || !Object.keys(sri).length) {
    if (opts.error) {
      throw Object.assign(
        new Error('No valid integrity hashes to check against'), {
          code: 'EINTEGRITY'
        }
      )
    } else {
      return false
    }
  }
  const algorithm = sri.pickAlgorithm(opts)
  const digest = crypto.createHash(algorithm).update(data).digest('base64')
  const newSri = parse({ algorithm, digest })
  const match = newSri.match(sri, opts)
  if (match || !opts.error) {
    return match
  } else if (typeof opts.size === 'number' && (data.length !== opts.size)) {
    // NOTE(review): this size check is only reachable after the digest check
    // has already failed AND opts.error is set — confirm this precedence is
    // intended (a size mismatch with a passing digest returns match above).
    const err = new Error(`data size mismatch when checking ${sri}.\n Wanted: ${opts.size}\n Found: ${data.length}`)
    err.code = 'EBADSIZE'
    err.found = data.length
    err.expected = opts.size
    err.sri = sri
    throw err
  } else {
    const err = new Error(`Integrity checksum failed when using ${algorithm}: Wanted ${sri}, but got ${newSri}. (${data.length} bytes)`)
    err.code = 'EINTEGRITY'
    err.found = newSri
    err.expected = sri
    err.algorithm = algorithm
    err.sri = sri
    throw err
  }
}
|
||||
|
||||
module.exports.checkStream = checkStream
// Verify a stream against an SRI value. Resolves with the verified Hash;
// rejects with EINTEGRITY/EBADSIZE on failure.
function checkStream (stream, sri, opts) {
  opts = ssriOpts(opts)
  // integrityStream reads the expected value from opts.integrity.
  opts.integrity = sri
  sri = parse(sri, opts)
  if (!sri || !Object.keys(sri).length) {
    return Promise.reject(Object.assign(
      new Error('No valid integrity hashes to check against'), {
        code: 'EINTEGRITY'
      }
    ))
  }
  const checker = integrityStream(opts)
  return new Promise((resolve, reject) => {
    stream.pipe(checker)
    stream.on('error', reject)
    checker.on('error', reject)
    // Intentionally shadows the outer `sri`; holds the Hash from 'verified'.
    let sri
    checker.on('verified', s => { sri = s })
    checker.on('end', () => resolve(sri))
    // Keep the stream flowing so 'end' actually fires.
    checker.on('data', () => {})
  })
}
|
||||
|
||||
module.exports.integrityStream = integrityStream
// Factory for IntegrityStream (hash-as-you-pipe; see the class above for
// the events it emits).
function integrityStream (opts = {}) {
  return new IntegrityStream(opts)
}
|
||||
|
||||
module.exports.create = createIntegrity
// Incremental builder with a crypto.Hash-like interface:
//   create(opts).update(chunk)...digest() -> Integrity
function createIntegrity (opts) {
  opts = ssriOpts(opts)
  const algorithms = opts.algorithms
  const optString = getOptString(opts.options)

  const hashes = algorithms.map(crypto.createHash)

  return {
    // Feed a chunk to every underlying hash; chainable.
    update: function (chunk, enc) {
      hashes.forEach(h => h.update(chunk, enc))
      return this
    },
    // Finalize into an Integrity. Note: shift() consumes the hashes, so
    // digest() is single-use.
    digest: function (enc) {
      const integrity = algorithms.reduce((acc, algo) => {
        const digest = hashes.shift().digest('base64')
        const hash = new Hash(
          `${algo}-${digest}${optString}`,
          opts
        )
        /* istanbul ignore else - it would be VERY strange if the hash we
         * just calculated with an algo did not have an algo or digest.
         */
        if (hash.algorithm && hash.digest) {
          const algo = hash.algorithm
          if (!acc[algo]) { acc[algo] = [] }
          acc[algo].push(hash)
        }
        return acc
      }, new Integrity())

      return integrity
    }
  }
}
|
||||
|
||||
// Hash algorithms actually available in this Node/OpenSSL build.
const NODE_HASHES = new Set(crypto.getHashes())

// This is a Best Effort™ at a reasonable priority for hash algos
// (ordered weakest-first; getPrioritizedHash prefers the higher index).
const DEFAULT_PRIORITY = [
  'md5', 'whirlpool', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512',
  // TODO - it's unclear _which_ of these Node will actually use as its name
  // for the algorithm, so we guesswork it based on the OpenSSL names.
  'sha3',
  'sha3-256', 'sha3-384', 'sha3-512',
  'sha3_256', 'sha3_384', 'sha3_512'
].filter(algo => NODE_HASHES.has(algo))
|
||||
|
||||
// Default pickAlgorithm: return whichever algorithm ranks higher in
// DEFAULT_PRIORITY; ties and algorithms absent from the list favor algo1.
function getPrioritizedHash (algo1, algo2) {
  const rank1 = DEFAULT_PRIORITY.indexOf(algo1.toLowerCase())
  const rank2 = DEFAULT_PRIORITY.indexOf(algo2.toLowerCase())
  return rank1 >= rank2 ? algo1 : algo2
}
|
||||
15
unified-ai-platform/node_modules/ssri/node_modules/minipass/LICENSE
generated
vendored
Normal file
15
unified-ai-platform/node_modules/ssri/node_modules/minipass/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
The ISC License
|
||||
|
||||
Copyright (c) 2017-2022 npm, Inc., Isaac Z. Schlueter, and Contributors
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
|
||||
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
0
unified-ai-platform/node_modules/ssri/node_modules/minipass/README.md
generated
vendored
Normal file
0
unified-ai-platform/node_modules/ssri/node_modules/minipass/README.md
generated
vendored
Normal file
649
unified-ai-platform/node_modules/ssri/node_modules/minipass/index.js
generated
vendored
Normal file
649
unified-ai-platform/node_modules/ssri/node_modules/minipass/index.js
generated
vendored
Normal file
@@ -0,0 +1,649 @@
|
||||
'use strict'
|
||||
// Stand-in for `process` so this also loads where it is absent (bundlers);
// pipe() compares destinations against proc.stdout/proc.stderr to decide
// whether to auto-end them.
const proc = typeof process === 'object' && process ? process : {
  stdout: null,
  stderr: null,
}
|
||||
const EE = require('events')
|
||||
const Stream = require('stream')
|
||||
const SD = require('string_decoder').StringDecoder
|
||||
|
||||
// Private state keys: Symbols keep internal stream state off the public
// property namespace of Minipass instances.
const EOF = Symbol('EOF')
const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
const EMITTED_END = Symbol('emittedEnd')
const EMITTING_END = Symbol('emittingEnd')
const EMITTED_ERROR = Symbol('emittedError')
const CLOSED = Symbol('closed')
const READ = Symbol('read')
const FLUSH = Symbol('flush')
const FLUSHCHUNK = Symbol('flushChunk')
const ENCODING = Symbol('encoding')
const DECODER = Symbol('decoder')
const FLOWING = Symbol('flowing')
const PAUSED = Symbol('paused')
const RESUME = Symbol('resume')
const BUFFERLENGTH = Symbol('bufferLength')
const BUFFERPUSH = Symbol('bufferPush')
const BUFFERSHIFT = Symbol('bufferShift')
const OBJECTMODE = Symbol('objectMode')
const DESTROYED = Symbol('destroyed')
const EMITDATA = Symbol('emitData')
const EMITEND = Symbol('emitEnd')
const EMITEND2 = Symbol('emitEnd2')
const ASYNC = Symbol('async')

// Run fn on the microtask queue; used when the stream is in async mode.
const defer = fn => Promise.resolve().then(fn)
|
||||
|
||||
// TODO remove when Node v8 support drops
// Escape hatch: setting global._MP_NO_ITERATOR_SYMBOLS_ = '1' disables
// Symbol.iterator / Symbol.asyncIterator support; unique fallback Symbols
// keep the method definitions below harmless in that case.
const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1'
const ASYNCITERATOR = doIter && Symbol.asyncIterator
  || Symbol('asyncIterator not implemented')
const ITERATOR = doIter && Symbol.iterator
  || Symbol('iterator not implemented')
|
||||
|
||||
// events that mean 'the stream is over'
// these are treated specially, and re-emitted
// if they are listened for after emitting.
const isEndish = ev => ['end', 'finish', 'prefinish'].includes(ev)
|
||||
|
||||
// True for real ArrayBuffers and for duck-typed ones from other realms.
const isArrayBuffer = b => {
  if (b instanceof ArrayBuffer) {
    return true
  }
  return typeof b === 'object' &&
    b.constructor &&
    b.constructor.name === 'ArrayBuffer' &&
    b.byteLength >= 0
}

// A typed-array view (or DataView) that is not already a Node Buffer.
const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b)
|
||||
|
||||
// Bookkeeping for one src -> dest pipe: resumes src when dest drains.
class Pipe {
  constructor (src, dest, opts) {
    this.src = src
    this.dest = dest
    this.opts = opts
    this.ondrain = () => src[RESUME]()
    dest.on('drain', this.ondrain)
  }
  // Detach the drain listener from dest.
  unpipe () {
    this.dest.removeListener('drain', this.ondrain)
  }
  // istanbul ignore next - only here for the prototype
  proxyErrors () {}
  // Called when src ends: detach, then end dest unless opts.end is false.
  end () {
    this.unpipe()
    if (this.opts.end)
      this.dest.end()
  }
}
|
||||
|
||||
// Pipe variant that also forwards 'error' events from src to dest
// (selected via pipe(dest, { proxyErrors: true })).
class PipeProxyErrors extends Pipe {
  unpipe () {
    this.src.removeListener('error', this.proxyErrors)
    super.unpipe()
  }
  constructor (src, dest, opts) {
    super(src, dest, opts)
    this.proxyErrors = er => dest.emit('error', er)
    src.on('error', this.proxyErrors)
  }
}
|
||||
|
||||
module.exports = class Minipass extends Stream {
|
||||
constructor (options) {
|
||||
super()
|
||||
this[FLOWING] = false
|
||||
// whether we're explicitly paused
|
||||
this[PAUSED] = false
|
||||
this.pipes = []
|
||||
this.buffer = []
|
||||
this[OBJECTMODE] = options && options.objectMode || false
|
||||
if (this[OBJECTMODE])
|
||||
this[ENCODING] = null
|
||||
else
|
||||
this[ENCODING] = options && options.encoding || null
|
||||
if (this[ENCODING] === 'buffer')
|
||||
this[ENCODING] = null
|
||||
this[ASYNC] = options && !!options.async || false
|
||||
this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
|
||||
this[EOF] = false
|
||||
this[EMITTED_END] = false
|
||||
this[EMITTING_END] = false
|
||||
this[CLOSED] = false
|
||||
this[EMITTED_ERROR] = null
|
||||
this.writable = true
|
||||
this.readable = true
|
||||
this[BUFFERLENGTH] = 0
|
||||
this[DESTROYED] = false
|
||||
}
|
||||
|
||||
get bufferLength () { return this[BUFFERLENGTH] }
|
||||
|
||||
get encoding () { return this[ENCODING] }
|
||||
set encoding (enc) {
|
||||
if (this[OBJECTMODE])
|
||||
throw new Error('cannot set encoding in objectMode')
|
||||
|
||||
if (this[ENCODING] && enc !== this[ENCODING] &&
|
||||
(this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH]))
|
||||
throw new Error('cannot change encoding')
|
||||
|
||||
if (this[ENCODING] !== enc) {
|
||||
this[DECODER] = enc ? new SD(enc) : null
|
||||
if (this.buffer.length)
|
||||
this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk))
|
||||
}
|
||||
|
||||
this[ENCODING] = enc
|
||||
}
|
||||
|
||||
setEncoding (enc) {
|
||||
this.encoding = enc
|
||||
}
|
||||
|
||||
get objectMode () { return this[OBJECTMODE] }
|
||||
set objectMode (om) { this[OBJECTMODE] = this[OBJECTMODE] || !!om }
|
||||
|
||||
get ['async'] () { return this[ASYNC] }
|
||||
set ['async'] (a) { this[ASYNC] = this[ASYNC] || !!a }
|
||||
|
||||
write (chunk, encoding, cb) {
|
||||
if (this[EOF])
|
||||
throw new Error('write after end')
|
||||
|
||||
if (this[DESTROYED]) {
|
||||
this.emit('error', Object.assign(
|
||||
new Error('Cannot call write after a stream was destroyed'),
|
||||
{ code: 'ERR_STREAM_DESTROYED' }
|
||||
))
|
||||
return true
|
||||
}
|
||||
|
||||
if (typeof encoding === 'function')
|
||||
cb = encoding, encoding = 'utf8'
|
||||
|
||||
if (!encoding)
|
||||
encoding = 'utf8'
|
||||
|
||||
const fn = this[ASYNC] ? defer : f => f()
|
||||
|
||||
// convert array buffers and typed array views into buffers
|
||||
// at some point in the future, we may want to do the opposite!
|
||||
// leave strings and buffers as-is
|
||||
// anything else switches us into object mode
|
||||
if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
|
||||
if (isArrayBufferView(chunk))
|
||||
chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
|
||||
else if (isArrayBuffer(chunk))
|
||||
chunk = Buffer.from(chunk)
|
||||
else if (typeof chunk !== 'string')
|
||||
// use the setter so we throw if we have encoding set
|
||||
this.objectMode = true
|
||||
}
|
||||
|
||||
// handle object mode up front, since it's simpler
|
||||
// this yields better performance, fewer checks later.
|
||||
if (this[OBJECTMODE]) {
|
||||
/* istanbul ignore if - maybe impossible? */
|
||||
if (this.flowing && this[BUFFERLENGTH] !== 0)
|
||||
this[FLUSH](true)
|
||||
|
||||
if (this.flowing)
|
||||
this.emit('data', chunk)
|
||||
else
|
||||
this[BUFFERPUSH](chunk)
|
||||
|
||||
if (this[BUFFERLENGTH] !== 0)
|
||||
this.emit('readable')
|
||||
|
||||
if (cb)
|
||||
fn(cb)
|
||||
|
||||
return this.flowing
|
||||
}
|
||||
|
||||
// at this point the chunk is a buffer or string
|
||||
// don't buffer it up or send it to the decoder
|
||||
if (!chunk.length) {
|
||||
if (this[BUFFERLENGTH] !== 0)
|
||||
this.emit('readable')
|
||||
if (cb)
|
||||
fn(cb)
|
||||
return this.flowing
|
||||
}
|
||||
|
||||
// fast-path writing strings of same encoding to a stream with
|
||||
// an empty buffer, skipping the buffer/decoder dance
|
||||
if (typeof chunk === 'string' &&
|
||||
// unless it is a string already ready for us to use
|
||||
!(encoding === this[ENCODING] && !this[DECODER].lastNeed)) {
|
||||
chunk = Buffer.from(chunk, encoding)
|
||||
}
|
||||
|
||||
if (Buffer.isBuffer(chunk) && this[ENCODING])
|
||||
chunk = this[DECODER].write(chunk)
|
||||
|
||||
// Note: flushing CAN potentially switch us into not-flowing mode
|
||||
if (this.flowing && this[BUFFERLENGTH] !== 0)
|
||||
this[FLUSH](true)
|
||||
|
||||
if (this.flowing)
|
||||
this.emit('data', chunk)
|
||||
else
|
||||
this[BUFFERPUSH](chunk)
|
||||
|
||||
if (this[BUFFERLENGTH] !== 0)
|
||||
this.emit('readable')
|
||||
|
||||
if (cb)
|
||||
fn(cb)
|
||||
|
||||
return this.flowing
|
||||
}
|
||||
|
||||
read (n) {
|
||||
if (this[DESTROYED])
|
||||
return null
|
||||
|
||||
if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) {
|
||||
this[MAYBE_EMIT_END]()
|
||||
return null
|
||||
}
|
||||
|
||||
if (this[OBJECTMODE])
|
||||
n = null
|
||||
|
||||
if (this.buffer.length > 1 && !this[OBJECTMODE]) {
|
||||
if (this.encoding)
|
||||
this.buffer = [this.buffer.join('')]
|
||||
else
|
||||
this.buffer = [Buffer.concat(this.buffer, this[BUFFERLENGTH])]
|
||||
}
|
||||
|
||||
const ret = this[READ](n || null, this.buffer[0])
|
||||
this[MAYBE_EMIT_END]()
|
||||
return ret
|
||||
}
|
||||
|
||||
[READ] (n, chunk) {
|
||||
if (n === chunk.length || n === null)
|
||||
this[BUFFERSHIFT]()
|
||||
else {
|
||||
this.buffer[0] = chunk.slice(n)
|
||||
chunk = chunk.slice(0, n)
|
||||
this[BUFFERLENGTH] -= n
|
||||
}
|
||||
|
||||
this.emit('data', chunk)
|
||||
|
||||
if (!this.buffer.length && !this[EOF])
|
||||
this.emit('drain')
|
||||
|
||||
return chunk
|
||||
}
|
||||
|
||||
end (chunk, encoding, cb) {
|
||||
if (typeof chunk === 'function')
|
||||
cb = chunk, chunk = null
|
||||
if (typeof encoding === 'function')
|
||||
cb = encoding, encoding = 'utf8'
|
||||
if (chunk)
|
||||
this.write(chunk, encoding)
|
||||
if (cb)
|
||||
this.once('end', cb)
|
||||
this[EOF] = true
|
||||
this.writable = false
|
||||
|
||||
// if we haven't written anything, then go ahead and emit,
|
||||
// even if we're not reading.
|
||||
// we'll re-emit if a new 'end' listener is added anyway.
|
||||
// This makes MP more suitable to write-only use cases.
|
||||
if (this.flowing || !this[PAUSED])
|
||||
this[MAYBE_EMIT_END]()
|
||||
return this
|
||||
}
|
||||
|
||||
// don't let the internal resume be overwritten
|
||||
[RESUME] () {
|
||||
if (this[DESTROYED])
|
||||
return
|
||||
|
||||
this[PAUSED] = false
|
||||
this[FLOWING] = true
|
||||
this.emit('resume')
|
||||
if (this.buffer.length)
|
||||
this[FLUSH]()
|
||||
else if (this[EOF])
|
||||
this[MAYBE_EMIT_END]()
|
||||
else
|
||||
this.emit('drain')
|
||||
}
|
||||
|
||||
resume () {
|
||||
return this[RESUME]()
|
||||
}
|
||||
|
||||
pause () {
|
||||
this[FLOWING] = false
|
||||
this[PAUSED] = true
|
||||
}
|
||||
|
||||
get destroyed () {
|
||||
return this[DESTROYED]
|
||||
}
|
||||
|
||||
get flowing () {
|
||||
return this[FLOWING]
|
||||
}
|
||||
|
||||
get paused () {
|
||||
return this[PAUSED]
|
||||
}
|
||||
|
||||
[BUFFERPUSH] (chunk) {
|
||||
if (this[OBJECTMODE])
|
||||
this[BUFFERLENGTH] += 1
|
||||
else
|
||||
this[BUFFERLENGTH] += chunk.length
|
||||
this.buffer.push(chunk)
|
||||
}
|
||||
|
||||
[BUFFERSHIFT] () {
|
||||
if (this.buffer.length) {
|
||||
if (this[OBJECTMODE])
|
||||
this[BUFFERLENGTH] -= 1
|
||||
else
|
||||
this[BUFFERLENGTH] -= this.buffer[0].length
|
||||
}
|
||||
return this.buffer.shift()
|
||||
}
|
||||
|
||||
[FLUSH] (noDrain) {
|
||||
do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()))
|
||||
|
||||
if (!noDrain && !this.buffer.length && !this[EOF])
|
||||
this.emit('drain')
|
||||
}
|
||||
|
||||
[FLUSHCHUNK] (chunk) {
|
||||
return chunk ? (this.emit('data', chunk), this.flowing) : false
|
||||
}
|
||||
|
||||
pipe (dest, opts) {
|
||||
if (this[DESTROYED])
|
||||
return
|
||||
|
||||
const ended = this[EMITTED_END]
|
||||
opts = opts || {}
|
||||
if (dest === proc.stdout || dest === proc.stderr)
|
||||
opts.end = false
|
||||
else
|
||||
opts.end = opts.end !== false
|
||||
opts.proxyErrors = !!opts.proxyErrors
|
||||
|
||||
// piping an ended stream ends immediately
|
||||
if (ended) {
|
||||
if (opts.end)
|
||||
dest.end()
|
||||
} else {
|
||||
this.pipes.push(!opts.proxyErrors ? new Pipe(this, dest, opts)
|
||||
: new PipeProxyErrors(this, dest, opts))
|
||||
if (this[ASYNC])
|
||||
defer(() => this[RESUME]())
|
||||
else
|
||||
this[RESUME]()
|
||||
}
|
||||
|
||||
return dest
|
||||
}
|
||||
|
||||
unpipe (dest) {
|
||||
const p = this.pipes.find(p => p.dest === dest)
|
||||
if (p) {
|
||||
this.pipes.splice(this.pipes.indexOf(p), 1)
|
||||
p.unpipe()
|
||||
}
|
||||
}
|
||||
|
||||
addListener (ev, fn) {
|
||||
return this.on(ev, fn)
|
||||
}
|
||||
|
||||
on (ev, fn) {
|
||||
const ret = super.on(ev, fn)
|
||||
if (ev === 'data' && !this.pipes.length && !this.flowing)
|
||||
this[RESUME]()
|
||||
else if (ev === 'readable' && this[BUFFERLENGTH] !== 0)
|
||||
super.emit('readable')
|
||||
else if (isEndish(ev) && this[EMITTED_END]) {
|
||||
super.emit(ev)
|
||||
this.removeAllListeners(ev)
|
||||
} else if (ev === 'error' && this[EMITTED_ERROR]) {
|
||||
if (this[ASYNC])
|
||||
defer(() => fn.call(this, this[EMITTED_ERROR]))
|
||||
else
|
||||
fn.call(this, this[EMITTED_ERROR])
|
||||
}
|
||||
return ret
|
||||
}
|
||||
|
||||
get emittedEnd () {
|
||||
return this[EMITTED_END]
|
||||
}
|
||||
|
||||
[MAYBE_EMIT_END] () {
|
||||
if (!this[EMITTING_END] &&
|
||||
!this[EMITTED_END] &&
|
||||
!this[DESTROYED] &&
|
||||
this.buffer.length === 0 &&
|
||||
this[EOF]) {
|
||||
this[EMITTING_END] = true
|
||||
this.emit('end')
|
||||
this.emit('prefinish')
|
||||
this.emit('finish')
|
||||
if (this[CLOSED])
|
||||
this.emit('close')
|
||||
this[EMITTING_END] = false
|
||||
}
|
||||
}
|
||||
|
||||
emit (ev, data, ...extra) {
|
||||
// error and close are only events allowed after calling destroy()
|
||||
if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED])
|
||||
return
|
||||
else if (ev === 'data') {
|
||||
return !data ? false
|
||||
: this[ASYNC] ? defer(() => this[EMITDATA](data))
|
||||
: this[EMITDATA](data)
|
||||
} else if (ev === 'end') {
|
||||
return this[EMITEND]()
|
||||
} else if (ev === 'close') {
|
||||
this[CLOSED] = true
|
||||
// don't emit close before 'end' and 'finish'
|
||||
if (!this[EMITTED_END] && !this[DESTROYED])
|
||||
return
|
||||
const ret = super.emit('close')
|
||||
this.removeAllListeners('close')
|
||||
return ret
|
||||
} else if (ev === 'error') {
|
||||
this[EMITTED_ERROR] = data
|
||||
const ret = super.emit('error', data)
|
||||
this[MAYBE_EMIT_END]()
|
||||
return ret
|
||||
} else if (ev === 'resume') {
|
||||
const ret = super.emit('resume')
|
||||
this[MAYBE_EMIT_END]()
|
||||
return ret
|
||||
} else if (ev === 'finish' || ev === 'prefinish') {
|
||||
const ret = super.emit(ev)
|
||||
this.removeAllListeners(ev)
|
||||
return ret
|
||||
}
|
||||
|
||||
// Some other unknown event
|
||||
const ret = super.emit(ev, data, ...extra)
|
||||
this[MAYBE_EMIT_END]()
|
||||
return ret
|
||||
}
|
||||
|
||||
[EMITDATA] (data) {
|
||||
for (const p of this.pipes) {
|
||||
if (p.dest.write(data) === false)
|
||||
this.pause()
|
||||
}
|
||||
const ret = super.emit('data', data)
|
||||
this[MAYBE_EMIT_END]()
|
||||
return ret
|
||||
}
|
||||
|
||||
[EMITEND] () {
|
||||
if (this[EMITTED_END])
|
||||
return
|
||||
|
||||
this[EMITTED_END] = true
|
||||
this.readable = false
|
||||
if (this[ASYNC])
|
||||
defer(() => this[EMITEND2]())
|
||||
else
|
||||
this[EMITEND2]()
|
||||
}
|
||||
|
||||
[EMITEND2] () {
|
||||
if (this[DECODER]) {
|
||||
const data = this[DECODER].end()
|
||||
if (data) {
|
||||
for (const p of this.pipes) {
|
||||
p.dest.write(data)
|
||||
}
|
||||
super.emit('data', data)
|
||||
}
|
||||
}
|
||||
|
||||
for (const p of this.pipes) {
|
||||
p.end()
|
||||
}
|
||||
const ret = super.emit('end')
|
||||
this.removeAllListeners('end')
|
||||
return ret
|
||||
}
|
||||
|
||||
// const all = await stream.collect()
|
||||
collect () {
|
||||
const buf = []
|
||||
if (!this[OBJECTMODE])
|
||||
buf.dataLength = 0
|
||||
// set the promise first, in case an error is raised
|
||||
// by triggering the flow here.
|
||||
const p = this.promise()
|
||||
this.on('data', c => {
|
||||
buf.push(c)
|
||||
if (!this[OBJECTMODE])
|
||||
buf.dataLength += c.length
|
||||
})
|
||||
return p.then(() => buf)
|
||||
}
|
||||
|
||||
// const data = await stream.concat()
|
||||
concat () {
|
||||
return this[OBJECTMODE]
|
||||
? Promise.reject(new Error('cannot concat in objectMode'))
|
||||
: this.collect().then(buf =>
|
||||
this[OBJECTMODE]
|
||||
? Promise.reject(new Error('cannot concat in objectMode'))
|
||||
: this[ENCODING] ? buf.join('') : Buffer.concat(buf, buf.dataLength))
|
||||
}
|
||||
|
||||
// stream.promise().then(() => done, er => emitted error)
|
||||
promise () {
|
||||
return new Promise((resolve, reject) => {
|
||||
this.on(DESTROYED, () => reject(new Error('stream destroyed')))
|
||||
this.on('error', er => reject(er))
|
||||
this.on('end', () => resolve())
|
||||
})
|
||||
}
|
||||
|
||||
// for await (let chunk of stream)
|
||||
[ASYNCITERATOR] () {
|
||||
const next = () => {
|
||||
const res = this.read()
|
||||
if (res !== null)
|
||||
return Promise.resolve({ done: false, value: res })
|
||||
|
||||
if (this[EOF])
|
||||
return Promise.resolve({ done: true })
|
||||
|
||||
let resolve = null
|
||||
let reject = null
|
||||
const onerr = er => {
|
||||
this.removeListener('data', ondata)
|
||||
this.removeListener('end', onend)
|
||||
reject(er)
|
||||
}
|
||||
const ondata = value => {
|
||||
this.removeListener('error', onerr)
|
||||
this.removeListener('end', onend)
|
||||
this.pause()
|
||||
resolve({ value: value, done: !!this[EOF] })
|
||||
}
|
||||
const onend = () => {
|
||||
this.removeListener('error', onerr)
|
||||
this.removeListener('data', ondata)
|
||||
resolve({ done: true })
|
||||
}
|
||||
const ondestroy = () => onerr(new Error('stream destroyed'))
|
||||
return new Promise((res, rej) => {
|
||||
reject = rej
|
||||
resolve = res
|
||||
this.once(DESTROYED, ondestroy)
|
||||
this.once('error', onerr)
|
||||
this.once('end', onend)
|
||||
this.once('data', ondata)
|
||||
})
|
||||
}
|
||||
|
||||
return { next }
|
||||
}
|
||||
|
||||
// for (let chunk of stream)
|
||||
[ITERATOR] () {
|
||||
const next = () => {
|
||||
const value = this.read()
|
||||
const done = value === null
|
||||
return { value, done }
|
||||
}
|
||||
return { next }
|
||||
}
|
||||
|
||||
destroy (er) {
|
||||
if (this[DESTROYED]) {
|
||||
if (er)
|
||||
this.emit('error', er)
|
||||
else
|
||||
this.emit(DESTROYED)
|
||||
return this
|
||||
}
|
||||
|
||||
this[DESTROYED] = true
|
||||
|
||||
// throw away all buffered data, it's never coming out
|
||||
this.buffer.length = 0
|
||||
this[BUFFERLENGTH] = 0
|
||||
|
||||
if (typeof this.close === 'function' && !this[CLOSED])
|
||||
this.close()
|
||||
|
||||
if (er)
|
||||
this.emit('error', er)
|
||||
else // if no error to emit, still reject pending promises
|
||||
this.emit(DESTROYED)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
static isStream (s) {
|
||||
return !!s && (s instanceof Minipass || s instanceof Stream ||
|
||||
s instanceof EE && (
|
||||
typeof s.pipe === 'function' || // readable
|
||||
(typeof s.write === 'function' && typeof s.end === 'function') // writable
|
||||
))
|
||||
}
|
||||
}
|
||||
56
unified-ai-platform/node_modules/ssri/node_modules/minipass/package.json
generated
vendored
Normal file
56
unified-ai-platform/node_modules/ssri/node_modules/minipass/package.json
generated
vendored
Normal file
@@ -0,0 +1,56 @@
|
||||
{
|
||||
"name": "minipass",
|
||||
"version": "3.3.6",
|
||||
"description": "minimal implementation of a PassThrough stream",
|
||||
"main": "index.js",
|
||||
"types": "index.d.ts",
|
||||
"dependencies": {
|
||||
"yallist": "^4.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^17.0.41",
|
||||
"end-of-stream": "^1.4.0",
|
||||
"prettier": "^2.6.2",
|
||||
"tap": "^16.2.0",
|
||||
"through2": "^2.0.3",
|
||||
"ts-node": "^10.8.1",
|
||||
"typescript": "^4.7.3"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "tap",
|
||||
"preversion": "npm test",
|
||||
"postversion": "npm publish",
|
||||
"postpublish": "git push origin --follow-tags"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/isaacs/minipass.git"
|
||||
},
|
||||
"keywords": [
|
||||
"passthrough",
|
||||
"stream"
|
||||
],
|
||||
"author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
|
||||
"license": "ISC",
|
||||
"files": [
|
||||
"index.d.ts",
|
||||
"index.js"
|
||||
],
|
||||
"tap": {
|
||||
"check-coverage": true
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
},
|
||||
"prettier": {
|
||||
"semi": false,
|
||||
"printWidth": 80,
|
||||
"tabWidth": 2,
|
||||
"useTabs": false,
|
||||
"singleQuote": true,
|
||||
"jsxSingleQuote": false,
|
||||
"bracketSameLine": true,
|
||||
"arrowParens": "avoid",
|
||||
"endOfLine": "lf"
|
||||
}
|
||||
}
|
||||
15
unified-ai-platform/node_modules/ssri/node_modules/yallist/LICENSE
generated
vendored
Normal file
15
unified-ai-platform/node_modules/ssri/node_modules/yallist/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
The ISC License
|
||||
|
||||
Copyright (c) Isaac Z. Schlueter and Contributors
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
|
||||
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
8
unified-ai-platform/node_modules/ssri/node_modules/yallist/iterator.js
generated
vendored
Normal file
8
unified-ai-platform/node_modules/ssri/node_modules/yallist/iterator.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
'use strict'
|
||||
module.exports = function (Yallist) {
|
||||
Yallist.prototype[Symbol.iterator] = function* () {
|
||||
for (let walker = this.head; walker; walker = walker.next) {
|
||||
yield walker.value
|
||||
}
|
||||
}
|
||||
}
|
||||
29
unified-ai-platform/node_modules/ssri/node_modules/yallist/package.json
generated
vendored
Normal file
29
unified-ai-platform/node_modules/ssri/node_modules/yallist/package.json
generated
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
{
|
||||
"name": "yallist",
|
||||
"version": "4.0.0",
|
||||
"description": "Yet Another Linked List",
|
||||
"main": "yallist.js",
|
||||
"directories": {
|
||||
"test": "test"
|
||||
},
|
||||
"files": [
|
||||
"yallist.js",
|
||||
"iterator.js"
|
||||
],
|
||||
"dependencies": {},
|
||||
"devDependencies": {
|
||||
"tap": "^12.1.0"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "tap test/*.js --100",
|
||||
"preversion": "npm test",
|
||||
"postversion": "npm publish",
|
||||
"postpublish": "git push origin --all; git push origin --tags"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/isaacs/yallist.git"
|
||||
},
|
||||
"author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
|
||||
"license": "ISC"
|
||||
}
|
||||
426
unified-ai-platform/node_modules/ssri/node_modules/yallist/yallist.js
generated
vendored
Normal file
426
unified-ai-platform/node_modules/ssri/node_modules/yallist/yallist.js
generated
vendored
Normal file
@@ -0,0 +1,426 @@
|
||||
'use strict'

// Yet Another Linked List: a doubly-linked list with an array-like API.
module.exports = Yallist

Yallist.Node = Node
Yallist.create = Yallist

// Constructor is new-agnostic: `Yallist(...)` and `new Yallist(...)`
// behave identically. Accepts either one iterable-ish argument
// (anything with a forEach method) or the initial items as positional
// arguments.
function Yallist (list) {
  var self = this
  if (!(self instanceof Yallist)) {
    self = new Yallist()
  }

  self.tail = null
  self.head = null
  self.length = 0

  if (list && typeof list.forEach === 'function') {
    list.forEach(function (item) {
      self.push(item)
    })
  } else if (arguments.length > 0) {
    for (var i = 0; i < arguments.length; i++) {
      self.push(arguments[i])
    }
  }

  return self
}
|
||||
|
||||
// Detach `node` from this list, splicing its neighbours together.
// Returns the node that followed it (convenient while iterating).
Yallist.prototype.removeNode = function (node) {
  if (node.list !== this) {
    throw new Error('removing node which does not belong to this list')
  }

  var next = node.next
  var prev = node.prev

  if (next) {
    next.prev = prev
  }
  if (prev) {
    prev.next = next
  }

  if (node === this.head) {
    this.head = next
  }
  if (node === this.tail) {
    this.tail = prev
  }

  node.list.length--
  node.next = null
  node.prev = null
  node.list = null

  return next
}

// Move an existing Node to the front of this list, removing it from
// whatever list currently owns it. No-op if it is already the head.
Yallist.prototype.unshiftNode = function (node) {
  if (node === this.head) {
    return
  }

  if (node.list) {
    node.list.removeNode(node)
  }

  var oldHead = this.head
  node.list = this
  node.next = oldHead
  if (oldHead) {
    oldHead.prev = node
  }

  this.head = node
  if (!this.tail) {
    this.tail = node
  }
  this.length++
}

// Move an existing Node to the back of this list, removing it from
// whatever list currently owns it. No-op if it is already the tail.
Yallist.prototype.pushNode = function (node) {
  if (node === this.tail) {
    return
  }

  if (node.list) {
    node.list.removeNode(node)
  }

  var oldTail = this.tail
  node.list = this
  node.prev = oldTail
  if (oldTail) {
    oldTail.next = node
  }

  this.tail = node
  if (!this.head) {
    this.head = node
  }
  this.length++
}
|
||||
|
||||
// Append each argument to the end of the list; returns the new length.
Yallist.prototype.push = function () {
  for (var i = 0; i < arguments.length; i++) {
    push(this, arguments[i])
  }
  return this.length
}

// Prepend each argument to the front of the list; returns the new length.
Yallist.prototype.unshift = function () {
  for (var i = 0; i < arguments.length; i++) {
    unshift(this, arguments[i])
  }
  return this.length
}

// Remove and return the last value, or undefined if the list is empty.
Yallist.prototype.pop = function () {
  if (!this.tail) {
    return undefined
  }

  var value = this.tail.value
  this.tail = this.tail.prev
  if (this.tail) {
    this.tail.next = null
  } else {
    // list is now empty
    this.head = null
  }
  this.length--
  return value
}

// Remove and return the first value, or undefined if the list is empty.
Yallist.prototype.shift = function () {
  if (!this.head) {
    return undefined
  }

  var value = this.head.value
  this.head = this.head.next
  if (this.head) {
    this.head.prev = null
  } else {
    // list is now empty
    this.tail = null
  }
  this.length--
  return value
}
|
||||
|
||||
// Array#forEach analogue, walking head -> tail. `thisp` defaults to
// the list itself.
Yallist.prototype.forEach = function (fn, thisp) {
  thisp = thisp || this
  var index = 0
  var node = this.head
  while (node !== null) {
    fn.call(thisp, node.value, index, this)
    node = node.next
    index++
  }
}

// forEach walking tail -> head; indices still count from the head side.
Yallist.prototype.forEachReverse = function (fn, thisp) {
  thisp = thisp || this
  var index = this.length - 1
  var node = this.tail
  while (node !== null) {
    fn.call(thisp, node.value, index, this)
    node = node.prev
    index--
  }
}

// Value at index n from the head, or undefined when out of range.
Yallist.prototype.get = function (n) {
  var i = 0
  var node = this.head
  // abort out of the walk early if we run off the end
  while (node !== null && i < n) {
    node = node.next
    i++
  }
  if (i === n && node !== null) {
    return node.value
  }
}

// Value at index n counting from the tail, or undefined when out of range.
Yallist.prototype.getReverse = function (n) {
  var i = 0
  var node = this.tail
  while (node !== null && i < n) {
    node = node.prev
    i++
  }
  if (i === n && node !== null) {
    return node.value
  }
}

// New list of fn(value, list) for each value, head -> tail.
// NOTE: the callback receives (value, list), not (value, index).
Yallist.prototype.map = function (fn, thisp) {
  thisp = thisp || this
  var out = new Yallist()
  var node = this.head
  while (node !== null) {
    out.push(fn.call(thisp, node.value, this))
    node = node.next
  }
  return out
}

// map, but walking tail -> head.
Yallist.prototype.mapReverse = function (fn, thisp) {
  thisp = thisp || this
  var out = new Yallist()
  var node = this.tail
  while (node !== null) {
    out.push(fn.call(thisp, node.value, this))
    node = node.prev
  }
  return out
}
|
||||
|
||||
// Array#reduce analogue, head -> tail. Without an initial value the
// first element seeds the accumulator; reducing an empty list with no
// initial value throws, matching Array behaviour. (When seeded from
// the first element, the callback index restarts at 0 for the second
// element — preserved from the original implementation.)
Yallist.prototype.reduce = function (fn, initial) {
  var acc
  var node = this.head
  if (arguments.length > 1) {
    acc = initial
  } else if (this.head) {
    node = this.head.next
    acc = this.head.value
  } else {
    throw new TypeError('Reduce of empty list with no initial value')
  }

  var i = 0
  while (node !== null) {
    acc = fn(acc, node.value, i)
    node = node.next
    i++
  }

  return acc
}

// reduce from the tail end; callback indices count down from
// length - 1.
Yallist.prototype.reduceReverse = function (fn, initial) {
  var acc
  var node = this.tail
  if (arguments.length > 1) {
    acc = initial
  } else if (this.tail) {
    node = this.tail.prev
    acc = this.tail.value
  } else {
    throw new TypeError('Reduce of empty list with no initial value')
  }

  var i = this.length - 1
  while (node !== null) {
    acc = fn(acc, node.value, i)
    node = node.prev
    i--
  }

  return acc
}
|
||||
|
||||
// Copy the values into a plain array, head -> tail.
Yallist.prototype.toArray = function () {
  var arr = new Array(this.length)
  var i = 0
  var node = this.head
  while (node !== null) {
    arr[i++] = node.value
    node = node.next
  }
  return arr
}

// Copy the values into a plain array, tail -> head.
Yallist.prototype.toArrayReverse = function () {
  var arr = new Array(this.length)
  var i = 0
  var node = this.tail
  while (node !== null) {
    arr[i++] = node.value
    node = node.prev
  }
  return arr
}
|
||||
|
||||
// Array#slice analogue: new Yallist of the values in [from, to).
// Negative indices count back from the end; out-of-range bounds are
// clamped; an inverted range yields an empty list.
Yallist.prototype.slice = function (from, to) {
  to = to || this.length
  if (to < 0) {
    to += this.length
  }
  from = from || 0
  if (from < 0) {
    from += this.length
  }
  var ret = new Yallist()
  if (to < from || to < 0) {
    return ret
  }
  if (from < 0) {
    from = 0
  }
  if (to > this.length) {
    to = this.length
  }

  var i = 0
  var node = this.head
  // skip forward to `from`
  while (node !== null && i < from) {
    node = node.next
    i++
  }
  // collect until `to`
  while (node !== null && i < to) {
    ret.push(node.value)
    node = node.next
    i++
  }
  return ret
}

// slice measured from the tail end, walking tail -> head, so the
// result comes out in reverse order relative to the list.
Yallist.prototype.sliceReverse = function (from, to) {
  to = to || this.length
  if (to < 0) {
    to += this.length
  }
  from = from || 0
  if (from < 0) {
    from += this.length
  }
  var ret = new Yallist()
  if (to < from || to < 0) {
    return ret
  }
  if (from < 0) {
    from = 0
  }
  if (to > this.length) {
    to = this.length
  }

  var i = this.length
  var node = this.tail
  // skip backward to `to`
  while (node !== null && i > to) {
    node = node.prev
    i--
  }
  // collect until `from`
  while (node !== null && i > from) {
    ret.push(node.value)
    node = node.prev
    i--
  }
  return ret
}
|
||||
|
||||
// Array#splice analogue: delete `deleteCount` values starting at
// `start`, insert `nodes` in their place, and return the removed
// values as a plain array.
Yallist.prototype.splice = function (start, deleteCount, ...nodes) {
  if (start > this.length) {
    start = this.length - 1
  }
  if (start < 0) {
    start = this.length + start
  }

  // walk to the start position
  var node = this.head
  for (var i = 0; node !== null && i < start; i++) {
    node = node.next
  }

  // remove; removeNode() hands back the following node
  var removed = []
  for (var d = 0; node && d < deleteCount; d++) {
    removed.push(node.value)
    node = this.removeNode(node)
  }
  if (node === null) {
    node = this.tail
  }

  // insert() appends after interior nodes, so step back one to keep
  // the insertion point at `start`
  if (node !== this.head && node !== this.tail) {
    node = node.prev
  }

  for (var k = 0; k < nodes.length; k++) {
    node = insert(this, node, nodes[k])
  }
  return removed
}

// Reverse the list in place by swapping prev/next on every node.
// Once a node's pointers are swapped, its .prev is the old .next, so
// advancing via .prev still walks the original forward order.
Yallist.prototype.reverse = function () {
  var oldHead = this.head
  var oldTail = this.tail
  for (var node = oldHead; node !== null; node = node.prev) {
    var tmp = node.prev
    node.prev = node.next
    node.next = tmp
  }
  this.head = oldTail
  this.tail = oldHead
  return this
}
|
||||
|
||||
// Insert `value` adjacent to `node` in list `self`: before it when
// `node` is the head, otherwise after it. Returns the new Node.
function insert (self, node, value) {
  var inserted = node === self.head
    ? new Node(value, null, node, self)
    : new Node(value, node, node.next, self)

  // Node's constructor wired up the neighbours; fix the endpoints here.
  if (inserted.next === null) {
    self.tail = inserted
  }
  if (inserted.prev === null) {
    self.head = inserted
  }

  self.length++

  return inserted
}

// Append one value to the tail of `self`.
function push (self, item) {
  self.tail = new Node(item, self.tail, null, self)
  if (!self.head) {
    self.head = self.tail
  }
  self.length++
}

// Prepend one value to the head of `self`.
function unshift (self, item) {
  self.head = new Node(item, null, self.head, self)
  if (!self.tail) {
    self.tail = self.head
  }
  self.length++
}

// A single list cell. Linking into `prev`/`next` happens here: the
// neighbours' pointers are updated as a side effect of construction.
// new-agnostic, like Yallist itself.
function Node (value, prev, next, list) {
  if (!(this instanceof Node)) {
    return new Node(value, prev, next, list)
  }

  this.list = list
  this.value = value

  if (prev) {
    prev.next = this
    this.prev = prev
  } else {
    this.prev = null
  }

  if (next) {
    next.prev = this
    this.next = next
  } else {
    this.next = null
  }
}

try {
  // add iterator support if Symbol.iterator is present
  require('./iterator.js')(Yallist)
} catch (er) {}
|
||||
0
unified-ai-platform/node_modules/ssri/package.json
generated
vendored
Normal file
0
unified-ai-platform/node_modules/ssri/package.json
generated
vendored
Normal file
Reference in New Issue
Block a user