Mirror of https://github.com/x1xhlol/system-prompts-and-models-of-ai-tools.git

Commit: nhj
unified-ai-platform/node_modules/make-fetch-happen/LICENSE (new file, generated, vendored; +16 lines)
@@ -0,0 +1,16 @@
ISC License

Copyright (c) npm, Inc.

Permission to use, copy, modify, and/or distribute this software for
any purpose with or without fee is hereby granted, provided that the
above copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
USE OR PERFORMANCE OF THIS SOFTWARE.
unified-ai-platform/node_modules/make-fetch-happen/node_modules/lru-cache/LICENSE (new file, generated, vendored; +15 lines)
@@ -0,0 +1,15 @@
The ISC License

Copyright (c) Isaac Z. Schlueter and Contributors

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
unified-ai-platform/node_modules/make-fetch-happen/node_modules/lru-cache/README.md (new file, generated, vendored; +94 lines)
@@ -0,0 +1,94 @@
# lru cache

A cache object that deletes the least-recently-used items.

[Build Status](https://travis-ci.org/isaacs/node-lru-cache) [Coverage Status](https://coveralls.io/github/isaacs/node-lru-cache)

## Installation:

```
npm install lru-cache --save
```

## Usage:

```javascript
var LRU = require("lru-cache")
  , options = { max: 500
              , length: function (n, key) { return n * 2 + key.length }
              , dispose: function (key, n) { n.close() }
              , maxAge: 1000 * 60 * 60 }
  , cache = new LRU(options)
  , otherCache = new LRU(50) // sets just the max size

cache.set("key", "value")
cache.get("key") // "value"

// non-string keys ARE fully supported
// but note that it must be THE SAME object, not
// just a JSON-equivalent object.
var someObject = { a: 1 }
cache.set(someObject, 'a value')
// Object keys are not toString()-ed
cache.set('[object Object]', 'a different value')
assert.equal(cache.get(someObject), 'a value')
// A similar object with same keys/values won't work,
// because it's a different object identity
assert.equal(cache.get({ a: 1 }), undefined)

cache.reset() // empty the cache
```
If you put more stuff in it, then items will fall out.

If you try to put an oversized thing in it, then it'll fall out right
away.

## Options

* `max` The maximum size of the cache, checked by applying the length
  function to all values in the cache. Not setting this is kind of
  silly, since that's the whole purpose of this lib, but it defaults
  to `Infinity`. Setting it to a non-number or negative number will
  throw a `TypeError`. Setting it to 0 makes it be `Infinity`.
* `maxAge` Maximum age in ms. Items are not pro-actively pruned out
  as they age, but if you try to get an item that is too old, it'll
  drop it and return undefined instead of giving it to you.
  Setting this to a negative value will make everything seem old!
  Setting it to a non-number will throw a `TypeError`.
* `length` Function that is used to calculate the length of stored
  items. If you're storing strings or buffers, then you probably want
  to do something like `function(n, key){return n.length}`. The default is
  `function(){return 1}`, which is fine if you want to store `max`
  like-sized things. The item is passed as the first argument, and
  the key is passed as the second argument.
* `dispose` Function that is called on items when they are dropped
  from the cache. This can be handy if you want to close file
  descriptors or do other cleanup tasks when items are no longer
  accessible. Called with `key, value`. It's called *before*
  actually removing the item from the internal cache, so if you want
  to immediately put it back in, you'll have to do that in a
  `nextTick` or `setTimeout` callback or it won't do anything.
* `stale` By default, if you set a `maxAge`, it'll only actually pull
  stale items out of the cache when you `get(key)`. (That is, it's
  not pre-emptively doing a `setTimeout` or anything.) If you set
  `stale:true`, it'll return the stale value before deleting it. If
  you don't set this, then it'll return `undefined` when you try to
  get a stale entry, as if it had already been deleted.
* `noDisposeOnSet` By default, if you set a `dispose()` method, then
  it'll be called whenever a `set()` operation overwrites an existing
  key. If you set this option, `dispose()` will only be called when a
  key falls out of the cache, not when it is overwritten.
* `updateAgeOnGet` When using time-expiring entries with `maxAge`,
  setting this to `true` will make each item's effective time update
  to the current time whenever it is retrieved from cache, causing it
  to not expire. (It can still fall out of cache based on recency of
  use, of course.)
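The interplay of `maxAge`, `stale`, and `dispose` is easier to see in a small sketch. The snippet below is illustrative only and is not part of the upstream README; it assumes the v6 API documented above.

```javascript
// Illustrative sketch (not part of the upstream README), assuming the v6 API:
// maxAge marks entries stale, stale:true hands back the stale value exactly
// once while evicting it, and dispose observes every eviction.
var LRU = require("lru-cache")

var cache = new LRU({
  max: 100,              // at most 100 units of length (default length is 1 per entry)
  maxAge: 50,            // entries older than 50ms are considered stale
  stale: true,           // return the stale value one last time before dropping it
  dispose: function (key, value) {
    console.log("dropped", key) // runs when an entry is evicted or removed
  }
})

cache.set("a", 1)
setTimeout(function () {
  cache.get("a") // stale: returns 1 this one time, then deletes it (dispose fires)
  cache.get("a") // undefined now
}, 60)
```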
## API

* `set(key, value, maxAge)`
* `get(key) => value`

    Both of these will update the "recently used"-ness of the key.
    They do what you think. `maxAge` is optional and over
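Illustratively (not part of the upstream README), the two calls above can be used as follows, assuming the v6 signatures; the third argument to `set` gives that one entry its own time-to-live.

```javascript
// Illustrative sketch of set/get with a per-entry maxAge (v6 API).
var LRU = require("lru-cache")
var cache = new LRU({ max: 10 })

cache.set("session", { user: "alice" }, 1000 * 60) // this entry expires after 1 minute
cache.set("counter", 42)                           // no per-entry maxAge

cache.get("session") // { user: "alice" } while fresh, undefined once expired
cache.get("counter") // 42; also marks "counter" as most recently used
```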
unified-ai-platform/node_modules/make-fetch-happen/node_modules/lru-cache/index.js (new file, generated, vendored; +334 lines)
@@ -0,0 +1,334 @@
|
||||
'use strict'
|
||||
|
||||
// A linked list to keep track of recently-used-ness
|
||||
const Yallist = require('yallist')
|
||||
|
||||
const MAX = Symbol('max')
|
||||
const LENGTH = Symbol('length')
|
||||
const LENGTH_CALCULATOR = Symbol('lengthCalculator')
|
||||
const ALLOW_STALE = Symbol('allowStale')
|
||||
const MAX_AGE = Symbol('maxAge')
|
||||
const DISPOSE = Symbol('dispose')
|
||||
const NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet')
|
||||
const LRU_LIST = Symbol('lruList')
|
||||
const CACHE = Symbol('cache')
|
||||
const UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet')
|
||||
|
||||
const naiveLength = () => 1
|
||||
|
||||
// lruList is a yallist where the head is the youngest
|
||||
// item, and the tail is the oldest. the list contains the Hit
|
||||
// objects as the entries.
|
||||
// Each Hit object has a reference to its Yallist.Node. This
|
||||
// never changes.
|
||||
//
|
||||
// cache is a Map (or PseudoMap) that matches the keys to
|
||||
// the Yallist.Node object.
|
||||
class LRUCache {
|
||||
constructor (options) {
|
||||
if (typeof options === 'number')
|
||||
options = { max: options }
|
||||
|
||||
if (!options)
|
||||
options = {}
|
||||
|
||||
if (options.max && (typeof options.max !== 'number' || options.max < 0))
|
||||
throw new TypeError('max must be a non-negative number')
|
||||
// Kind of weird to have a default max of Infinity, but oh well.
|
||||
const max = this[MAX] = options.max || Infinity
|
||||
|
||||
const lc = options.length || naiveLength
|
||||
this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? naiveLength : lc
|
||||
this[ALLOW_STALE] = options.stale || false
|
||||
if (options.maxAge && typeof options.maxAge !== 'number')
|
||||
throw new TypeError('maxAge must be a number')
|
||||
this[MAX_AGE] = options.maxAge || 0
|
||||
this[DISPOSE] = options.dispose
|
||||
this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false
|
||||
this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false
|
||||
this.reset()
|
||||
}
|
||||
|
||||
// resize the cache when the max changes.
|
||||
set max (mL) {
|
||||
if (typeof mL !== 'number' || mL < 0)
|
||||
throw new TypeError('max must be a non-negative number')
|
||||
|
||||
this[MAX] = mL || Infinity
|
||||
trim(this)
|
||||
}
|
||||
get max () {
|
||||
return this[MAX]
|
||||
}
|
||||
|
||||
set allowStale (allowStale) {
|
||||
this[ALLOW_STALE] = !!allowStale
|
||||
}
|
||||
get allowStale () {
|
||||
return this[ALLOW_STALE]
|
||||
}
|
||||
|
||||
set maxAge (mA) {
|
||||
if (typeof mA !== 'number')
|
||||
throw new TypeError('maxAge must be a non-negative number')
|
||||
|
||||
this[MAX_AGE] = mA
|
||||
trim(this)
|
||||
}
|
||||
get maxAge () {
|
||||
return this[MAX_AGE]
|
||||
}
|
||||
|
||||
// resize the cache when the lengthCalculator changes.
|
||||
set lengthCalculator (lC) {
|
||||
if (typeof lC !== 'function')
|
||||
lC = naiveLength
|
||||
|
||||
if (lC !== this[LENGTH_CALCULATOR]) {
|
||||
this[LENGTH_CALCULATOR] = lC
|
||||
this[LENGTH] = 0
|
||||
this[LRU_LIST].forEach(hit => {
|
||||
hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key)
|
||||
this[LENGTH] += hit.length
|
||||
})
|
||||
}
|
||||
trim(this)
|
||||
}
|
||||
get lengthCalculator () { return this[LENGTH_CALCULATOR] }
|
||||
|
||||
get length () { return this[LENGTH] }
|
||||
get itemCount () { return this[LRU_LIST].length }
|
||||
|
||||
rforEach (fn, thisp) {
|
||||
thisp = thisp || this
|
||||
for (let walker = this[LRU_LIST].tail; walker !== null;) {
|
||||
const prev = walker.prev
|
||||
forEachStep(this, fn, walker, thisp)
|
||||
walker = prev
|
||||
}
|
||||
}
|
||||
|
||||
forEach (fn, thisp) {
|
||||
thisp = thisp || this
|
||||
for (let walker = this[LRU_LIST].head; walker !== null;) {
|
||||
const next = walker.next
|
||||
forEachStep(this, fn, walker, thisp)
|
||||
walker = next
|
||||
}
|
||||
}
|
||||
|
||||
keys () {
|
||||
return this[LRU_LIST].toArray().map(k => k.key)
|
||||
}
|
||||
|
||||
values () {
|
||||
return this[LRU_LIST].toArray().map(k => k.value)
|
||||
}
|
||||
|
||||
reset () {
|
||||
if (this[DISPOSE] &&
|
||||
this[LRU_LIST] &&
|
||||
this[LRU_LIST].length) {
|
||||
this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value))
|
||||
}
|
||||
|
||||
this[CACHE] = new Map() // hash of items by key
|
||||
this[LRU_LIST] = new Yallist() // list of items in order of use recency
|
||||
this[LENGTH] = 0 // length of items in the list
|
||||
}
|
||||
|
||||
dump () {
|
||||
return this[LRU_LIST].map(hit =>
|
||||
isStale(this, hit) ? false : {
|
||||
k: hit.key,
|
||||
v: hit.value,
|
||||
e: hit.now + (hit.maxAge || 0)
|
||||
}).toArray().filter(h => h)
|
||||
}
|
||||
|
||||
dumpLru () {
|
||||
return this[LRU_LIST]
|
||||
}
|
||||
|
||||
set (key, value, maxAge) {
|
||||
maxAge = maxAge || this[MAX_AGE]
|
||||
|
||||
if (maxAge && typeof maxAge !== 'number')
|
||||
throw new TypeError('maxAge must be a number')
|
||||
|
||||
const now = maxAge ? Date.now() : 0
|
||||
const len = this[LENGTH_CALCULATOR](value, key)
|
||||
|
||||
if (this[CACHE].has(key)) {
|
||||
if (len > this[MAX]) {
|
||||
del(this, this[CACHE].get(key))
|
||||
return false
|
||||
}
|
||||
|
||||
const node = this[CACHE].get(key)
|
||||
const item = node.value
|
||||
|
||||
// dispose of the old one before overwriting
|
||||
// split out into 2 ifs for better coverage tracking
|
||||
if (this[DISPOSE]) {
|
||||
if (!this[NO_DISPOSE_ON_SET])
|
||||
this[DISPOSE](key, item.value)
|
||||
}
|
||||
|
||||
item.now = now
|
||||
item.maxAge = maxAge
|
||||
item.value = value
|
||||
this[LENGTH] += len - item.length
|
||||
item.length = len
|
||||
this.get(key)
|
||||
trim(this)
|
||||
return true
|
||||
}
|
||||
|
||||
const hit = new Entry(key, value, len, now, maxAge)
|
||||
|
||||
// oversized objects fall out of cache automatically.
|
||||
if (hit.length > this[MAX]) {
|
||||
if (this[DISPOSE])
|
||||
this[DISPOSE](key, value)
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
this[LENGTH] += hit.length
|
||||
this[LRU_LIST].unshift(hit)
|
||||
this[CACHE].set(key, this[LRU_LIST].head)
|
||||
trim(this)
|
||||
return true
|
||||
}
|
||||
|
||||
has (key) {
|
||||
if (!this[CACHE].has(key)) return false
|
||||
const hit = this[CACHE].get(key).value
|
||||
return !isStale(this, hit)
|
||||
}
|
||||
|
||||
get (key) {
|
||||
return get(this, key, true)
|
||||
}
|
||||
|
||||
peek (key) {
|
||||
return get(this, key, false)
|
||||
}
|
||||
|
||||
pop () {
|
||||
const node = this[LRU_LIST].tail
|
||||
if (!node)
|
||||
return null
|
||||
|
||||
del(this, node)
|
||||
return node.value
|
||||
}
|
||||
|
||||
del (key) {
|
||||
del(this, this[CACHE].get(key))
|
||||
}
|
||||
|
||||
load (arr) {
|
||||
// reset the cache
|
||||
this.reset()
|
||||
|
||||
const now = Date.now()
|
||||
// A previous serialized cache has the most recent items first
|
||||
for (let l = arr.length - 1; l >= 0; l--) {
|
||||
const hit = arr[l]
|
||||
const expiresAt = hit.e || 0
|
||||
if (expiresAt === 0)
|
||||
// the item was created without expiration in a non aged cache
|
||||
this.set(hit.k, hit.v)
|
||||
else {
|
||||
const maxAge = expiresAt - now
|
||||
// dont add already expired items
|
||||
if (maxAge > 0) {
|
||||
this.set(hit.k, hit.v, maxAge)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
prune () {
|
||||
this[CACHE].forEach((value, key) => get(this, key, false))
|
||||
}
|
||||
}
|
||||
|
||||
const get = (self, key, doUse) => {
|
||||
const node = self[CACHE].get(key)
|
||||
if (node) {
|
||||
const hit = node.value
|
||||
if (isStale(self, hit)) {
|
||||
del(self, node)
|
||||
if (!self[ALLOW_STALE])
|
||||
return undefined
|
||||
} else {
|
||||
if (doUse) {
|
||||
if (self[UPDATE_AGE_ON_GET])
|
||||
node.value.now = Date.now()
|
||||
self[LRU_LIST].unshiftNode(node)
|
||||
}
|
||||
}
|
||||
return hit.value
|
||||
}
|
||||
}
|
||||
|
||||
const isStale = (self, hit) => {
|
||||
if (!hit || (!hit.maxAge && !self[MAX_AGE]))
|
||||
return false
|
||||
|
||||
const diff = Date.now() - hit.now
|
||||
return hit.maxAge ? diff > hit.maxAge
|
||||
: self[MAX_AGE] && (diff > self[MAX_AGE])
|
||||
}
|
||||
|
||||
const trim = self => {
|
||||
if (self[LENGTH] > self[MAX]) {
|
||||
for (let walker = self[LRU_LIST].tail;
|
||||
self[LENGTH] > self[MAX] && walker !== null;) {
|
||||
// We know that we're about to delete this one, and also
|
||||
// what the next least recently used key will be, so just
|
||||
// go ahead and set it now.
|
||||
const prev = walker.prev
|
||||
del(self, walker)
|
||||
walker = prev
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const del = (self, node) => {
|
||||
if (node) {
|
||||
const hit = node.value
|
||||
if (self[DISPOSE])
|
||||
self[DISPOSE](hit.key, hit.value)
|
||||
|
||||
self[LENGTH] -= hit.length
|
||||
self[CACHE].delete(hit.key)
|
||||
self[LRU_LIST].removeNode(node)
|
||||
}
|
||||
}
|
||||
|
||||
class Entry {
|
||||
constructor (key, value, length, now, maxAge) {
|
||||
this.key = key
|
||||
this.value = value
|
||||
this.length = length
|
||||
this.now = now
|
||||
this.maxAge = maxAge || 0
|
||||
}
|
||||
}
|
||||
|
||||
const forEachStep = (self, fn, node, thisp) => {
|
||||
let hit = node.value
|
||||
if (isStale(self, hit)) {
|
||||
del(self, node)
|
||||
if (!self[ALLOW_STALE])
|
||||
hit = undefined
|
||||
}
|
||||
if (hit)
|
||||
fn.call(thisp, hit.value, hit.key, self)
|
||||
}
|
||||
|
||||
module.exports = LRUCache
|
||||
unified-ai-platform/node_modules/make-fetch-happen/node_modules/lru-cache/package.json (new file, generated, vendored; +34 lines)
@@ -0,0 +1,34 @@
{
  "name": "lru-cache",
  "description": "A cache object that deletes the least-recently-used items.",
  "version": "6.0.0",
  "author": "Isaac Z. Schlueter <i@izs.me>",
  "keywords": [
    "mru",
    "lru",
    "cache"
  ],
  "scripts": {
    "test": "tap",
    "snap": "tap",
    "preversion": "npm test",
    "postversion": "npm publish",
    "prepublishOnly": "git push origin --follow-tags"
  },
  "main": "index.js",
  "repository": "git://github.com/isaacs/node-lru-cache.git",
  "devDependencies": {
    "benchmark": "^2.1.4",
    "tap": "^14.10.7"
  },
  "license": "ISC",
  "dependencies": {
    "yallist": "^4.0.0"
  },
  "files": [
    "index.js"
  ],
  "engines": {
    "node": ">=10"
  }
}
unified-ai-platform/node_modules/make-fetch-happen/node_modules/minipass/LICENSE (new file, generated, vendored; +15 lines)
@@ -0,0 +1,15 @@
The ISC License

Copyright (c) 2017-2022 npm, Inc., Isaac Z. Schlueter, and Contributors

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
unified-ai-platform/node_modules/make-fetch-happen/node_modules/minipass/README.md (new file, generated, vendored; empty, 0 lines)
unified-ai-platform/node_modules/make-fetch-happen/node_modules/minipass/index.js (new file, generated, vendored; +649 lines)
@@ -0,0 +1,649 @@
|
||||
'use strict'
|
||||
const proc = typeof process === 'object' && process ? process : {
|
||||
stdout: null,
|
||||
stderr: null,
|
||||
}
|
||||
const EE = require('events')
|
||||
const Stream = require('stream')
|
||||
const SD = require('string_decoder').StringDecoder
|
||||
|
||||
const EOF = Symbol('EOF')
|
||||
const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
|
||||
const EMITTED_END = Symbol('emittedEnd')
|
||||
const EMITTING_END = Symbol('emittingEnd')
|
||||
const EMITTED_ERROR = Symbol('emittedError')
|
||||
const CLOSED = Symbol('closed')
|
||||
const READ = Symbol('read')
|
||||
const FLUSH = Symbol('flush')
|
||||
const FLUSHCHUNK = Symbol('flushChunk')
|
||||
const ENCODING = Symbol('encoding')
|
||||
const DECODER = Symbol('decoder')
|
||||
const FLOWING = Symbol('flowing')
|
||||
const PAUSED = Symbol('paused')
|
||||
const RESUME = Symbol('resume')
|
||||
const BUFFERLENGTH = Symbol('bufferLength')
|
||||
const BUFFERPUSH = Symbol('bufferPush')
|
||||
const BUFFERSHIFT = Symbol('bufferShift')
|
||||
const OBJECTMODE = Symbol('objectMode')
|
||||
const DESTROYED = Symbol('destroyed')
|
||||
const EMITDATA = Symbol('emitData')
|
||||
const EMITEND = Symbol('emitEnd')
|
||||
const EMITEND2 = Symbol('emitEnd2')
|
||||
const ASYNC = Symbol('async')
|
||||
|
||||
const defer = fn => Promise.resolve().then(fn)
|
||||
|
||||
// TODO remove when Node v8 support drops
|
||||
const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1'
|
||||
const ASYNCITERATOR = doIter && Symbol.asyncIterator
|
||||
|| Symbol('asyncIterator not implemented')
|
||||
const ITERATOR = doIter && Symbol.iterator
|
||||
|| Symbol('iterator not implemented')
|
||||
|
||||
// events that mean 'the stream is over'
|
||||
// these are treated specially, and re-emitted
|
||||
// if they are listened for after emitting.
|
||||
const isEndish = ev =>
|
||||
ev === 'end' ||
|
||||
ev === 'finish' ||
|
||||
ev === 'prefinish'
|
||||
|
||||
const isArrayBuffer = b => b instanceof ArrayBuffer ||
|
||||
typeof b === 'object' &&
|
||||
b.constructor &&
|
||||
b.constructor.name === 'ArrayBuffer' &&
|
||||
b.byteLength >= 0
|
||||
|
||||
const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b)
|
||||
|
||||
class Pipe {
|
||||
constructor (src, dest, opts) {
|
||||
this.src = src
|
||||
this.dest = dest
|
||||
this.opts = opts
|
||||
this.ondrain = () => src[RESUME]()
|
||||
dest.on('drain', this.ondrain)
|
||||
}
|
||||
unpipe () {
|
||||
this.dest.removeListener('drain', this.ondrain)
|
||||
}
|
||||
// istanbul ignore next - only here for the prototype
|
||||
proxyErrors () {}
|
||||
end () {
|
||||
this.unpipe()
|
||||
if (this.opts.end)
|
||||
this.dest.end()
|
||||
}
|
||||
}
|
||||
|
||||
class PipeProxyErrors extends Pipe {
|
||||
unpipe () {
|
||||
this.src.removeListener('error', this.proxyErrors)
|
||||
super.unpipe()
|
||||
}
|
||||
constructor (src, dest, opts) {
|
||||
super(src, dest, opts)
|
||||
this.proxyErrors = er => dest.emit('error', er)
|
||||
src.on('error', this.proxyErrors)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = class Minipass extends Stream {
|
||||
constructor (options) {
|
||||
super()
|
||||
this[FLOWING] = false
|
||||
// whether we're explicitly paused
|
||||
this[PAUSED] = false
|
||||
this.pipes = []
|
||||
this.buffer = []
|
||||
this[OBJECTMODE] = options && options.objectMode || false
|
||||
if (this[OBJECTMODE])
|
||||
this[ENCODING] = null
|
||||
else
|
||||
this[ENCODING] = options && options.encoding || null
|
||||
if (this[ENCODING] === 'buffer')
|
||||
this[ENCODING] = null
|
||||
this[ASYNC] = options && !!options.async || false
|
||||
this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
|
||||
this[EOF] = false
|
||||
this[EMITTED_END] = false
|
||||
this[EMITTING_END] = false
|
||||
this[CLOSED] = false
|
||||
this[EMITTED_ERROR] = null
|
||||
this.writable = true
|
||||
this.readable = true
|
||||
this[BUFFERLENGTH] = 0
|
||||
this[DESTROYED] = false
|
||||
}
|
||||
|
||||
get bufferLength () { return this[BUFFERLENGTH] }
|
||||
|
||||
get encoding () { return this[ENCODING] }
|
||||
set encoding (enc) {
|
||||
if (this[OBJECTMODE])
|
||||
throw new Error('cannot set encoding in objectMode')
|
||||
|
||||
if (this[ENCODING] && enc !== this[ENCODING] &&
|
||||
(this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH]))
|
||||
throw new Error('cannot change encoding')
|
||||
|
||||
if (this[ENCODING] !== enc) {
|
||||
this[DECODER] = enc ? new SD(enc) : null
|
||||
if (this.buffer.length)
|
||||
this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk))
|
||||
}
|
||||
|
||||
this[ENCODING] = enc
|
||||
}
|
||||
|
||||
setEncoding (enc) {
|
||||
this.encoding = enc
|
||||
}
|
||||
|
||||
get objectMode () { return this[OBJECTMODE] }
|
||||
set objectMode (om) { this[OBJECTMODE] = this[OBJECTMODE] || !!om }
|
||||
|
||||
get ['async'] () { return this[ASYNC] }
|
||||
set ['async'] (a) { this[ASYNC] = this[ASYNC] || !!a }
|
||||
|
||||
write (chunk, encoding, cb) {
|
||||
if (this[EOF])
|
||||
throw new Error('write after end')
|
||||
|
||||
if (this[DESTROYED]) {
|
||||
this.emit('error', Object.assign(
|
||||
new Error('Cannot call write after a stream was destroyed'),
|
||||
{ code: 'ERR_STREAM_DESTROYED' }
|
||||
))
|
||||
return true
|
||||
}
|
||||
|
||||
if (typeof encoding === 'function')
|
||||
cb = encoding, encoding = 'utf8'
|
||||
|
||||
if (!encoding)
|
||||
encoding = 'utf8'
|
||||
|
||||
const fn = this[ASYNC] ? defer : f => f()
|
||||
|
||||
// convert array buffers and typed array views into buffers
|
||||
// at some point in the future, we may want to do the opposite!
|
||||
// leave strings and buffers as-is
|
||||
// anything else switches us into object mode
|
||||
if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
|
||||
if (isArrayBufferView(chunk))
|
||||
chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
|
||||
else if (isArrayBuffer(chunk))
|
||||
chunk = Buffer.from(chunk)
|
||||
else if (typeof chunk !== 'string')
|
||||
// use the setter so we throw if we have encoding set
|
||||
this.objectMode = true
|
||||
}
|
||||
|
||||
// handle object mode up front, since it's simpler
|
||||
// this yields better performance, fewer checks later.
|
||||
if (this[OBJECTMODE]) {
|
||||
/* istanbul ignore if - maybe impossible? */
|
||||
if (this.flowing && this[BUFFERLENGTH] !== 0)
|
||||
this[FLUSH](true)
|
||||
|
||||
if (this.flowing)
|
||||
this.emit('data', chunk)
|
||||
else
|
||||
this[BUFFERPUSH](chunk)
|
||||
|
||||
if (this[BUFFERLENGTH] !== 0)
|
||||
this.emit('readable')
|
||||
|
||||
if (cb)
|
||||
fn(cb)
|
||||
|
||||
return this.flowing
|
||||
}
|
||||
|
||||
// at this point the chunk is a buffer or string
|
||||
// don't buffer it up or send it to the decoder
|
||||
if (!chunk.length) {
|
||||
if (this[BUFFERLENGTH] !== 0)
|
||||
this.emit('readable')
|
||||
if (cb)
|
||||
fn(cb)
|
||||
return this.flowing
|
||||
}
|
||||
|
||||
// fast-path writing strings of same encoding to a stream with
|
||||
// an empty buffer, skipping the buffer/decoder dance
|
||||
if (typeof chunk === 'string' &&
|
||||
// unless it is a string already ready for us to use
|
||||
!(encoding === this[ENCODING] && !this[DECODER].lastNeed)) {
|
||||
chunk = Buffer.from(chunk, encoding)
|
||||
}
|
||||
|
||||
if (Buffer.isBuffer(chunk) && this[ENCODING])
|
||||
chunk = this[DECODER].write(chunk)
|
||||
|
||||
// Note: flushing CAN potentially switch us into not-flowing mode
|
||||
if (this.flowing && this[BUFFERLENGTH] !== 0)
|
||||
this[FLUSH](true)
|
||||
|
||||
if (this.flowing)
|
||||
this.emit('data', chunk)
|
||||
else
|
||||
this[BUFFERPUSH](chunk)
|
||||
|
||||
if (this[BUFFERLENGTH] !== 0)
|
||||
this.emit('readable')
|
||||
|
||||
if (cb)
|
||||
fn(cb)
|
||||
|
||||
return this.flowing
|
||||
}
|
||||
|
||||
read (n) {
|
||||
if (this[DESTROYED])
|
||||
return null
|
||||
|
||||
if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) {
|
||||
this[MAYBE_EMIT_END]()
|
||||
return null
|
||||
}
|
||||
|
||||
if (this[OBJECTMODE])
|
||||
n = null
|
||||
|
||||
if (this.buffer.length > 1 && !this[OBJECTMODE]) {
|
||||
if (this.encoding)
|
||||
this.buffer = [this.buffer.join('')]
|
||||
else
|
||||
this.buffer = [Buffer.concat(this.buffer, this[BUFFERLENGTH])]
|
||||
}
|
||||
|
||||
const ret = this[READ](n || null, this.buffer[0])
|
||||
this[MAYBE_EMIT_END]()
|
||||
return ret
|
||||
}
|
||||
|
||||
[READ] (n, chunk) {
|
||||
if (n === chunk.length || n === null)
|
||||
this[BUFFERSHIFT]()
|
||||
else {
|
||||
this.buffer[0] = chunk.slice(n)
|
||||
chunk = chunk.slice(0, n)
|
||||
this[BUFFERLENGTH] -= n
|
||||
}
|
||||
|
||||
this.emit('data', chunk)
|
||||
|
||||
if (!this.buffer.length && !this[EOF])
|
||||
this.emit('drain')
|
||||
|
||||
return chunk
|
||||
}
|
||||
|
||||
end (chunk, encoding, cb) {
|
||||
if (typeof chunk === 'function')
|
||||
cb = chunk, chunk = null
|
||||
if (typeof encoding === 'function')
|
||||
cb = encoding, encoding = 'utf8'
|
||||
if (chunk)
|
||||
this.write(chunk, encoding)
|
||||
if (cb)
|
||||
this.once('end', cb)
|
||||
this[EOF] = true
|
||||
this.writable = false
|
||||
|
||||
// if we haven't written anything, then go ahead and emit,
|
||||
// even if we're not reading.
|
||||
// we'll re-emit if a new 'end' listener is added anyway.
|
||||
// This makes MP more suitable to write-only use cases.
|
||||
if (this.flowing || !this[PAUSED])
|
||||
this[MAYBE_EMIT_END]()
|
||||
return this
|
||||
}
|
||||
|
||||
// don't let the internal resume be overwritten
|
||||
[RESUME] () {
|
||||
if (this[DESTROYED])
|
||||
return
|
||||
|
||||
this[PAUSED] = false
|
||||
this[FLOWING] = true
|
||||
this.emit('resume')
|
||||
if (this.buffer.length)
|
||||
this[FLUSH]()
|
||||
else if (this[EOF])
|
||||
this[MAYBE_EMIT_END]()
|
||||
else
|
||||
this.emit('drain')
|
||||
}
|
||||
|
||||
resume () {
|
||||
return this[RESUME]()
|
||||
}
|
||||
|
||||
pause () {
|
||||
this[FLOWING] = false
|
||||
this[PAUSED] = true
|
||||
}
|
||||
|
||||
get destroyed () {
|
||||
return this[DESTROYED]
|
||||
}
|
||||
|
||||
get flowing () {
|
||||
return this[FLOWING]
|
||||
}
|
||||
|
||||
get paused () {
|
||||
return this[PAUSED]
|
||||
}
|
||||
|
||||
[BUFFERPUSH] (chunk) {
|
||||
if (this[OBJECTMODE])
|
||||
this[BUFFERLENGTH] += 1
|
||||
else
|
||||
this[BUFFERLENGTH] += chunk.length
|
||||
this.buffer.push(chunk)
|
||||
}
|
||||
|
||||
[BUFFERSHIFT] () {
|
||||
if (this.buffer.length) {
|
||||
if (this[OBJECTMODE])
|
||||
this[BUFFERLENGTH] -= 1
|
||||
else
|
||||
this[BUFFERLENGTH] -= this.buffer[0].length
|
||||
}
|
||||
return this.buffer.shift()
|
||||
}
|
||||
|
||||
[FLUSH] (noDrain) {
|
||||
do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()))
|
||||
|
||||
if (!noDrain && !this.buffer.length && !this[EOF])
|
||||
this.emit('drain')
|
||||
}
|
||||
|
||||
[FLUSHCHUNK] (chunk) {
|
||||
return chunk ? (this.emit('data', chunk), this.flowing) : false
|
||||
}
|
||||
|
||||
pipe (dest, opts) {
|
||||
if (this[DESTROYED])
|
||||
return
|
||||
|
||||
const ended = this[EMITTED_END]
|
||||
opts = opts || {}
|
||||
if (dest === proc.stdout || dest === proc.stderr)
|
||||
opts.end = false
|
||||
else
|
||||
opts.end = opts.end !== false
|
||||
opts.proxyErrors = !!opts.proxyErrors
|
||||
|
||||
// piping an ended stream ends immediately
|
||||
if (ended) {
|
||||
if (opts.end)
|
||||
dest.end()
|
||||
} else {
|
||||
this.pipes.push(!opts.proxyErrors ? new Pipe(this, dest, opts)
|
||||
: new PipeProxyErrors(this, dest, opts))
|
||||
if (this[ASYNC])
|
||||
defer(() => this[RESUME]())
|
||||
else
|
||||
this[RESUME]()
|
||||
}
|
||||
|
||||
return dest
|
||||
}
|
||||
|
||||
unpipe (dest) {
|
||||
const p = this.pipes.find(p => p.dest === dest)
|
||||
if (p) {
|
||||
this.pipes.splice(this.pipes.indexOf(p), 1)
|
||||
p.unpipe()
|
||||
}
|
||||
}
|
||||
|
||||
addListener (ev, fn) {
|
||||
return this.on(ev, fn)
|
||||
}
|
||||
|
||||
on (ev, fn) {
|
||||
const ret = super.on(ev, fn)
|
||||
if (ev === 'data' && !this.pipes.length && !this.flowing)
|
||||
this[RESUME]()
|
||||
else if (ev === 'readable' && this[BUFFERLENGTH] !== 0)
|
||||
super.emit('readable')
|
||||
else if (isEndish(ev) && this[EMITTED_END]) {
|
||||
super.emit(ev)
|
||||
this.removeAllListeners(ev)
|
||||
} else if (ev === 'error' && this[EMITTED_ERROR]) {
|
||||
if (this[ASYNC])
|
||||
defer(() => fn.call(this, this[EMITTED_ERROR]))
|
||||
else
|
||||
fn.call(this, this[EMITTED_ERROR])
|
||||
}
|
||||
return ret
|
||||
}
|
||||
|
||||
get emittedEnd () {
|
||||
return this[EMITTED_END]
|
||||
}
|
||||
|
||||
[MAYBE_EMIT_END] () {
|
||||
if (!this[EMITTING_END] &&
|
||||
!this[EMITTED_END] &&
|
||||
!this[DESTROYED] &&
|
||||
this.buffer.length === 0 &&
|
||||
this[EOF]) {
|
||||
this[EMITTING_END] = true
|
||||
this.emit('end')
|
||||
this.emit('prefinish')
|
||||
this.emit('finish')
|
||||
if (this[CLOSED])
|
||||
this.emit('close')
|
||||
this[EMITTING_END] = false
|
||||
}
|
||||
}
|
||||
|
||||
emit (ev, data, ...extra) {
|
||||
// error and close are only events allowed after calling destroy()
|
||||
if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED])
|
||||
return
|
||||
else if (ev === 'data') {
|
||||
return !data ? false
|
||||
: this[ASYNC] ? defer(() => this[EMITDATA](data))
|
||||
: this[EMITDATA](data)
|
||||
} else if (ev === 'end') {
|
||||
return this[EMITEND]()
|
||||
} else if (ev === 'close') {
|
||||
this[CLOSED] = true
|
||||
// don't emit close before 'end' and 'finish'
|
||||
if (!this[EMITTED_END] && !this[DESTROYED])
|
||||
return
|
||||
const ret = super.emit('close')
|
||||
this.removeAllListeners('close')
|
||||
return ret
|
||||
} else if (ev === 'error') {
|
||||
this[EMITTED_ERROR] = data
|
||||
const ret = super.emit('error', data)
|
||||
this[MAYBE_EMIT_END]()
|
||||
return ret
|
||||
} else if (ev === 'resume') {
|
||||
const ret = super.emit('resume')
|
||||
this[MAYBE_EMIT_END]()
|
||||
return ret
|
||||
} else if (ev === 'finish' || ev === 'prefinish') {
|
||||
const ret = super.emit(ev)
|
||||
this.removeAllListeners(ev)
|
||||
return ret
|
||||
}
|
||||
|
||||
// Some other unknown event
|
||||
const ret = super.emit(ev, data, ...extra)
|
||||
this[MAYBE_EMIT_END]()
|
||||
return ret
|
||||
}
|
||||
|
||||
[EMITDATA] (data) {
|
||||
for (const p of this.pipes) {
|
||||
if (p.dest.write(data) === false)
|
||||
this.pause()
|
||||
}
|
||||
const ret = super.emit('data', data)
|
||||
this[MAYBE_EMIT_END]()
|
||||
return ret
|
||||
}
|
||||
|
||||
[EMITEND] () {
|
||||
if (this[EMITTED_END])
|
||||
return
|
||||
|
||||
this[EMITTED_END] = true
|
||||
this.readable = false
|
||||
if (this[ASYNC])
|
||||
defer(() => this[EMITEND2]())
|
||||
else
|
||||
this[EMITEND2]()
|
||||
}
|
||||
|
||||
[EMITEND2] () {
|
||||
if (this[DECODER]) {
|
||||
const data = this[DECODER].end()
|
||||
if (data) {
|
||||
for (const p of this.pipes) {
|
||||
p.dest.write(data)
|
||||
}
|
||||
super.emit('data', data)
|
||||
}
|
||||
}
|
||||
|
||||
for (const p of this.pipes) {
|
||||
p.end()
|
||||
}
|
||||
const ret = super.emit('end')
|
||||
this.removeAllListeners('end')
|
||||
return ret
|
||||
}
|
||||
|
||||
// const all = await stream.collect()
|
||||
collect () {
|
||||
const buf = []
|
||||
if (!this[OBJECTMODE])
|
||||
buf.dataLength = 0
|
||||
// set the promise first, in case an error is raised
|
||||
// by triggering the flow here.
|
||||
const p = this.promise()
|
||||
this.on('data', c => {
|
||||
buf.push(c)
|
||||
if (!this[OBJECTMODE])
|
||||
buf.dataLength += c.length
|
||||
})
|
||||
return p.then(() => buf)
|
||||
}
|
||||
|
||||
// const data = await stream.concat()
|
||||
concat () {
|
||||
return this[OBJECTMODE]
|
||||
? Promise.reject(new Error('cannot concat in objectMode'))
|
||||
: this.collect().then(buf =>
|
||||
this[OBJECTMODE]
|
||||
? Promise.reject(new Error('cannot concat in objectMode'))
|
||||
: this[ENCODING] ? buf.join('') : Buffer.concat(buf, buf.dataLength))
|
||||
}
|
||||
|
||||
// stream.promise().then(() => done, er => emitted error)
|
||||
promise () {
|
||||
return new Promise((resolve, reject) => {
|
||||
this.on(DESTROYED, () => reject(new Error('stream destroyed')))
|
||||
this.on('error', er => reject(er))
|
||||
this.on('end', () => resolve())
|
||||
})
|
||||
}
|
||||
|
||||
// for await (let chunk of stream)
|
||||
[ASYNCITERATOR] () {
|
||||
const next = () => {
|
||||
const res = this.read()
|
||||
if (res !== null)
|
||||
return Promise.resolve({ done: false, value: res })
|
||||
|
||||
if (this[EOF])
|
||||
return Promise.resolve({ done: true })
|
||||
|
||||
let resolve = null
|
||||
let reject = null
|
||||
const onerr = er => {
|
||||
this.removeListener('data', ondata)
|
||||
this.removeListener('end', onend)
|
||||
reject(er)
|
||||
}
|
||||
const ondata = value => {
|
||||
this.removeListener('error', onerr)
|
||||
this.removeListener('end', onend)
|
||||
this.pause()
|
||||
resolve({ value: value, done: !!this[EOF] })
|
||||
}
|
||||
const onend = () => {
|
||||
this.removeListener('error', onerr)
|
||||
this.removeListener('data', ondata)
|
||||
resolve({ done: true })
|
||||
}
|
||||
const ondestroy = () => onerr(new Error('stream destroyed'))
|
||||
return new Promise((res, rej) => {
|
||||
reject = rej
|
||||
resolve = res
|
||||
this.once(DESTROYED, ondestroy)
|
||||
this.once('error', onerr)
|
||||
this.once('end', onend)
|
||||
this.once('data', ondata)
|
||||
})
|
||||
}
|
||||
|
||||
return { next }
|
||||
}
|
||||
|
||||
// for (let chunk of stream)
|
||||
[ITERATOR] () {
|
||||
const next = () => {
|
||||
const value = this.read()
|
||||
const done = value === null
|
||||
return { value, done }
|
||||
}
|
||||
return { next }
|
||||
}
|
||||
|
||||
destroy (er) {
|
||||
if (this[DESTROYED]) {
|
||||
if (er)
|
||||
this.emit('error', er)
|
||||
else
|
||||
this.emit(DESTROYED)
|
||||
return this
|
||||
}
|
||||
|
||||
this[DESTROYED] = true
|
||||
|
||||
// throw away all buffered data, it's never coming out
|
||||
this.buffer.length = 0
|
||||
this[BUFFERLENGTH] = 0
|
||||
|
||||
if (typeof this.close === 'function' && !this[CLOSED])
|
||||
this.close()
|
||||
|
||||
if (er)
|
||||
this.emit('error', er)
|
||||
else // if no error to emit, still reject pending promises
|
||||
this.emit(DESTROYED)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
static isStream (s) {
|
||||
return !!s && (s instanceof Minipass || s instanceof Stream ||
|
||||
s instanceof EE && (
|
||||
typeof s.pipe === 'function' || // readable
|
||||
(typeof s.write === 'function' && typeof s.end === 'function') // writable
|
||||
))
|
||||
}
|
||||
}
|
||||
unified-ai-platform/node_modules/make-fetch-happen/node_modules/minipass/package.json (new file, generated, vendored; +56 lines)
@@ -0,0 +1,56 @@
{
  "name": "minipass",
  "version": "3.3.6",
  "description": "minimal implementation of a PassThrough stream",
  "main": "index.js",
  "types": "index.d.ts",
  "dependencies": {
    "yallist": "^4.0.0"
  },
  "devDependencies": {
    "@types/node": "^17.0.41",
    "end-of-stream": "^1.4.0",
    "prettier": "^2.6.2",
    "tap": "^16.2.0",
    "through2": "^2.0.3",
    "ts-node": "^10.8.1",
    "typescript": "^4.7.3"
  },
  "scripts": {
    "test": "tap",
    "preversion": "npm test",
    "postversion": "npm publish",
    "postpublish": "git push origin --follow-tags"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/isaacs/minipass.git"
  },
  "keywords": [
    "passthrough",
    "stream"
  ],
  "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
  "license": "ISC",
  "files": [
    "index.d.ts",
    "index.js"
  ],
  "tap": {
    "check-coverage": true
  },
  "engines": {
    "node": ">=8"
  },
  "prettier": {
    "semi": false,
    "printWidth": 80,
    "tabWidth": 2,
    "useTabs": false,
    "singleQuote": true,
    "jsxSingleQuote": false,
    "bracketSameLine": true,
    "arrowParens": "avoid",
    "endOfLine": "lf"
  }
}
unified-ai-platform/node_modules/make-fetch-happen/node_modules/yallist/LICENSE (new file, generated, vendored; +15 lines)
@@ -0,0 +1,15 @@
The ISC License

Copyright (c) Isaac Z. Schlueter and Contributors

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
unified-ai-platform/node_modules/make-fetch-happen/node_modules/yallist/iterator.js (new file, generated, vendored; +8 lines)
@@ -0,0 +1,8 @@
'use strict'
module.exports = function (Yallist) {
  Yallist.prototype[Symbol.iterator] = function* () {
    for (let walker = this.head; walker; walker = walker.next) {
      yield walker.value
    }
  }
}
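For orientation (not part of the vendored file): once this iterator is installed, a Yallist can be consumed with `for...of` or spread into an array. A minimal sketch, assuming the `Yallist.create` constructor defined in yallist.js below:

```javascript
// Illustrative sketch: Symbol.iterator makes the linked list iterable.
const Yallist = require('yallist')

const list = Yallist.create([1, 2, 3])
for (const value of list) {
  console.log(value) // 1, then 2, then 3 (head to tail)
}
console.log([...list]) // [ 1, 2, 3 ]
```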
unified-ai-platform/node_modules/make-fetch-happen/node_modules/yallist/package.json (new file, generated, vendored; +29 lines)
@@ -0,0 +1,29 @@
{
  "name": "yallist",
  "version": "4.0.0",
  "description": "Yet Another Linked List",
  "main": "yallist.js",
  "directories": {
    "test": "test"
  },
  "files": [
    "yallist.js",
    "iterator.js"
  ],
  "dependencies": {},
  "devDependencies": {
    "tap": "^12.1.0"
  },
  "scripts": {
    "test": "tap test/*.js --100",
    "preversion": "npm test",
    "postversion": "npm publish",
    "postpublish": "git push origin --all; git push origin --tags"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/isaacs/yallist.git"
  },
  "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
  "license": "ISC"
}
unified-ai-platform/node_modules/make-fetch-happen/node_modules/yallist/yallist.js (new file, generated, vendored; +426 lines)
@@ -0,0 +1,426 @@
|
||||
'use strict'
|
||||
module.exports = Yallist
|
||||
|
||||
Yallist.Node = Node
|
||||
Yallist.create = Yallist
|
||||
|
||||
function Yallist (list) {
|
||||
var self = this
|
||||
if (!(self instanceof Yallist)) {
|
||||
self = new Yallist()
|
||||
}
|
||||
|
||||
self.tail = null
|
||||
self.head = null
|
||||
self.length = 0
|
||||
|
||||
if (list && typeof list.forEach === 'function') {
|
||||
list.forEach(function (item) {
|
||||
self.push(item)
|
||||
})
|
||||
} else if (arguments.length > 0) {
|
||||
for (var i = 0, l = arguments.length; i < l; i++) {
|
||||
self.push(arguments[i])
|
||||
}
|
||||
}
|
||||
|
||||
return self
|
||||
}
|
||||
|
||||
Yallist.prototype.removeNode = function (node) {
|
||||
if (node.list !== this) {
|
||||
throw new Error('removing node which does not belong to this list')
|
||||
}
|
||||
|
||||
var next = node.next
|
||||
var prev = node.prev
|
||||
|
||||
if (next) {
|
||||
next.prev = prev
|
||||
}
|
||||
|
||||
if (prev) {
|
||||
prev.next = next
|
||||
}
|
||||
|
||||
if (node === this.head) {
|
||||
this.head = next
|
||||
}
|
||||
if (node === this.tail) {
|
||||
this.tail = prev
|
||||
}
|
||||
|
||||
node.list.length--
|
||||
node.next = null
|
||||
node.prev = null
|
||||
node.list = null
|
||||
|
||||
return next
|
||||
}
|
||||
|
||||
Yallist.prototype.unshiftNode = function (node) {
|
||||
if (node === this.head) {
|
||||
return
|
||||
}
|
||||
|
||||
if (node.list) {
|
||||
node.list.removeNode(node)
|
||||
}
|
||||
|
||||
var head = this.head
|
||||
node.list = this
|
||||
node.next = head
|
||||
if (head) {
|
||||
head.prev = node
|
||||
}
|
||||
|
||||
this.head = node
|
||||
if (!this.tail) {
|
||||
this.tail = node
|
||||
}
|
||||
this.length++
|
||||
}
|
||||
|
||||
Yallist.prototype.pushNode = function (node) {
|
||||
if (node === this.tail) {
|
||||
return
|
||||
}
|
||||
|
||||
if (node.list) {
|
||||
node.list.removeNode(node)
|
||||
}
|
||||
|
||||
var tail = this.tail
|
||||
node.list = this
|
||||
node.prev = tail
|
||||
if (tail) {
|
||||
tail.next = node
|
||||
}
|
||||
|
||||
this.tail = node
|
||||
if (!this.head) {
|
||||
this.head = node
|
||||
}
|
||||
this.length++
|
||||
}
|
||||
|
||||
Yallist.prototype.push = function () {
|
||||
for (var i = 0, l = arguments.length; i < l; i++) {
|
||||
push(this, arguments[i])
|
||||
}
|
||||
return this.length
|
||||
}
|
||||
|
||||
Yallist.prototype.unshift = function () {
|
||||
for (var i = 0, l = arguments.length; i < l; i++) {
|
||||
unshift(this, arguments[i])
|
||||
}
|
||||
return this.length
|
||||
}
|
||||
|
||||
Yallist.prototype.pop = function () {
|
||||
if (!this.tail) {
|
||||
return undefined
|
||||
}
|
||||
|
||||
var res = this.tail.value
|
||||
this.tail = this.tail.prev
|
||||
if (this.tail) {
|
||||
this.tail.next = null
|
||||
} else {
|
||||
this.head = null
|
||||
}
|
||||
this.length--
|
||||
return res
|
||||
}
|
||||
|
||||
Yallist.prototype.shift = function () {
|
||||
if (!this.head) {
|
||||
return undefined
|
||||
}
|
||||
|
||||
var res = this.head.value
|
||||
this.head = this.head.next
|
||||
if (this.head) {
|
||||
this.head.prev = null
|
||||
} else {
|
||||
this.tail = null
|
||||
}
|
||||
this.length--
|
||||
return res
|
||||
}
|
||||
|
||||
Yallist.prototype.forEach = function (fn, thisp) {
|
||||
thisp = thisp || this
|
||||
for (var walker = this.head, i = 0; walker !== null; i++) {
|
||||
fn.call(thisp, walker.value, i, this)
|
||||
walker = walker.next
|
||||
}
|
||||
}
|
||||
|
||||
Yallist.prototype.forEachReverse = function (fn, thisp) {
|
||||
thisp = thisp || this
|
||||
for (var walker = this.tail, i = this.length - 1; walker !== null; i--) {
|
||||
fn.call(thisp, walker.value, i, this)
|
||||
walker = walker.prev
|
||||
}
|
||||
}
|
||||
|
||||
Yallist.prototype.get = function (n) {
|
||||
for (var i = 0, walker = this.head; walker !== null && i < n; i++) {
|
||||
// abort out of the list early if we hit a cycle
|
||||
walker = walker.next
|
||||
}
|
||||
if (i === n && walker !== null) {
|
||||
return walker.value
|
||||
}
|
||||
}
|
||||
|
||||
Yallist.prototype.getReverse = function (n) {
|
||||
for (var i = 0, walker = this.tail; walker !== null && i < n; i++) {
|
||||
// abort out of the list early if we hit a cycle
|
||||
walker = walker.prev
|
||||
}
|
||||
if (i === n && walker !== null) {
|
||||
return walker.value
|
||||
}
|
||||
}
|
||||
|
||||
Yallist.prototype.map = function (fn, thisp) {
|
||||
thisp = thisp || this
|
||||
var res = new Yallist()
|
||||
for (var walker = this.head; walker !== null;) {
|
||||
res.push(fn.call(thisp, walker.value, this))
|
||||
walker = walker.next
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
Yallist.prototype.mapReverse = function (fn, thisp) {
|
||||
thisp = thisp || this
|
||||
var res = new Yallist()
|
||||
for (var walker = this.tail; walker !== null;) {
|
||||
res.push(fn.call(thisp, walker.value, this))
|
||||
walker = walker.prev
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
Yallist.prototype.reduce = function (fn, initial) {
|
||||
var acc
|
||||
var walker = this.head
|
||||
if (arguments.length > 1) {
|
||||
acc = initial
|
||||
} else if (this.head) {
|
||||
walker = this.head.next
|
||||
acc = this.head.value
|
||||
} else {
|
||||
throw new TypeError('Reduce of empty list with no initial value')
|
||||
}
|
||||
|
||||
for (var i = 0; walker !== null; i++) {
|
||||
acc = fn(acc, walker.value, i)
|
||||
walker = walker.next
|
||||
}
|
||||
|
||||
return acc
|
||||
}
|
||||
|
||||
Yallist.prototype.reduceReverse = function (fn, initial) {
|
||||
var acc
|
||||
var walker = this.tail
|
||||
if (arguments.length > 1) {
|
||||
acc = initial
|
||||
} else if (this.tail) {
|
||||
walker = this.tail.prev
|
||||
acc = this.tail.value
|
||||
} else {
|
||||
throw new TypeError('Reduce of empty list with no initial value')
|
||||
}
|
||||
|
||||
for (var i = this.length - 1; walker !== null; i--) {
|
||||
acc = fn(acc, walker.value, i)
|
||||
walker = walker.prev
|
||||
}
|
||||
|
||||
return acc
|
||||
}
|
||||
|
||||
Yallist.prototype.toArray = function () {
|
||||
var arr = new Array(this.length)
|
||||
for (var i = 0, walker = this.head; walker !== null; i++) {
|
||||
arr[i] = walker.value
|
||||
walker = walker.next
|
||||
}
|
||||
return arr
|
||||
}
|
||||
|
||||
Yallist.prototype.toArrayReverse = function () {
|
||||
var arr = new Array(this.length)
|
||||
for (var i = 0, walker = this.tail; walker !== null; i++) {
|
||||
arr[i] = walker.value
|
||||
walker = walker.prev
|
||||
}
|
||||
return arr
|
||||
}
|
||||
|
||||
Yallist.prototype.slice = function (from, to) {
|
||||
to = to || this.length
|
||||
if (to < 0) {
|
||||
to += this.length
|
||||
}
|
||||
from = from || 0
|
||||
if (from < 0) {
|
||||
from += this.length
|
||||
}
|
||||
var ret = new Yallist()
|
||||
if (to < from || to < 0) {
|
||||
return ret
|
||||
}
|
||||
if (from < 0) {
|
||||
from = 0
|
||||
}
|
||||
if (to > this.length) {
|
||||
to = this.length
|
||||
}
|
||||
for (var i = 0, walker = this.head; walker !== null && i < from; i++) {
|
||||
walker = walker.next
|
||||
}
|
||||
for (; walker !== null && i < to; i++, walker = walker.next) {
|
||||
ret.push(walker.value)
|
||||
}
|
||||
return ret
|
||||
}
|
||||
|
||||
Yallist.prototype.sliceReverse = function (from, to) {
|
||||
to = to || this.length
|
||||
if (to < 0) {
|
||||
to += this.length
|
||||
}
|
||||
from = from || 0
|
||||
if (from < 0) {
|
||||
from += this.length
|
||||
}
|
||||
var ret = new Yallist()
|
||||
if (to < from || to < 0) {
|
||||
return ret
|
||||
}
|
||||
if (from < 0) {
|
||||
from = 0
|
||||
}
|
||||
if (to > this.length) {
|
||||
to = this.length
|
||||
}
|
||||
for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) {
|
||||
walker = walker.prev
|
||||
}
|
||||
for (; walker !== null && i > from; i--, walker = walker.prev) {
|
||||
ret.push(walker.value)
|
||||
}
|
||||
return ret
|
||||
}
|
||||
|
||||
Yallist.prototype.splice = function (start, deleteCount, ...nodes) {
|
||||
if (start > this.length) {
|
||||
start = this.length - 1
|
||||
}
|
||||
if (start < 0) {
|
||||
start = this.length + start;
|
||||
}
|
||||
|
||||
for (var i = 0, walker = this.head; walker !== null && i < start; i++) {
|
||||
walker = walker.next
|
||||
}
|
||||
|
||||
var ret = []
|
||||
for (var i = 0; walker && i < deleteCount; i++) {
|
||||
ret.push(walker.value)
|
||||
walker = this.removeNode(walker)
|
||||
}
|
||||
if (walker === null) {
|
||||
walker = this.tail
|
||||
}
|
||||
|
||||
if (walker !== this.head && walker !== this.tail) {
|
||||
walker = walker.prev
|
||||
}
|
||||
|
||||
for (var i = 0; i < nodes.length; i++) {
|
||||
walker = insert(this, walker, nodes[i])
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
Yallist.prototype.reverse = function () {
|
||||
var head = this.head
|
||||
var tail = this.tail
|
||||
for (var walker = head; walker !== null; walker = walker.prev) {
|
||||
var p = walker.prev
|
||||
walker.prev = walker.next
|
||||
walker.next = p
|
||||
}
|
||||
this.head = tail
|
||||
this.tail = head
|
||||
return this
|
||||
}
|
||||
|
||||
function insert (self, node, value) {
|
||||
var inserted = node === self.head ?
|
||||
new Node(value, null, node, self) :
|
||||
new Node(value, node, node.next, self)
|
||||
|
||||
if (inserted.next === null) {
|
||||
self.tail = inserted
|
||||
}
|
||||
if (inserted.prev === null) {
|
||||
self.head = inserted
|
||||
}
|
||||
|
||||
self.length++
|
||||
|
||||
return inserted
|
||||
}
|
||||
|
||||
function push (self, item) {
|
||||
self.tail = new Node(item, self.tail, null, self)
|
||||
if (!self.head) {
|
||||
self.head = self.tail
|
||||
}
|
||||
self.length++
|
||||
}
|
||||
|
||||
function unshift (self, item) {
|
||||
self.head = new Node(item, null, self.head, self)
|
||||
if (!self.tail) {
|
||||
self.tail = self.head
|
||||
}
|
||||
self.length++
|
||||
}
|
||||
|
||||
function Node (value, prev, next, list) {
|
||||
if (!(this instanceof Node)) {
|
||||
return new Node(value, prev, next, list)
|
||||
}
|
||||
|
||||
this.list = list
|
||||
this.value = value
|
||||
|
||||
if (prev) {
|
||||
prev.next = this
|
||||
this.prev = prev
|
||||
} else {
|
||||
this.prev = null
|
||||
}
|
||||
|
||||
if (next) {
|
||||
next.prev = this
|
||||
this.next = next
|
||||
} else {
|
||||
this.next = null
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
// add if support for Symbol.iterator is present
|
||||
require('./iterator.js')(Yallist)
|
||||
} catch (er) {}
|
||||