mirror of
https://github.com/x1xhlol/system-prompts-and-models-of-ai-tools.git
synced 2026-02-04 14:00:49 +00:00
nhj
more
This commit is contained in:
21
unified-ai-platform/node_modules/prebuild-install/LICENSE
generated
vendored
Normal file
21
unified-ai-platform/node_modules/prebuild-install/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2015 Mathias Buus
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
44
unified-ai-platform/node_modules/prebuild-install/asset.js
generated
vendored
Normal file
44
unified-ai-platform/node_modules/prebuild-install/asset.js
generated
vendored
Normal file
@@ -0,0 +1,44 @@
|
||||
const get = require('simple-get')
|
||||
const util = require('./util')
|
||||
const proxy = require('./proxy')
|
||||
|
||||
function findAssetId (opts, cb) {
|
||||
const downloadUrl = util.getDownloadUrl(opts)
|
||||
const apiUrl = util.getApiUrl(opts)
|
||||
const log = opts.log || util.noopLogger
|
||||
|
||||
log.http('request', 'GET ' + apiUrl)
|
||||
const reqOpts = proxy({
|
||||
url: apiUrl,
|
||||
json: true,
|
||||
headers: {
|
||||
'User-Agent': 'simple-get',
|
||||
Authorization: 'token ' + opts.token
|
||||
}
|
||||
}, opts)
|
||||
|
||||
const req = get.concat(reqOpts, function (err, res, data) {
|
||||
if (err) return cb(err)
|
||||
log.http(res.statusCode, apiUrl)
|
||||
if (res.statusCode !== 200) return cb(err)
|
||||
|
||||
// Find asset id in release
|
||||
for (const release of data) {
|
||||
if (release.tag_name === opts['tag-prefix'] + opts.pkg.version) {
|
||||
for (const asset of release.assets) {
|
||||
if (asset.browser_download_url === downloadUrl) {
|
||||
return cb(null, asset.id)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
cb(new Error('Could not find GitHub release for version'))
|
||||
})
|
||||
|
||||
req.setTimeout(30 * 1000, function () {
|
||||
req.abort()
|
||||
})
|
||||
}
|
||||
|
||||
module.exports = findAssetId
|
||||
78
unified-ai-platform/node_modules/prebuild-install/bin.js
generated
vendored
Normal file
78
unified-ai-platform/node_modules/prebuild-install/bin.js
generated
vendored
Normal file
@@ -0,0 +1,78 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
const path = require('path')
|
||||
const fs = require('fs')
|
||||
const napi = require('napi-build-utils')
|
||||
|
||||
const pkg = require(path.resolve('package.json'))
|
||||
const rc = require('./rc')(pkg)
|
||||
const log = require('./log')(rc, process.env)
|
||||
const download = require('./download')
|
||||
const asset = require('./asset')
|
||||
const util = require('./util')
|
||||
|
||||
const prebuildClientVersion = require('./package.json').version
|
||||
if (rc.version) {
|
||||
console.log(prebuildClientVersion)
|
||||
process.exit(0)
|
||||
}
|
||||
|
||||
if (rc.path) process.chdir(rc.path)
|
||||
|
||||
if (rc.runtime === 'electron' && rc.target[0] === '4' && rc.abi === '64') {
|
||||
log.error(`Electron version ${rc.target} found - skipping prebuild-install work due to known ABI issue`)
|
||||
log.error('More information about this issue can be found at https://github.com/lgeiger/node-abi/issues/54')
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
if (!fs.existsSync('package.json')) {
|
||||
log.error('setup', 'No package.json found. Aborting...')
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
if (rc.help) {
|
||||
console.error(fs.readFileSync(path.join(__dirname, 'help.txt'), 'utf-8'))
|
||||
process.exit(0)
|
||||
}
|
||||
|
||||
log.info('begin', 'Prebuild-install version', prebuildClientVersion)
|
||||
|
||||
const opts = Object.assign({}, rc, { pkg: pkg, log: log })
|
||||
|
||||
if (napi.isNapiRuntime(rc.runtime)) napi.logUnsupportedVersion(rc.target, log)
|
||||
|
||||
const origin = util.packageOrigin(process.env, pkg)
|
||||
|
||||
if (opts.force) {
|
||||
log.warn('install', 'prebuilt binaries enforced with --force!')
|
||||
log.warn('install', 'prebuilt binaries may be out of date!')
|
||||
} else if (origin && origin.length > 4 && origin.substr(0, 4) === 'git+') {
|
||||
log.info('install', 'installing from git repository, skipping download.')
|
||||
process.exit(1)
|
||||
} else if (opts.buildFromSource) {
|
||||
log.info('install', '--build-from-source specified, not attempting download.')
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
const startDownload = function (downloadUrl) {
|
||||
download(downloadUrl, opts, function (err) {
|
||||
if (err) {
|
||||
log.warn('install', err.message)
|
||||
return process.exit(1)
|
||||
}
|
||||
log.info('install', 'Successfully installed prebuilt binary!')
|
||||
})
|
||||
}
|
||||
|
||||
if (opts.token) {
|
||||
asset(opts, function (err, assetId) {
|
||||
if (err) {
|
||||
log.warn('install', err.message)
|
||||
return process.exit(1)
|
||||
}
|
||||
|
||||
startDownload(util.getAssetUrl(opts, assetId))
|
||||
})
|
||||
} else {
|
||||
startDownload(util.getDownloadUrl(opts))
|
||||
}
|
||||
142
unified-ai-platform/node_modules/prebuild-install/download.js
generated
vendored
Normal file
142
unified-ai-platform/node_modules/prebuild-install/download.js
generated
vendored
Normal file
@@ -0,0 +1,142 @@
|
||||
const path = require('path')
|
||||
const fs = require('fs')
|
||||
const get = require('simple-get')
|
||||
const pump = require('pump')
|
||||
const tfs = require('tar-fs')
|
||||
const zlib = require('zlib')
|
||||
const util = require('./util')
|
||||
const error = require('./error')
|
||||
const proxy = require('./proxy')
|
||||
const mkdirp = require('mkdirp-classic')
|
||||
|
||||
function downloadPrebuild (downloadUrl, opts, cb) {
|
||||
let cachedPrebuild = util.cachedPrebuild(downloadUrl)
|
||||
const localPrebuild = util.localPrebuild(downloadUrl, opts)
|
||||
const tempFile = util.tempFile(cachedPrebuild)
|
||||
const log = opts.log || util.noopLogger
|
||||
|
||||
if (opts.nolocal) return download()
|
||||
|
||||
log.info('looking for local prebuild @', localPrebuild)
|
||||
fs.access(localPrebuild, fs.R_OK | fs.W_OK, function (err) {
|
||||
if (err && err.code === 'ENOENT') {
|
||||
return download()
|
||||
}
|
||||
|
||||
log.info('found local prebuild')
|
||||
cachedPrebuild = localPrebuild
|
||||
unpack()
|
||||
})
|
||||
|
||||
function download () {
|
||||
ensureNpmCacheDir(function (err) {
|
||||
if (err) return onerror(err)
|
||||
|
||||
log.info('looking for cached prebuild @', cachedPrebuild)
|
||||
fs.access(cachedPrebuild, fs.R_OK | fs.W_OK, function (err) {
|
||||
if (!(err && err.code === 'ENOENT')) {
|
||||
log.info('found cached prebuild')
|
||||
return unpack()
|
||||
}
|
||||
|
||||
log.http('request', 'GET ' + downloadUrl)
|
||||
const reqOpts = proxy({ url: downloadUrl }, opts)
|
||||
|
||||
if (opts.token) {
|
||||
reqOpts.headers = {
|
||||
'User-Agent': 'simple-get',
|
||||
Accept: 'application/octet-stream',
|
||||
Authorization: 'token ' + opts.token
|
||||
}
|
||||
}
|
||||
|
||||
const req = get(reqOpts, function (err, res) {
|
||||
if (err) return onerror(err)
|
||||
log.http(res.statusCode, downloadUrl)
|
||||
if (res.statusCode !== 200) return onerror()
|
||||
mkdirp(util.prebuildCache(), function () {
|
||||
log.info('downloading to @', tempFile)
|
||||
pump(res, fs.createWriteStream(tempFile), function (err) {
|
||||
if (err) return onerror(err)
|
||||
fs.rename(tempFile, cachedPrebuild, function (err) {
|
||||
if (err) return cb(err)
|
||||
log.info('renaming to @', cachedPrebuild)
|
||||
unpack()
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
req.setTimeout(30 * 1000, function () {
|
||||
req.abort()
|
||||
})
|
||||
})
|
||||
|
||||
function onerror (err) {
|
||||
fs.unlink(tempFile, function () {
|
||||
cb(err || error.noPrebuilts(opts))
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
function unpack () {
|
||||
let binaryName
|
||||
|
||||
const updateName = opts.updateName || function (entry) {
|
||||
if (/\.node$/i.test(entry.name)) binaryName = entry.name
|
||||
}
|
||||
|
||||
log.info('unpacking @', cachedPrebuild)
|
||||
|
||||
const options = {
|
||||
readable: true,
|
||||
writable: true,
|
||||
hardlinkAsFilesFallback: true
|
||||
}
|
||||
const extract = tfs.extract(opts.path, options).on('entry', updateName)
|
||||
|
||||
pump(fs.createReadStream(cachedPrebuild), zlib.createGunzip(), extract,
|
||||
function (err) {
|
||||
if (err) return cb(err)
|
||||
|
||||
let resolved
|
||||
if (binaryName) {
|
||||
try {
|
||||
resolved = path.resolve(opts.path || '.', binaryName)
|
||||
} catch (err) {
|
||||
return cb(err)
|
||||
}
|
||||
log.info('unpack', 'resolved to ' + resolved)
|
||||
|
||||
if (opts.runtime === 'node' && opts.platform === process.platform && opts.abi === process.versions.modules && opts.arch === process.arch) {
|
||||
try {
|
||||
require(resolved)
|
||||
} catch (err) {
|
||||
return cb(err)
|
||||
}
|
||||
log.info('unpack', 'required ' + resolved + ' successfully')
|
||||
}
|
||||
}
|
||||
|
||||
cb(null, resolved)
|
||||
})
|
||||
}
|
||||
|
||||
function ensureNpmCacheDir (cb) {
|
||||
const cacheFolder = util.npmCache()
|
||||
fs.access(cacheFolder, fs.R_OK | fs.W_OK, function (err) {
|
||||
if (err && err.code === 'ENOENT') {
|
||||
return makeNpmCacheDir()
|
||||
}
|
||||
cb(err)
|
||||
})
|
||||
|
||||
function makeNpmCacheDir () {
|
||||
log.info('npm cache directory missing, creating it...')
|
||||
mkdirp(cacheFolder, cb)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = downloadPrebuild
|
||||
15
unified-ai-platform/node_modules/prebuild-install/node_modules/chownr/LICENSE
generated
vendored
Normal file
15
unified-ai-platform/node_modules/prebuild-install/node_modules/chownr/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
The ISC License
|
||||
|
||||
Copyright (c) Isaac Z. Schlueter and Contributors
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
|
||||
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
3
unified-ai-platform/node_modules/prebuild-install/node_modules/chownr/README.md
generated
vendored
Normal file
3
unified-ai-platform/node_modules/prebuild-install/node_modules/chownr/README.md
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
Like `chown -R`.
|
||||
|
||||
Takes the same arguments as `fs.chown()`
|
||||
167
unified-ai-platform/node_modules/prebuild-install/node_modules/chownr/chownr.js
generated
vendored
Normal file
167
unified-ai-platform/node_modules/prebuild-install/node_modules/chownr/chownr.js
generated
vendored
Normal file
@@ -0,0 +1,167 @@
|
||||
'use strict'
|
||||
const fs = require('fs')
|
||||
const path = require('path')
|
||||
|
||||
/* istanbul ignore next */
|
||||
const LCHOWN = fs.lchown ? 'lchown' : 'chown'
|
||||
/* istanbul ignore next */
|
||||
const LCHOWNSYNC = fs.lchownSync ? 'lchownSync' : 'chownSync'
|
||||
|
||||
/* istanbul ignore next */
|
||||
const needEISDIRHandled = fs.lchown &&
|
||||
!process.version.match(/v1[1-9]+\./) &&
|
||||
!process.version.match(/v10\.[6-9]/)
|
||||
|
||||
const lchownSync = (path, uid, gid) => {
|
||||
try {
|
||||
return fs[LCHOWNSYNC](path, uid, gid)
|
||||
} catch (er) {
|
||||
if (er.code !== 'ENOENT')
|
||||
throw er
|
||||
}
|
||||
}
|
||||
|
||||
/* istanbul ignore next */
|
||||
const chownSync = (path, uid, gid) => {
|
||||
try {
|
||||
return fs.chownSync(path, uid, gid)
|
||||
} catch (er) {
|
||||
if (er.code !== 'ENOENT')
|
||||
throw er
|
||||
}
|
||||
}
|
||||
|
||||
/* istanbul ignore next */
|
||||
const handleEISDIR =
|
||||
needEISDIRHandled ? (path, uid, gid, cb) => er => {
|
||||
// Node prior to v10 had a very questionable implementation of
|
||||
// fs.lchown, which would always try to call fs.open on a directory
|
||||
// Fall back to fs.chown in those cases.
|
||||
if (!er || er.code !== 'EISDIR')
|
||||
cb(er)
|
||||
else
|
||||
fs.chown(path, uid, gid, cb)
|
||||
}
|
||||
: (_, __, ___, cb) => cb
|
||||
|
||||
/* istanbul ignore next */
|
||||
const handleEISDirSync =
|
||||
needEISDIRHandled ? (path, uid, gid) => {
|
||||
try {
|
||||
return lchownSync(path, uid, gid)
|
||||
} catch (er) {
|
||||
if (er.code !== 'EISDIR')
|
||||
throw er
|
||||
chownSync(path, uid, gid)
|
||||
}
|
||||
}
|
||||
: (path, uid, gid) => lchownSync(path, uid, gid)
|
||||
|
||||
// fs.readdir could only accept an options object as of node v6
|
||||
const nodeVersion = process.version
|
||||
let readdir = (path, options, cb) => fs.readdir(path, options, cb)
|
||||
let readdirSync = (path, options) => fs.readdirSync(path, options)
|
||||
/* istanbul ignore next */
|
||||
if (/^v4\./.test(nodeVersion))
|
||||
readdir = (path, options, cb) => fs.readdir(path, cb)
|
||||
|
||||
const chown = (cpath, uid, gid, cb) => {
|
||||
fs[LCHOWN](cpath, uid, gid, handleEISDIR(cpath, uid, gid, er => {
|
||||
// Skip ENOENT error
|
||||
cb(er && er.code !== 'ENOENT' ? er : null)
|
||||
}))
|
||||
}
|
||||
|
||||
const chownrKid = (p, child, uid, gid, cb) => {
|
||||
if (typeof child === 'string')
|
||||
return fs.lstat(path.resolve(p, child), (er, stats) => {
|
||||
// Skip ENOENT error
|
||||
if (er)
|
||||
return cb(er.code !== 'ENOENT' ? er : null)
|
||||
stats.name = child
|
||||
chownrKid(p, stats, uid, gid, cb)
|
||||
})
|
||||
|
||||
if (child.isDirectory()) {
|
||||
chownr(path.resolve(p, child.name), uid, gid, er => {
|
||||
if (er)
|
||||
return cb(er)
|
||||
const cpath = path.resolve(p, child.name)
|
||||
chown(cpath, uid, gid, cb)
|
||||
})
|
||||
} else {
|
||||
const cpath = path.resolve(p, child.name)
|
||||
chown(cpath, uid, gid, cb)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
const chownr = (p, uid, gid, cb) => {
|
||||
readdir(p, { withFileTypes: true }, (er, children) => {
|
||||
// any error other than ENOTDIR or ENOTSUP means it's not readable,
|
||||
// or doesn't exist. give up.
|
||||
if (er) {
|
||||
if (er.code === 'ENOENT')
|
||||
return cb()
|
||||
else if (er.code !== 'ENOTDIR' && er.code !== 'ENOTSUP')
|
||||
return cb(er)
|
||||
}
|
||||
if (er || !children.length)
|
||||
return chown(p, uid, gid, cb)
|
||||
|
||||
let len = children.length
|
||||
let errState = null
|
||||
const then = er => {
|
||||
if (errState)
|
||||
return
|
||||
if (er)
|
||||
return cb(errState = er)
|
||||
if (-- len === 0)
|
||||
return chown(p, uid, gid, cb)
|
||||
}
|
||||
|
||||
children.forEach(child => chownrKid(p, child, uid, gid, then))
|
||||
})
|
||||
}
|
||||
|
||||
const chownrKidSync = (p, child, uid, gid) => {
|
||||
if (typeof child === 'string') {
|
||||
try {
|
||||
const stats = fs.lstatSync(path.resolve(p, child))
|
||||
stats.name = child
|
||||
child = stats
|
||||
} catch (er) {
|
||||
if (er.code === 'ENOENT')
|
||||
return
|
||||
else
|
||||
throw er
|
||||
}
|
||||
}
|
||||
|
||||
if (child.isDirectory())
|
||||
chownrSync(path.resolve(p, child.name), uid, gid)
|
||||
|
||||
handleEISDirSync(path.resolve(p, child.name), uid, gid)
|
||||
}
|
||||
|
||||
const chownrSync = (p, uid, gid) => {
|
||||
let children
|
||||
try {
|
||||
children = readdirSync(p, { withFileTypes: true })
|
||||
} catch (er) {
|
||||
if (er.code === 'ENOENT')
|
||||
return
|
||||
else if (er.code === 'ENOTDIR' || er.code === 'ENOTSUP')
|
||||
return handleEISDirSync(p, uid, gid)
|
||||
else
|
||||
throw er
|
||||
}
|
||||
|
||||
if (children && children.length)
|
||||
children.forEach(child => chownrKidSync(p, child, uid, gid))
|
||||
|
||||
return handleEISDirSync(p, uid, gid)
|
||||
}
|
||||
|
||||
module.exports = chownr
|
||||
chownr.sync = chownrSync
|
||||
29
unified-ai-platform/node_modules/prebuild-install/node_modules/chownr/package.json
generated
vendored
Normal file
29
unified-ai-platform/node_modules/prebuild-install/node_modules/chownr/package.json
generated
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
{
|
||||
"author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
|
||||
"name": "chownr",
|
||||
"description": "like `chown -R`",
|
||||
"version": "1.1.4",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/isaacs/chownr.git"
|
||||
},
|
||||
"main": "chownr.js",
|
||||
"files": [
|
||||
"chownr.js"
|
||||
],
|
||||
"devDependencies": {
|
||||
"mkdirp": "0.3",
|
||||
"rimraf": "^2.7.1",
|
||||
"tap": "^14.10.6"
|
||||
},
|
||||
"tap": {
|
||||
"check-coverage": true
|
||||
},
|
||||
"scripts": {
|
||||
"test": "tap",
|
||||
"preversion": "npm test",
|
||||
"postversion": "npm publish",
|
||||
"prepublishOnly": "git push origin --follow-tags"
|
||||
},
|
||||
"license": "ISC"
|
||||
}
|
||||
47
unified-ai-platform/node_modules/prebuild-install/node_modules/readable-stream/LICENSE
generated
vendored
Normal file
47
unified-ai-platform/node_modules/prebuild-install/node_modules/readable-stream/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
Node.js is licensed for use as follows:
|
||||
|
||||
"""
|
||||
Copyright Node.js contributors. All rights reserved.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to
|
||||
deal in the Software without restriction, including without limitation the
|
||||
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
sell copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
IN THE SOFTWARE.
|
||||
"""
|
||||
|
||||
This license applies to parts of Node.js originating from the
|
||||
https://github.com/joyent/node repository:
|
||||
|
||||
"""
|
||||
Copyright Joyent, Inc. and other Node contributors. All rights reserved.
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to
|
||||
deal in the Software without restriction, including without limitation the
|
||||
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
sell copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
IN THE SOFTWARE.
|
||||
"""
|
||||
126
unified-ai-platform/node_modules/prebuild-install/node_modules/readable-stream/lib/_stream_duplex.js
generated
vendored
Normal file
126
unified-ai-platform/node_modules/prebuild-install/node_modules/readable-stream/lib/_stream_duplex.js
generated
vendored
Normal file
@@ -0,0 +1,126 @@
|
||||
// Copyright Joyent, Inc. and other Node contributors.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||||
// copy of this software and associated documentation files (the
|
||||
// "Software"), to deal in the Software without restriction, including
|
||||
// without limitation the rights to use, copy, modify, merge, publish,
|
||||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||||
// persons to whom the Software is furnished to do so, subject to the
|
||||
// following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included
|
||||
// in all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
// a duplex stream is just a stream that is both readable and writable.
|
||||
// Since JS doesn't have multiple prototypal inheritance, this class
|
||||
// prototypally inherits from Readable, and then parasitically from
|
||||
// Writable.
|
||||
|
||||
'use strict';
|
||||
|
||||
/*<replacement>*/
|
||||
var objectKeys = Object.keys || function (obj) {
|
||||
var keys = [];
|
||||
for (var key in obj) keys.push(key);
|
||||
return keys;
|
||||
};
|
||||
/*</replacement>*/
|
||||
|
||||
module.exports = Duplex;
|
||||
var Readable = require('./_stream_readable');
|
||||
var Writable = require('./_stream_writable');
|
||||
require('inherits')(Duplex, Readable);
|
||||
{
|
||||
// Allow the keys array to be GC'ed.
|
||||
var keys = objectKeys(Writable.prototype);
|
||||
for (var v = 0; v < keys.length; v++) {
|
||||
var method = keys[v];
|
||||
if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];
|
||||
}
|
||||
}
|
||||
function Duplex(options) {
|
||||
if (!(this instanceof Duplex)) return new Duplex(options);
|
||||
Readable.call(this, options);
|
||||
Writable.call(this, options);
|
||||
this.allowHalfOpen = true;
|
||||
if (options) {
|
||||
if (options.readable === false) this.readable = false;
|
||||
if (options.writable === false) this.writable = false;
|
||||
if (options.allowHalfOpen === false) {
|
||||
this.allowHalfOpen = false;
|
||||
this.once('end', onend);
|
||||
}
|
||||
}
|
||||
}
|
||||
Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', {
|
||||
// making it explicit this property is not enumerable
|
||||
// because otherwise some prototype manipulation in
|
||||
// userland will fail
|
||||
enumerable: false,
|
||||
get: function get() {
|
||||
return this._writableState.highWaterMark;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(Duplex.prototype, 'writableBuffer', {
|
||||
// making it explicit this property is not enumerable
|
||||
// because otherwise some prototype manipulation in
|
||||
// userland will fail
|
||||
enumerable: false,
|
||||
get: function get() {
|
||||
return this._writableState && this._writableState.getBuffer();
|
||||
}
|
||||
});
|
||||
Object.defineProperty(Duplex.prototype, 'writableLength', {
|
||||
// making it explicit this property is not enumerable
|
||||
// because otherwise some prototype manipulation in
|
||||
// userland will fail
|
||||
enumerable: false,
|
||||
get: function get() {
|
||||
return this._writableState.length;
|
||||
}
|
||||
});
|
||||
|
||||
// the no-half-open enforcer
|
||||
function onend() {
|
||||
// If the writable side ended, then we're ok.
|
||||
if (this._writableState.ended) return;
|
||||
|
||||
// no more data can be written.
|
||||
// But allow more writes to happen in this tick.
|
||||
process.nextTick(onEndNT, this);
|
||||
}
|
||||
function onEndNT(self) {
|
||||
self.end();
|
||||
}
|
||||
Object.defineProperty(Duplex.prototype, 'destroyed', {
|
||||
// making it explicit this property is not enumerable
|
||||
// because otherwise some prototype manipulation in
|
||||
// userland will fail
|
||||
enumerable: false,
|
||||
get: function get() {
|
||||
if (this._readableState === undefined || this._writableState === undefined) {
|
||||
return false;
|
||||
}
|
||||
return this._readableState.destroyed && this._writableState.destroyed;
|
||||
},
|
||||
set: function set(value) {
|
||||
// we ignore the value if the stream
|
||||
// has not been initialized yet
|
||||
if (this._readableState === undefined || this._writableState === undefined) {
|
||||
return;
|
||||
}
|
||||
|
||||
// backward compatibility, the user is explicitly
|
||||
// managing destroyed
|
||||
this._readableState.destroyed = value;
|
||||
this._writableState.destroyed = value;
|
||||
}
|
||||
});
|
||||
37
unified-ai-platform/node_modules/prebuild-install/node_modules/readable-stream/lib/_stream_passthrough.js
generated
vendored
Normal file
37
unified-ai-platform/node_modules/prebuild-install/node_modules/readable-stream/lib/_stream_passthrough.js
generated
vendored
Normal file
@@ -0,0 +1,37 @@
|
||||
// Copyright Joyent, Inc. and other Node contributors.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||||
// copy of this software and associated documentation files (the
|
||||
// "Software"), to deal in the Software without restriction, including
|
||||
// without limitation the rights to use, copy, modify, merge, publish,
|
||||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||||
// persons to whom the Software is furnished to do so, subject to the
|
||||
// following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included
|
||||
// in all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
// a passthrough stream.
|
||||
// basically just the most minimal sort of Transform stream.
|
||||
// Every written chunk gets output as-is.
|
||||
|
||||
'use strict';
|
||||
|
||||
module.exports = PassThrough;
|
||||
var Transform = require('./_stream_transform');
|
||||
require('inherits')(PassThrough, Transform);
|
||||
function PassThrough(options) {
|
||||
if (!(this instanceof PassThrough)) return new PassThrough(options);
|
||||
Transform.call(this, options);
|
||||
}
|
||||
PassThrough.prototype._transform = function (chunk, encoding, cb) {
|
||||
cb(null, chunk);
|
||||
};
|
||||
0
unified-ai-platform/node_modules/prebuild-install/node_modules/readable-stream/lib/_stream_readable.js
generated
vendored
Normal file
0
unified-ai-platform/node_modules/prebuild-install/node_modules/readable-stream/lib/_stream_readable.js
generated
vendored
Normal file
0
unified-ai-platform/node_modules/prebuild-install/node_modules/tar-fs/test/fixtures/e/directory/.ignore
generated
vendored
Normal file
0
unified-ai-platform/node_modules/prebuild-install/node_modules/tar-fs/test/fixtures/e/directory/.ignore
generated
vendored
Normal file
0
unified-ai-platform/node_modules/prebuild-install/node_modules/tar-fs/test/fixtures/e/file
generated
vendored
Normal file
0
unified-ai-platform/node_modules/prebuild-install/node_modules/tar-fs/test/fixtures/e/file
generated
vendored
Normal file
21
unified-ai-platform/node_modules/prebuild-install/node_modules/tar-stream/LICENSE
generated
vendored
Normal file
21
unified-ai-platform/node_modules/prebuild-install/node_modules/tar-stream/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2014 Mathias Buus
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
257
unified-ai-platform/node_modules/prebuild-install/node_modules/tar-stream/extract.js
generated
vendored
Normal file
257
unified-ai-platform/node_modules/prebuild-install/node_modules/tar-stream/extract.js
generated
vendored
Normal file
@@ -0,0 +1,257 @@
|
||||
var util = require('util')
|
||||
var bl = require('bl')
|
||||
var headers = require('./headers')
|
||||
|
||||
var Writable = require('readable-stream').Writable
|
||||
var PassThrough = require('readable-stream').PassThrough
|
||||
|
||||
var noop = function () {}
|
||||
|
||||
var overflow = function (size) {
|
||||
size &= 511
|
||||
return size && 512 - size
|
||||
}
|
||||
|
||||
var emptyStream = function (self, offset) {
|
||||
var s = new Source(self, offset)
|
||||
s.end()
|
||||
return s
|
||||
}
|
||||
|
||||
var mixinPax = function (header, pax) {
|
||||
if (pax.path) header.name = pax.path
|
||||
if (pax.linkpath) header.linkname = pax.linkpath
|
||||
if (pax.size) header.size = parseInt(pax.size, 10)
|
||||
header.pax = pax
|
||||
return header
|
||||
}
|
||||
|
||||
// Readable side handed to 'entry' listeners. Remembers its parent Extract
// and the absolute byte offset of the entry within the archive.
var Source = function (self, offset) {
  PassThrough.call(this, { autoDestroy: false })
  this._parent = self
  this.offset = offset
}

util.inherits(Source, PassThrough)

// Destroying an individual entry stream aborts the whole extraction.
Source.prototype.destroy = function (err) {
  this._parent.destroy(err)
}
|
||||
|
||||
// Streaming tar parser. Archive bytes are written in; an 'entry' event fires
// for each entry with (header, readableStream, next). Internally a small
// state machine driven by _parse(size, callback): each callback consumes the
// region it was armed for, then re-arms the parser for the next region.
var Extract = function (opts) {
  if (!(this instanceof Extract)) return new Extract(opts)
  Writable.call(this, opts)

  opts = opts || {}

  this._offset = 0          // absolute position within the archive
  this._buffer = bl()       // staging buffer for header/pax/long-name regions
  this._missing = 0         // bytes still owed to the current parse step
  this._partial = false     // true while mid-entry (used for truncation check)
  this._onparse = noop      // callback armed for the current region
  this._header = null       // header of the entry currently being parsed
  this._stream = null       // Source stream of the in-flight entry
  this._overflow = null     // bytes past the current region, re-fed later
  this._cb = null           // pending Writable callback (backpressure)
  this._locked = false      // true until the consumer acks the entry (next())
  this._destroyed = false
  this._pax = null          // pending pax attributes for the next entry
  this._paxGlobal = null    // pax-global attributes merged into every entry
  this._gnuLongPath = null      // pending GNU long name for the next entry
  this._gnuLongLinkPath = null  // pending GNU long linkname for the next entry

  var self = this
  var b = self._buffer

  // Ack the buffered write so more archive bytes can flow in.
  var oncontinue = function () {
    self._continue()
  }

  // Consumer finished with an entry (called next(err)).
  var onunlock = function (err) {
    self._locked = false
    if (err) return self.destroy(err)
    if (!self._stream) oncontinue()
  }

  // Entry body fully delivered: skip block padding, then expect a header.
  var onstreamend = function () {
    self._stream = null
    var drain = overflow(self._header.size)
    if (drain) self._parse(drain, ondrain)
    else self._parse(512, onheader)
    if (!self._locked) oncontinue()
  }

  // Discard the padding that rounds the entry body up to a 512-byte block.
  var ondrain = function () {
    self._buffer.consume(overflow(self._header.size))
    self._parse(512, onheader)
    oncontinue()
  }

  // Pax global header: attributes apply to all subsequent entries.
  var onpaxglobalheader = function () {
    var size = self._header.size
    self._paxGlobal = headers.decodePax(b.slice(0, size))
    b.consume(size)
    onstreamend()
  }

  // Pax header: attributes apply to the next entry only (globals merged in).
  var onpaxheader = function () {
    var size = self._header.size
    self._pax = headers.decodePax(b.slice(0, size))
    if (self._paxGlobal) self._pax = Object.assign({}, self._paxGlobal, self._pax)
    b.consume(size)
    onstreamend()
  }

  // GNU long-name entry: body is the real name of the next entry.
  // NOTE(review): uses `this` rather than `self` — works only because the
  // parser invokes callbacks as this._onparse() on the Extract instance;
  // upstream tar-stream uses `self` here. Verify if refactoring.
  var ongnulongpath = function () {
    var size = self._header.size
    this._gnuLongPath = headers.decodeLongPath(b.slice(0, size), opts.filenameEncoding)
    b.consume(size)
    onstreamend()
  }

  // GNU long-linkname entry: body is the real linkname of the next entry.
  // NOTE(review): same `this`-vs-`self` caveat as ongnulongpath above.
  var ongnulonglinkpath = function () {
    var size = self._header.size
    this._gnuLongLinkPath = headers.decodeLongPath(b.slice(0, size), opts.filenameEncoding)
    b.consume(size)
    onstreamend()
  }

  // A full 512-byte header block is buffered: decode it and dispatch.
  var onheader = function () {
    var offset = self._offset
    var header
    try {
      header = self._header = headers.decode(b.slice(0, 512), opts.filenameEncoding, opts.allowUnknownFormat)
    } catch (err) {
      self.emit('error', err)
    }
    b.consume(512)

    // All-zero block (end-of-archive marker) or decode failure: keep reading.
    if (!header) {
      self._parse(512, onheader)
      oncontinue()
      return
    }
    // Meta entries: their bodies are parsed and stashed, not surfaced.
    if (header.type === 'gnu-long-path') {
      self._parse(header.size, ongnulongpath)
      oncontinue()
      return
    }
    if (header.type === 'gnu-long-link-path') {
      self._parse(header.size, ongnulonglinkpath)
      oncontinue()
      return
    }
    if (header.type === 'pax-global-header') {
      self._parse(header.size, onpaxglobalheader)
      oncontinue()
      return
    }
    if (header.type === 'pax-header') {
      self._parse(header.size, onpaxheader)
      oncontinue()
      return
    }

    // Apply any stashed long-name / long-linkname / pax data to this entry.
    if (self._gnuLongPath) {
      header.name = self._gnuLongPath
      self._gnuLongPath = null
    }

    if (self._gnuLongLinkPath) {
      header.linkname = self._gnuLongLinkPath
      self._gnuLongLinkPath = null
    }

    if (self._pax) {
      self._header = header = mixinPax(header, self._pax)
      self._pax = null
    }

    // Stay locked until the consumer acknowledges this entry via next().
    self._locked = true

    // Zero-size entries (and directories) carry no body: hand out an
    // already-ended stream and immediately expect the next header.
    if (!header.size || header.type === 'directory') {
      self._parse(512, onheader)
      self.emit('entry', header, emptyStream(self, offset), onunlock)
      return
    }

    self._stream = new Source(self, offset)

    self.emit('entry', header, self._stream, onunlock)
    self._parse(header.size, onstreamend)
    oncontinue()
  }

  this._onheader = onheader
  this._parse(512, onheader)
}

util.inherits(Extract, Writable)
|
||||
|
||||
// Tear the extraction down exactly once: surface `err` (if given), emit
// 'close', and close any in-flight entry stream as well.
Extract.prototype.destroy = function (err) {
  if (this._destroyed) return
  this._destroyed = true

  if (err) this.emit('error', err)
  this.emit('close')
  var entry = this._stream
  if (entry) entry.emit('close')
}
|
||||
|
||||
// Arm the parser: the next `size` bytes belong to `onparse`. Advances the
// absolute archive offset and clears the partial flag at header boundaries
// (being exactly at a header boundary means no entry is half-read).
Extract.prototype._parse = function (size, onparse) {
  if (this._destroyed) return
  this._offset += size
  this._missing = size
  var atHeaderBoundary = onparse === this._onheader
  if (atHeaderBoundary) this._partial = false
  this._onparse = onparse
}
|
||||
|
||||
// Resume the Writable side after the parser consumed its region: re-feed
// any overflow bytes first, otherwise ack the pending write callback.
Extract.prototype._continue = function () {
  if (this._destroyed) return
  var cb = this._cb
  this._cb = noop
  if (this._overflow) {
    this._write(this._overflow, undefined, cb)
  } else {
    cb()
  }
}
|
||||
|
||||
// Writable implementation: route incoming archive bytes either into the
// current entry stream or the staging buffer, honoring the byte budget
// (_missing) armed by _parse. Bytes beyond the budget are parked in
// _overflow and re-fed by _continue.
Extract.prototype._write = function (data, enc, cb) {
  if (this._destroyed) return

  var s = this._stream
  var b = this._buffer
  var missing = this._missing
  if (data.length) this._partial = true

  // we do not reach end-of-chunk now. just forward it
  if (data.length < missing) {
    this._missing -= data.length
    this._overflow = null
    // Entry bodies stream straight through; structural regions are buffered.
    if (s) return s.write(data, cb)
    b.append(data)
    return cb()
  }

  // end-of-chunk. the parser should call cb.
  // Hold the callback; the armed parse callback (via _continue) acks it,
  // which is how backpressure pauses the writer while the consumer works.
  this._cb = cb
  this._missing = 0

  var overflow = null
  if (data.length > missing) {
    overflow = data.slice(missing)
    data = data.slice(0, missing)
  }

  if (s) s.end(data)
  else b.append(data)

  this._overflow = overflow
  this._onparse()
}
|
||||
|
||||
// Input stream ended: a set partial flag means we were mid-entry, i.e. the
// archive was truncated.
Extract.prototype._final = function (cb) {
  if (this._partial) {
    return this.destroy(new Error('Unexpected end of data'))
  }
  cb()
}

module.exports = Extract
|
||||
295
unified-ai-platform/node_modules/prebuild-install/node_modules/tar-stream/headers.js
generated
vendored
Normal file
295
unified-ai-platform/node_modules/prebuild-install/node_modules/tar-stream/headers.js
generated
vendored
Normal file
@@ -0,0 +1,295 @@
|
||||
var alloc = Buffer.alloc

// Padding sources for fixed-width octal fields (sliced to field width).
var ZEROS = '0000000000000000000'
var SEVENS = '7777777777777777777'
// ASCII code of '0'; typeflag bytes are stored relative to it.
var ZERO_OFFSET = '0'.charCodeAt(0)
// Magic/version bytes distinguishing ustar (posix) from gnu/oldgnu headers.
var USTAR_MAGIC = Buffer.from('ustar\x00', 'binary')
var USTAR_VER = Buffer.from('00', 'binary')
var GNU_MAGIC = Buffer.from('ustar\x20', 'binary')
var GNU_VER = Buffer.from('\x20\x00', 'binary')
// Permission-bit mask (07777 octal) applied when encoding the mode field.
var MASK = parseInt('7777', 8)
// Byte offsets of the magic and version fields within a 512-byte header.
var MAGIC_OFFSET = 257
var VERSION_OFFSET = 263
||||
|
||||
// Normalize a possibly-negative or non-numeric index into [0, len],
// mirroring Array#slice semantics; non-numbers fall back to defaultValue.
var clamp = function (index, len, defaultValue) {
  if (typeof index !== 'number') return defaultValue
  index = ~~index // Coerce to integer.
  if (index >= len) return len
  if (index >= 0) return index
  // Negative indexes count back from the end.
  index += len
  return index >= 0 ? index : 0
}
|
||||
|
||||
// Map a numeric typeflag (header byte 156 minus ASCII '0') to its symbolic
// entry type; unknown or non-numeric flags yield null.
var toType = function (flag) {
  if (typeof flag !== 'number') return null
  var names = {
    0: 'file',
    1: 'link',
    2: 'symlink',
    3: 'character-device',
    4: 'block-device',
    5: 'directory',
    6: 'fifo',
    7: 'contiguous-file',
    72: 'pax-header',
    55: 'pax-global-header',
    27: 'gnu-long-link-path',
    28: 'gnu-long-path',
    30: 'gnu-long-path'
  }
  var name = names[flag]
  return name === undefined ? null : name
}
|
||||
|
||||
// Map a symbolic entry type back to its numeric typeflag; anything
// unrecognized encodes as 0 (regular file).
var toTypeflag = function (flag) {
  var flags = {
    'file': 0,
    'link': 1,
    'symlink': 2,
    'character-device': 3,
    'block-device': 4,
    'directory': 5,
    'fifo': 6,
    'contiguous-file': 7,
    'pax-header': 72
  }
  // hasOwnProperty guard keeps prototype keys ('constructor', ...) out.
  return Object.prototype.hasOwnProperty.call(flags, flag) ? flags[flag] : 0
}
|
||||
|
||||
// Find the first occurrence of byte `num` in block[offset, end); returns
// `end` (not -1) when absent, which is convenient for slicing.
var indexOf = function (block, num, offset, end) {
  var i = offset
  while (i < end) {
    if (block[i] === num) return i
    i++
  }
  return end
}
|
||||
|
||||
// Tar header checksum: byte sum of the 512-byte block with the checksum
// field itself (bytes 148-155) treated as eight ASCII spaces (8 * 32).
var cksum = function (block) {
  var sum = 8 * 32
  for (var i = 0; i < 512; i++) {
    if (i < 148 || i >= 156) sum += block[i]
  }
  return sum
}
|
||||
|
||||
// Render `val` as a zero-padded octal string of width n plus a trailing
// space; values too wide for the field saturate to all sevens.
var encodeOct = function (val, n) {
  var octal = val.toString(8)
  if (octal.length > n) return SEVENS.slice(0, n) + ' '
  return ZEROS.slice(0, n - octal.length) + octal + ' '
}
|
||||
|
||||
/* Copied from the node-tar repo and modified to meet
 * tar-stream coding standard.
 *
 * Source: https://github.com/npm/node-tar/blob/51b6627a1f357d2eb433e7378e5f05e83b7aa6cd/lib/header.js#L349
 */
// Decode a base-256 (binary) numeric field. The first byte is a sign
// marker: 0x80 positive, 0xFF negative (two's-complement style); anything
// else means the field is not base-256 and null is returned.
function parse256 (buf) {
  var positive
  if (buf[0] === 0x80) positive = true
  else if (buf[0] === 0xFF) positive = false
  else return null

  // Accumulate from the least-significant byte (end of buffer) upward,
  // scaling by 256 per position; negative values complement each byte.
  var sum = 0
  var factor = 1
  for (var i = buf.length - 1; i > 0; i--) {
    var byte = positive ? buf[i] : 0xFF - buf[i]
    sum += byte * factor
    factor *= 256
  }

  return positive ? sum : -1 * sum
}
|
||||
|
||||
// Decode a numeric header field of `length` bytes starting at `offset`:
// base-256 when the high bit of the first byte is set, otherwise octal text
// that may be padded with leading spaces and/or NULs.
var decodeOct = function (val, offset, length) {
  var block = val.slice(offset, offset + length)

  // If prefixed with 0x80 then parse as a base-256 integer
  if (block[0] & 0x80) return parse256(block)

  // Older versions of tar can prefix with spaces
  var pos = 0
  while (pos < block.length && block[pos] === 32) pos++
  var end = clamp(indexOf(block, 32, pos, block.length), block.length, block.length)
  while (pos < end && block[pos] === 0) pos++
  if (end === pos) return 0
  return parseInt(block.slice(pos, end).toString(), 8)
}
|
||||
|
||||
// Decode a NUL-terminated string field of at most `length` bytes.
var decodeStr = function (val, offset, length, encoding) {
  var end = indexOf(val, 0, offset, offset + length)
  return val.slice(offset, end).toString(encoding)
}
|
||||
|
||||
// Prefix a pax record with its own total length in decimal, as required by
// the pax format ("<len> key=value\n" where <len> counts itself).
var addLength = function (str) {
  var len = Buffer.byteLength(str)
  var digits = Math.floor(Math.log(len) / Math.log(10)) + 1
  // Adding the prefix itself can roll the total into one more digit.
  if (len + digits >= Math.pow(10, digits)) digits += 1

  return String(len + digits) + str
}
|
||||
|
||||
exports.decodeLongPath = function (buf, encoding) {
|
||||
return decodeStr(buf, 0, buf.length, encoding)
|
||||
}
|
||||
|
||||
exports.encodePax = function (opts) { // TODO: encode more stuff in pax
|
||||
var result = ''
|
||||
if (opts.name) result += addLength(' path=' + opts.name + '\n')
|
||||
if (opts.linkname) result += addLength(' linkpath=' + opts.linkname + '\n')
|
||||
var pax = opts.pax
|
||||
if (pax) {
|
||||
for (var key in pax) {
|
||||
result += addLength(' ' + key + '=' + pax[key] + '\n')
|
||||
}
|
||||
}
|
||||
return Buffer.from(result)
|
||||
}
|
||||
|
||||
exports.decodePax = function (buf) {
|
||||
var result = {}
|
||||
|
||||
while (buf.length) {
|
||||
var i = 0
|
||||
while (i < buf.length && buf[i] !== 32) i++
|
||||
var len = parseInt(buf.slice(0, i).toString(), 10)
|
||||
if (!len) return result
|
||||
|
||||
var b = buf.slice(i + 1, len - 1).toString()
|
||||
var keyIndex = b.indexOf('=')
|
||||
if (keyIndex === -1) return result
|
||||
result[b.slice(0, keyIndex)] = b.slice(keyIndex + 1)
|
||||
|
||||
buf = buf.slice(len)
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
// Encode a header object into a single 512-byte ustar block. Returns null
// when the header cannot be represented in plain ustar (name/prefix/linkname
// too long, or non-ASCII name whose byte length differs from its character
// length).
exports.encode = function (opts) {
  var buf = alloc(512)
  var name = opts.name
  var prefix = ''

  // Directories (typeflag 5) conventionally carry a trailing slash.
  if (opts.typeflag === 5 && name[name.length - 1] !== '/') name += '/'
  if (Buffer.byteLength(name) !== name.length) return null // utf-8

  // Split an over-long name across the 155-byte prefix field and the
  // 100-byte name field, cutting only at '/' boundaries.
  while (Buffer.byteLength(name) > 100) {
    var i = name.indexOf('/')
    if (i === -1) return null
    prefix += prefix ? '/' + name.slice(0, i) : name.slice(0, i)
    name = name.slice(i + 1)
  }

  if (Buffer.byteLength(name) > 100 || Buffer.byteLength(prefix) > 155) return null
  if (opts.linkname && Buffer.byteLength(opts.linkname) > 100) return null

  // Fixed ustar field layout (offset noted per write).
  buf.write(name)                                                   // 0: name
  buf.write(encodeOct(opts.mode & MASK, 6), 100)                    // 100: mode
  buf.write(encodeOct(opts.uid, 6), 108)                            // 108: uid
  buf.write(encodeOct(opts.gid, 6), 116)                            // 116: gid
  buf.write(encodeOct(opts.size, 11), 124)                          // 124: size
  buf.write(encodeOct((opts.mtime.getTime() / 1000) | 0, 11), 136)  // 136: mtime (s)

  buf[156] = ZERO_OFFSET + toTypeflag(opts.type)                    // 156: typeflag

  if (opts.linkname) buf.write(opts.linkname, 157)                  // 157: linkname

  USTAR_MAGIC.copy(buf, MAGIC_OFFSET)                               // 257: "ustar\0"
  USTAR_VER.copy(buf, VERSION_OFFSET)                               // 263: "00"
  if (opts.uname) buf.write(opts.uname, 265)
  if (opts.gname) buf.write(opts.gname, 297)
  buf.write(encodeOct(opts.devmajor || 0, 6), 329)
  buf.write(encodeOct(opts.devminor || 0, 6), 337)

  if (prefix) buf.write(prefix, 345)

  // Checksum is written last, computed over the block with the checksum
  // field treated as spaces (see cksum).
  buf.write(encodeOct(cksum(buf), 6), 148)

  return buf
}
|
||||
|
||||
exports.decode = function (buf, filenameEncoding, allowUnknownFormat) {
|
||||
var typeflag = buf[156] === 0 ? 0 : buf[156] - ZERO_OFFSET
|
||||
|
||||
var name = decodeStr(buf, 0, 100, filenameEncoding)
|
||||
var mode = decodeOct(buf, 100, 8)
|
||||
var uid = decodeOct(buf, 108, 8)
|
||||
var gid = decodeOct(buf, 116, 8)
|
||||
var size = decodeOct(buf, 124, 12)
|
||||
var mtime = decodeOct(buf, 136, 12)
|
||||
var type = toType(typeflag)
|
||||
var linkname = buf[157] === 0 ? null : decodeStr(buf, 157, 100, filenameEncoding)
|
||||
var uname = decodeStr(buf, 265, 32)
|
||||
var gname = decodeStr(buf, 297, 32)
|
||||
var devmajor = decodeOct(buf, 329, 8)
|
||||
var devminor = decodeOct(buf, 337, 8)
|
||||
|
||||
var c = cksum(buf)
|
||||
|
||||
// checksum is still initial value if header was null.
|
||||
if (c === 8 * 32) return null
|
||||
|
||||
// valid checksum
|
||||
if (c !== decodeOct(buf, 148, 8)) throw new Error('Invalid tar header. Maybe the tar is corrupted or it needs to be gunzipped?')
|
||||
|
||||
if (USTAR_MAGIC.compare(buf, MAGIC_OFFSET, MAGIC_OFFSET + 6) === 0) {
|
||||
// ustar (posix) format.
|
||||
// prepend prefix, if present.
|
||||
if (buf[345]) name = decodeStr(buf, 345, 155, filenameEncoding) + '/' + name
|
||||
} else if (GNU_MAGIC.compare(buf, MAGIC_OFFSET, MAGIC_OFFSET + 6) === 0 &&
|
||||
GNU_VER.compare(buf, VERSION_OFFSET, VERSION_OFFSET + 2) === 0) {
|
||||
// 'gnu'/'oldgnu' format. Similar to ustar, but has support for incremental and
|
||||
// multi-volume tarballs.
|
||||
} else {
|
||||
if (!allowUnknownFormat) {
|
||||
throw new Error('Invalid tar header: unknown format.')
|
||||
}
|
||||
}
|
||||
|
||||
// to support old tar versions that use trailing / to indicate dirs
|
||||
if (typeflag === 0 && name && name[name.length - 1] === '/') typeflag = 5
|
||||
|
||||
return {
|
||||
name,
|
||||
mode,
|
||||
uid,
|
||||
gid,
|
||||
size,
|
||||
mtime: new Date(1000 * mtime),
|
||||
type,
|
||||
linkname,
|
||||
uname,
|
||||
gname,
|
||||
devmajor,
|
||||
devminor
|
||||
}
|
||||
}
|
||||
0
unified-ai-platform/node_modules/prebuild-install/node_modules/tar-stream/index.js
generated
vendored
Normal file
0
unified-ai-platform/node_modules/prebuild-install/node_modules/tar-stream/index.js
generated
vendored
Normal file
Reference in New Issue
Block a user