mirror of
https://github.com/x1xhlol/system-prompts-and-models-of-ai-tools.git
synced 2026-02-04 05:50:50 +00:00
nhj
more
This commit is contained in:
111
unified-ai-platform/node_modules/tar/lib/create.js
generated
vendored
Normal file
111
unified-ai-platform/node_modules/tar/lib/create.js
generated
vendored
Normal file
@@ -0,0 +1,111 @@
|
||||
'use strict'
|
||||
|
||||
// tar -c
|
||||
const hlo = require('./high-level-opt.js')
|
||||
|
||||
const Pack = require('./pack.js')
|
||||
const fsm = require('fs-minipass')
|
||||
const t = require('./list.js')
|
||||
const path = require('path')
|
||||
|
||||
module.exports = (opt_, files, cb) => {
|
||||
if (typeof files === 'function') {
|
||||
cb = files
|
||||
}
|
||||
|
||||
if (Array.isArray(opt_)) {
|
||||
files = opt_, opt_ = {}
|
||||
}
|
||||
|
||||
if (!files || !Array.isArray(files) || !files.length) {
|
||||
throw new TypeError('no files or directories specified')
|
||||
}
|
||||
|
||||
files = Array.from(files)
|
||||
|
||||
const opt = hlo(opt_)
|
||||
|
||||
if (opt.sync && typeof cb === 'function') {
|
||||
throw new TypeError('callback not supported for sync tar functions')
|
||||
}
|
||||
|
||||
if (!opt.file && typeof cb === 'function') {
|
||||
throw new TypeError('callback only supported with file option')
|
||||
}
|
||||
|
||||
return opt.file && opt.sync ? createFileSync(opt, files)
|
||||
: opt.file ? createFile(opt, files, cb)
|
||||
: opt.sync ? createSync(opt, files)
|
||||
: create(opt, files)
|
||||
}
|
||||
|
||||
// Synchronously pack `files` into the tarball at opt.file.
const createFileSync = (opt, files) => {
  const pack = new Pack.Sync(opt)
  const sink = new fsm.WriteStreamSync(opt.file, {
    mode: opt.mode || 0o666,
  })
  pack.pipe(sink)
  addFilesSync(pack, files)
}
|
||||
|
||||
// Asynchronously pack `files` into the tarball at opt.file.
// Resolves when the write stream closes; with a callback, the callback
// is invoked for both success and failure (cb-style promise tail).
const createFile = (opt, files, cb) => {
  const pack = new Pack(opt)
  const sink = new fsm.WriteStream(opt.file, {
    mode: opt.mode || 0o666,
  })
  pack.pipe(sink)

  const done = new Promise((resolve, reject) => {
    sink.on('error', reject)
    sink.on('close', resolve)
    pack.on('error', reject)
  })

  addFilesAsync(pack, files)

  return cb ? done.then(cb, cb) : done
}
|
||||
|
||||
// Add every entry in `files` to the pack stream synchronously.
// A leading '@' means "read that tar file and add its entries".
const addFilesSync = (p, files) => {
  for (const file of files) {
    if (file.charAt(0) !== '@') {
      p.add(file)
      continue
    }
    // expand @archive.tar inline via a sync listing pass
    t({
      file: path.resolve(p.cwd, file.slice(1)),
      sync: true,
      noResume: true,
      onentry: entry => p.add(entry),
    })
  }
  p.end()
}
|
||||
|
||||
// Add entries to the pack stream, consuming `files` destructively.
// Plain names are added in a tight loop; an '@archive' reference
// suspends the loop, lists that archive asynchronously, and then
// recurses to finish whatever remains in `files`.
const addFilesAsync = (p, files) => {
  while (files.length) {
    const file = files.shift()
    if (file.charAt(0) !== '@') {
      p.add(file)
      continue
    }
    return t({
      file: path.resolve(p.cwd, file.slice(1)),
      noResume: true,
      onentry: entry => p.add(entry),
    }).then(_ => addFilesAsync(p, files))
  }
  // only reached once every entry (including recursions) is consumed
  p.end()
}
|
||||
|
||||
// Sync, no opt.file: return a fully-populated sync pack stream.
const createSync = (opt, files) => {
  const pack = new Pack.Sync(opt)
  addFilesSync(pack, files)
  return pack
}
|
||||
|
||||
// Async, no opt.file: return the pack stream while entries are added.
const create = (opt, files) => {
  const pack = new Pack(opt)
  addFilesAsync(pack, files)
  return pack
}
|
||||
113
unified-ai-platform/node_modules/tar/lib/extract.js
generated
vendored
Normal file
113
unified-ai-platform/node_modules/tar/lib/extract.js
generated
vendored
Normal file
@@ -0,0 +1,113 @@
|
||||
'use strict'
|
||||
|
||||
// tar -x
|
||||
const hlo = require('./high-level-opt.js')
|
||||
const Unpack = require('./unpack.js')
|
||||
const fs = require('fs')
|
||||
const fsm = require('fs-minipass')
|
||||
const path = require('path')
|
||||
const stripSlash = require('./strip-trailing-slashes.js')
|
||||
|
||||
module.exports = (opt_, files, cb) => {
|
||||
if (typeof opt_ === 'function') {
|
||||
cb = opt_, files = null, opt_ = {}
|
||||
} else if (Array.isArray(opt_)) {
|
||||
files = opt_, opt_ = {}
|
||||
}
|
||||
|
||||
if (typeof files === 'function') {
|
||||
cb = files, files = null
|
||||
}
|
||||
|
||||
if (!files) {
|
||||
files = []
|
||||
} else {
|
||||
files = Array.from(files)
|
||||
}
|
||||
|
||||
const opt = hlo(opt_)
|
||||
|
||||
if (opt.sync && typeof cb === 'function') {
|
||||
throw new TypeError('callback not supported for sync tar functions')
|
||||
}
|
||||
|
||||
if (!opt.file && typeof cb === 'function') {
|
||||
throw new TypeError('callback only supported with file option')
|
||||
}
|
||||
|
||||
if (files.length) {
|
||||
filesFilter(opt, files)
|
||||
}
|
||||
|
||||
return opt.file && opt.sync ? extractFileSync(opt)
|
||||
: opt.file ? extractFile(opt, cb)
|
||||
: opt.sync ? extractSync(opt)
|
||||
: extract(opt)
|
||||
}
|
||||
|
||||
// construct a filter that limits the file entries listed
|
||||
// include child entries if a dir is included
|
||||
// Install an opt.filter limiting extraction to the listed entries.
// Children of a listed directory are included: each path walks up its
// ancestors until it hits an explicitly listed path (true), or the
// filesystem root / '.' (false). Verdicts are memoized back into the
// map so repeated prefixes are only resolved once.
const filesFilter = (opt, files) => {
  const wanted = new Map(files.map(f => [stripSlash(f), true]))
  const userFilter = opt.filter

  const isIncluded = (file, parentRoot) => {
    const root = parentRoot || path.parse(file).root || '.'
    let verdict
    if (file === root) {
      // reached the top without matching any requested path
      verdict = false
    } else if (wanted.has(file)) {
      verdict = wanted.get(file)
    } else {
      verdict = isIncluded(path.dirname(file), root)
    }
    wanted.set(file, verdict)
    return verdict
  }

  // compose with any pre-existing user filter (it runs first)
  opt.filter = userFilter
    ? (file, entry) => userFilter(file, entry) && isIncluded(stripSlash(file))
    : file => isIncluded(stripSlash(file))
}
|
||||
|
||||
// Synchronously extract the tarball at opt.file.
const extractFileSync = opt => {
  const unpack = new Unpack.Sync(opt)

  const file = opt.file
  // stat up front so the read stream knows the total size: this trades
  // a zero-byte read() syscall for a stat, and usually results in less
  // memory allocation.
  const stat = fs.statSync(file)
  const readSize = opt.maxReadSize || 16 * 1024 * 1024
  const source = new fsm.ReadStreamSync(file, {
    readSize: readSize,
    size: stat.size,
  })
  source.pipe(unpack)
}
|
||||
|
||||
// Asynchronously extract the tarball at opt.file. Resolves when the
// unpack stream closes; with a callback, the callback is invoked for
// both success and failure (cb-style promise tail).
const extractFile = (opt, cb) => {
  const unpack = new Unpack(opt)
  const readSize = opt.maxReadSize || 16 * 1024 * 1024

  const file = opt.file
  const done = new Promise((resolve, reject) => {
    unpack.on('error', reject)
    unpack.on('close', resolve)

    // stat up front so the read stream knows the total size: this
    // trades a zero-byte read() syscall for a stat, and usually
    // results in less memory allocation.
    fs.stat(file, (er, stat) => {
      if (er) {
        return reject(er)
      }
      const source = new fsm.ReadStream(file, {
        readSize: readSize,
        size: stat.size,
      })
      source.on('error', reject)
      source.pipe(unpack)
    })
  })
  return cb ? done.then(cb, cb) : done
}
|
||||
|
||||
// Sync streaming extraction: caller feeds tar data into the returned stream.
const extractSync = opt => {
  return new Unpack.Sync(opt)
}
|
||||
|
||||
// Async streaming extraction: caller pipes tar data into the returned stream.
const extract = opt => {
  return new Unpack(opt)
}
|
||||
20
unified-ai-platform/node_modules/tar/lib/get-write-flag.js
generated
vendored
Normal file
20
unified-ai-platform/node_modules/tar/lib/get-write-flag.js
generated
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
// Pick the open(2) flag used when creating extracted files.
//
// On Windows (Node v12.9.0+), files under 512kb are opened with
// UV_FS_O_FILEMAP, which is a big win for the many-small-files
// tarballs this library mostly handles (npm packages and the like);
// the fairly low size cutoff avoids slowing other cases down.

// Both hooks below let tests fake the platform and the fs module.
const platform = process.env.__FAKE_PLATFORM__ || process.platform
const isWindows = platform === 'win32'
const fs = global.__FAKE_TESTING_FS__ || require('fs')

/* istanbul ignore next */
// UV_FS_O_FILEMAP defaults to 0 on runtimes that don't expose it
const { O_CREAT, O_TRUNC, O_WRONLY, UV_FS_O_FILEMAP = 0 } = fs.constants

const fMapEnabled = isWindows && !!UV_FS_O_FILEMAP
const fMapLimit = 512 * 1024
const fMapFlag = UV_FS_O_FILEMAP | O_TRUNC | O_CREAT | O_WRONLY

// fmap-capable Windows: numeric flag for small files, plain 'w' otherwise.
module.exports = fMapEnabled
  ? size => (size < fMapLimit ? fMapFlag : 'w')
  : () => 'w'
|
||||
Reference in New Issue
Block a user