node_modules/tar/LICENSE (15 lines, generated, vendored, Normal file)
@@ -0,0 +1,15 @@
The ISC License

Copyright (c) Isaac Z. Schlueter and Contributors

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
node_modules/tar/README.md (1070 lines, generated, vendored, Normal file)
File diff suppressed because it is too large
node_modules/tar/index.js (18 lines, generated, vendored, Normal file)
@@ -0,0 +1,18 @@
'use strict'

// high-level commands
exports.c = exports.create = require('./lib/create.js')
exports.r = exports.replace = require('./lib/replace.js')
exports.t = exports.list = require('./lib/list.js')
exports.u = exports.update = require('./lib/update.js')
exports.x = exports.extract = require('./lib/extract.js')

// classes
exports.Pack = require('./lib/pack.js')
exports.Unpack = require('./lib/unpack.js')
exports.Parse = require('./lib/parse.js')
exports.ReadEntry = require('./lib/read-entry.js')
exports.WriteEntry = require('./lib/write-entry.js')
exports.Header = require('./lib/header.js')
exports.Pax = require('./lib/pax.js')
exports.types = require('./lib/types.js')
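
For orientation, a minimal sketch of how this high-level API is typically consumed once the package is installed; the archive name demo.tgz and the paths are illustrative, not part of this commit:

const tar = require('tar')

// create a gzipped archive from two paths, then list its entries
tar.c({ gzip: true, file: 'demo.tgz' }, ['index.js', 'lib/'])
  .then(() => tar.t({ file: 'demo.tgz', onentry: entry => console.log(entry.path) }))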
node_modules/tar/lib/create.js (111 lines, generated, vendored, Normal file)
@@ -0,0 +1,111 @@
'use strict'

// tar -c
const hlo = require('./high-level-opt.js')

const Pack = require('./pack.js')
const fsm = require('fs-minipass')
const t = require('./list.js')
const path = require('path')

module.exports = (opt_, files, cb) => {
  if (typeof files === 'function') {
    cb = files
  }

  if (Array.isArray(opt_)) {
    files = opt_, opt_ = {}
  }

  if (!files || !Array.isArray(files) || !files.length) {
    throw new TypeError('no files or directories specified')
  }

  files = Array.from(files)

  const opt = hlo(opt_)

  if (opt.sync && typeof cb === 'function') {
    throw new TypeError('callback not supported for sync tar functions')
  }

  if (!opt.file && typeof cb === 'function') {
    throw new TypeError('callback only supported with file option')
  }

  return opt.file && opt.sync ? createFileSync(opt, files)
    : opt.file ? createFile(opt, files, cb)
    : opt.sync ? createSync(opt, files)
    : create(opt, files)
}

const createFileSync = (opt, files) => {
  const p = new Pack.Sync(opt)
  const stream = new fsm.WriteStreamSync(opt.file, {
    mode: opt.mode || 0o666,
  })
  p.pipe(stream)
  addFilesSync(p, files)
}

const createFile = (opt, files, cb) => {
  const p = new Pack(opt)
  const stream = new fsm.WriteStream(opt.file, {
    mode: opt.mode || 0o666,
  })
  p.pipe(stream)

  const promise = new Promise((res, rej) => {
    stream.on('error', rej)
    stream.on('close', res)
    p.on('error', rej)
  })

  addFilesAsync(p, files)

  return cb ? promise.then(cb, cb) : promise
}

const addFilesSync = (p, files) => {
  files.forEach(file => {
    if (file.charAt(0) === '@') {
      t({
        file: path.resolve(p.cwd, file.slice(1)),
        sync: true,
        noResume: true,
        onentry: entry => p.add(entry),
      })
    } else {
      p.add(file)
    }
  })
  p.end()
}

const addFilesAsync = (p, files) => {
  while (files.length) {
    const file = files.shift()
    if (file.charAt(0) === '@') {
      return t({
        file: path.resolve(p.cwd, file.slice(1)),
        noResume: true,
        onentry: entry => p.add(entry),
      }).then(_ => addFilesAsync(p, files))
    } else {
      p.add(file)
    }
  }
  p.end()
}

const createSync = (opt, files) => {
  const p = new Pack.Sync(opt)
  addFilesSync(p, files)
  return p
}

const create = (opt, files) => {
  const p = new Pack(opt)
  addFilesAsync(p, files)
  return p
}
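
A hedged sketch of the '@' file-list behavior handled by addFilesSync/addFilesAsync above: a name starting with '@' is read as an existing archive whose entries are copied into the new one (archive names illustrative):

const tar = require('tar')

// copy every entry of old.tar into new.tar, plus one file from disk;
// '@old.tar' triggers the list-and-re-add path shown above
tar.c({ file: 'new.tar' }, ['@old.tar', 'extra.txt'])
  .then(() => console.log('created new.tar'))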
node_modules/tar/lib/extract.js (113 lines, generated, vendored, Normal file)
@@ -0,0 +1,113 @@
'use strict'

// tar -x
const hlo = require('./high-level-opt.js')
const Unpack = require('./unpack.js')
const fs = require('fs')
const fsm = require('fs-minipass')
const path = require('path')
const stripSlash = require('./strip-trailing-slashes.js')

module.exports = (opt_, files, cb) => {
  if (typeof opt_ === 'function') {
    cb = opt_, files = null, opt_ = {}
  } else if (Array.isArray(opt_)) {
    files = opt_, opt_ = {}
  }

  if (typeof files === 'function') {
    cb = files, files = null
  }

  if (!files) {
    files = []
  } else {
    files = Array.from(files)
  }

  const opt = hlo(opt_)

  if (opt.sync && typeof cb === 'function') {
    throw new TypeError('callback not supported for sync tar functions')
  }

  if (!opt.file && typeof cb === 'function') {
    throw new TypeError('callback only supported with file option')
  }

  if (files.length) {
    filesFilter(opt, files)
  }

  return opt.file && opt.sync ? extractFileSync(opt)
    : opt.file ? extractFile(opt, cb)
    : opt.sync ? extractSync(opt)
    : extract(opt)
}

// construct a filter that limits the file entries listed
// include child entries if a dir is included
const filesFilter = (opt, files) => {
  const map = new Map(files.map(f => [stripSlash(f), true]))
  const filter = opt.filter

  const mapHas = (file, r) => {
    const root = r || path.parse(file).root || '.'
    const ret = file === root ? false
      : map.has(file) ? map.get(file)
      : mapHas(path.dirname(file), root)

    map.set(file, ret)
    return ret
  }

  opt.filter = filter
    ? (file, entry) => filter(file, entry) && mapHas(stripSlash(file))
    : file => mapHas(stripSlash(file))
}

const extractFileSync = opt => {
  const u = new Unpack.Sync(opt)

  const file = opt.file
  const stat = fs.statSync(file)
  // This trades a zero-byte read() syscall for a stat
  // However, it will usually result in less memory allocation
  const readSize = opt.maxReadSize || 16 * 1024 * 1024
  const stream = new fsm.ReadStreamSync(file, {
    readSize: readSize,
    size: stat.size,
  })
  stream.pipe(u)
}

const extractFile = (opt, cb) => {
  const u = new Unpack(opt)
  const readSize = opt.maxReadSize || 16 * 1024 * 1024

  const file = opt.file
  const p = new Promise((resolve, reject) => {
    u.on('error', reject)
    u.on('close', resolve)

    // This trades a zero-byte read() syscall for a stat
    // However, it will usually result in less memory allocation
    fs.stat(file, (er, stat) => {
      if (er) {
        reject(er)
      } else {
        const stream = new fsm.ReadStream(file, {
          readSize: readSize,
          size: stat.size,
        })
        stream.on('error', reject)
        stream.pipe(u)
      }
    })
  })
  return cb ? p.then(cb, cb) : p
}

const extractSync = opt => new Unpack.Sync(opt)

const extract = opt => new Unpack(opt)
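
Since filesFilter marks a directory and lets mapHas cascade to children, passing a directory name extracts its whole subtree. A minimal sketch (archive and directory names illustrative):

const tar = require('tar')

// extract only lib/ and everything under it; the trailing slash is
// stripped by stripSlash before matching
tar.x({ file: 'pkg.tgz', cwd: '/tmp/out' }, ['lib/'])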
node_modules/tar/lib/get-write-flag.js (20 lines, generated, vendored, Normal file)
@@ -0,0 +1,20 @@
// Get the appropriate flag to use for creating files
// We use fmap on Windows platforms for files less than
// 512kb. This is a fairly low limit, but avoids making
// things slower in some cases. Since most of what this
// library is used for is extracting tarballs of many
// relatively small files in npm packages and the like,
// it can be a big boost on Windows platforms.
// Only supported in Node v12.9.0 and above.
const platform = process.env.__FAKE_PLATFORM__ || process.platform
const isWindows = platform === 'win32'
const fs = global.__FAKE_TESTING_FS__ || require('fs')

/* istanbul ignore next */
const { O_CREAT, O_TRUNC, O_WRONLY, UV_FS_O_FILEMAP = 0 } = fs.constants

const fMapEnabled = isWindows && !!UV_FS_O_FILEMAP
const fMapLimit = 512 * 1024
const fMapFlag = UV_FS_O_FILEMAP | O_TRUNC | O_CREAT | O_WRONLY
module.exports = !fMapEnabled ? () => 'w'
  : size => size < fMapLimit ? fMapFlag : 'w'
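
A sketch of how the exported function is meant to be consumed, mirroring how unpack passes its result to fs.open; on non-Windows platforms (or without UV_FS_O_FILEMAP) it always returns the plain 'w' flag (file name illustrative):

const getWriteFlag = require('./get-write-flag.js')
const fs = require('fs')

// files under 512kb get the numeric fmap flag on supporting Windows builds;
// everywhere else this is just the string 'w'
fs.open('out.bin', getWriteFlag(64 * 1024), 0o666, (er, fd) => {
  if (er) throw er
  fs.close(fd, () => {})
})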
node_modules/tar/lib/header.js (304 lines, generated, vendored, Normal file)
@@ -0,0 +1,304 @@
'use strict'
// parse a 512-byte header block to a data object, or vice-versa
// encode returns `true` if a pax extended header is needed, because
// the data could not be faithfully encoded in a simple header.
// (Also, check header.needPax to see if it needs a pax header.)

const types = require('./types.js')
const pathModule = require('path').posix
const large = require('./large-numbers.js')

const SLURP = Symbol('slurp')
const TYPE = Symbol('type')

class Header {
  constructor (data, off, ex, gex) {
    this.cksumValid = false
    this.needPax = false
    this.nullBlock = false

    this.block = null
    this.path = null
    this.mode = null
    this.uid = null
    this.gid = null
    this.size = null
    this.mtime = null
    this.cksum = null
    this[TYPE] = '0'
    this.linkpath = null
    this.uname = null
    this.gname = null
    this.devmaj = 0
    this.devmin = 0
    this.atime = null
    this.ctime = null

    if (Buffer.isBuffer(data)) {
      this.decode(data, off || 0, ex, gex)
    } else if (data) {
      this.set(data)
    }
  }

  decode (buf, off, ex, gex) {
    if (!off) {
      off = 0
    }

    if (!buf || !(buf.length >= off + 512)) {
      throw new Error('need 512 bytes for header')
    }

    this.path = decString(buf, off, 100)
    this.mode = decNumber(buf, off + 100, 8)
    this.uid = decNumber(buf, off + 108, 8)
    this.gid = decNumber(buf, off + 116, 8)
    this.size = decNumber(buf, off + 124, 12)
    this.mtime = decDate(buf, off + 136, 12)
    this.cksum = decNumber(buf, off + 148, 12)

    // if we have extended or global extended headers, apply them now
    // See https://github.com/npm/node-tar/pull/187
    this[SLURP](ex)
    this[SLURP](gex, true)

    // old tar versions marked dirs as a file with a trailing /
    this[TYPE] = decString(buf, off + 156, 1)
    if (this[TYPE] === '') {
      this[TYPE] = '0'
    }
    if (this[TYPE] === '0' && this.path.slice(-1) === '/') {
      this[TYPE] = '5'
    }

    // tar implementations sometimes incorrectly put the stat(dir).size
    // as the size in the tarball, even though Directory entries are
    // not able to have any body at all. In the very rare chance that
    // it actually DOES have a body, we weren't going to do anything with
    // it anyway, and it'll just be a warning about an invalid header.
    if (this[TYPE] === '5') {
      this.size = 0
    }

    this.linkpath = decString(buf, off + 157, 100)
    if (buf.slice(off + 257, off + 265).toString() === 'ustar\u000000') {
      this.uname = decString(buf, off + 265, 32)
      this.gname = decString(buf, off + 297, 32)
      this.devmaj = decNumber(buf, off + 329, 8)
      this.devmin = decNumber(buf, off + 337, 8)
      if (buf[off + 475] !== 0) {
        // definitely a prefix, definitely >130 chars.
        const prefix = decString(buf, off + 345, 155)
        this.path = prefix + '/' + this.path
      } else {
        const prefix = decString(buf, off + 345, 130)
        if (prefix) {
          this.path = prefix + '/' + this.path
        }
        this.atime = decDate(buf, off + 476, 12)
        this.ctime = decDate(buf, off + 488, 12)
      }
    }

    let sum = 8 * 0x20
    for (let i = off; i < off + 148; i++) {
      sum += buf[i]
    }

    for (let i = off + 156; i < off + 512; i++) {
      sum += buf[i]
    }

    this.cksumValid = sum === this.cksum
    if (this.cksum === null && sum === 8 * 0x20) {
      this.nullBlock = true
    }
  }

  [SLURP] (ex, global) {
    for (const k in ex) {
      // we slurp in everything except for the path attribute in
      // a global extended header, because that's weird.
      if (ex[k] !== null && ex[k] !== undefined &&
          !(global && k === 'path')) {
        this[k] = ex[k]
      }
    }
  }

  encode (buf, off) {
    if (!buf) {
      buf = this.block = Buffer.alloc(512)
      off = 0
    }

    if (!off) {
      off = 0
    }

    if (!(buf.length >= off + 512)) {
      throw new Error('need 512 bytes for header')
    }

    const prefixSize = this.ctime || this.atime ? 130 : 155
    const split = splitPrefix(this.path || '', prefixSize)
    const path = split[0]
    const prefix = split[1]
    this.needPax = split[2]

    this.needPax = encString(buf, off, 100, path) || this.needPax
    this.needPax = encNumber(buf, off + 100, 8, this.mode) || this.needPax
    this.needPax = encNumber(buf, off + 108, 8, this.uid) || this.needPax
    this.needPax = encNumber(buf, off + 116, 8, this.gid) || this.needPax
    this.needPax = encNumber(buf, off + 124, 12, this.size) || this.needPax
    this.needPax = encDate(buf, off + 136, 12, this.mtime) || this.needPax
    buf[off + 156] = this[TYPE].charCodeAt(0)
    this.needPax = encString(buf, off + 157, 100, this.linkpath) || this.needPax
    buf.write('ustar\u000000', off + 257, 8)
    this.needPax = encString(buf, off + 265, 32, this.uname) || this.needPax
    this.needPax = encString(buf, off + 297, 32, this.gname) || this.needPax
    this.needPax = encNumber(buf, off + 329, 8, this.devmaj) || this.needPax
    this.needPax = encNumber(buf, off + 337, 8, this.devmin) || this.needPax
    this.needPax = encString(buf, off + 345, prefixSize, prefix) || this.needPax
    if (buf[off + 475] !== 0) {
      this.needPax = encString(buf, off + 345, 155, prefix) || this.needPax
    } else {
      this.needPax = encString(buf, off + 345, 130, prefix) || this.needPax
      this.needPax = encDate(buf, off + 476, 12, this.atime) || this.needPax
      this.needPax = encDate(buf, off + 488, 12, this.ctime) || this.needPax
    }

    let sum = 8 * 0x20
    for (let i = off; i < off + 148; i++) {
      sum += buf[i]
    }

    for (let i = off + 156; i < off + 512; i++) {
      sum += buf[i]
    }

    this.cksum = sum
    encNumber(buf, off + 148, 8, this.cksum)
    this.cksumValid = true

    return this.needPax
  }

  set (data) {
    for (const i in data) {
      if (data[i] !== null && data[i] !== undefined) {
        this[i] = data[i]
      }
    }
  }

  get type () {
    return types.name.get(this[TYPE]) || this[TYPE]
  }

  get typeKey () {
    return this[TYPE]
  }

  set type (type) {
    if (types.code.has(type)) {
      this[TYPE] = types.code.get(type)
    } else {
      this[TYPE] = type
    }
  }
}

const splitPrefix = (p, prefixSize) => {
  const pathSize = 100
  let pp = p
  let prefix = ''
  let ret
  const root = pathModule.parse(p).root || '.'

  if (Buffer.byteLength(pp) < pathSize) {
    ret = [pp, prefix, false]
  } else {
    // first set prefix to the dir, and path to the base
    prefix = pathModule.dirname(pp)
    pp = pathModule.basename(pp)

    do {
      if (Buffer.byteLength(pp) <= pathSize &&
          Buffer.byteLength(prefix) <= prefixSize) {
        // both fit!
        ret = [pp, prefix, false]
      } else if (Buffer.byteLength(pp) > pathSize &&
          Buffer.byteLength(prefix) <= prefixSize) {
        // prefix fits in prefix, but path doesn't fit in path
        ret = [pp.slice(0, pathSize - 1), prefix, true]
      } else {
        // make path take a bit from prefix
        pp = pathModule.join(pathModule.basename(prefix), pp)
        prefix = pathModule.dirname(prefix)
      }
    } while (prefix !== root && !ret)

    // at this point, found no resolution, just truncate
    if (!ret) {
      ret = [p.slice(0, pathSize - 1), '', true]
    }
  }
  return ret
}

const decString = (buf, off, size) =>
  buf.slice(off, off + size).toString('utf8').replace(/\0.*/, '')

const decDate = (buf, off, size) =>
  numToDate(decNumber(buf, off, size))

const numToDate = num => num === null ? null : new Date(num * 1000)

const decNumber = (buf, off, size) =>
  buf[off] & 0x80 ? large.parse(buf.slice(off, off + size))
    : decSmallNumber(buf, off, size)

const nanNull = value => isNaN(value) ? null : value

const decSmallNumber = (buf, off, size) =>
  nanNull(parseInt(
    buf.slice(off, off + size)
      .toString('utf8').replace(/\0.*$/, '').trim(), 8))

// the maximum encodable as a null-terminated octal, by field size
const MAXNUM = {
  12: 0o77777777777,
  8: 0o7777777,
}

const encNumber = (buf, off, size, number) =>
  number === null ? false :
  number > MAXNUM[size] || number < 0
    ? (large.encode(number, buf.slice(off, off + size)), true)
    : (encSmallNumber(buf, off, size, number), false)

const encSmallNumber = (buf, off, size, number) =>
  buf.write(octalString(number, size), off, size, 'ascii')

const octalString = (number, size) =>
  padOctal(Math.floor(number).toString(8), size)

const padOctal = (string, size) =>
  (string.length === size - 1 ? string
    : new Array(size - string.length - 1).join('0') + string + ' ') + '\0'

const encDate = (buf, off, size, date) =>
  date === null ? false :
  encNumber(buf, off, size, date.getTime() / 1000)

// enough to fill the longest string we've got
const NULLS = new Array(156).join('\0')
// pad with nulls, return true if it's longer or non-ascii
const encString = (buf, off, size, string) =>
  string === null ? false :
  (buf.write(string + NULLS, off, size, 'utf8'),
  string.length !== Buffer.byteLength(string) || string.length > size)

module.exports = Header
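
A round-trip sketch of the encode/decode pair above, assuming the vendored module layout; encode fills this.block when no buffer is passed, and decode recomputes the checksum over the same byte ranges:

const Header = require('./lib/header.js')

// encode a minimal file header into a fresh 512-byte block, then decode it
const h = new Header({ path: 'hello.txt', type: 'File', size: 5, mtime: new Date() })
h.encode()
const round = new Header(h.block)
console.log(round.path, round.size, round.cksumValid) // hello.txt 5 true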
node_modules/tar/lib/high-level-opt.js (29 lines, generated, vendored, Normal file)
@@ -0,0 +1,29 @@
'use strict'

// turn tar(1) style args like `C` into the more verbose things like `cwd`

const argmap = new Map([
  ['C', 'cwd'],
  ['f', 'file'],
  ['z', 'gzip'],
  ['P', 'preservePaths'],
  ['U', 'unlink'],
  ['strip-components', 'strip'],
  ['stripComponents', 'strip'],
  ['keep-newer', 'newer'],
  ['keepNewer', 'newer'],
  ['keep-newer-files', 'newer'],
  ['keepNewerFiles', 'newer'],
  ['k', 'keep'],
  ['keep-existing', 'keep'],
  ['keepExisting', 'keep'],
  ['m', 'noMtime'],
  ['no-mtime', 'noMtime'],
  ['p', 'preserveOwner'],
  ['L', 'follow'],
  ['h', 'follow'],
])

module.exports = opt => opt ? Object.keys(opt).map(k => [
  argmap.has(k) ? argmap.get(k) : k, opt[k],
]).reduce((set, kv) => (set[kv[0]] = kv[1], set), Object.create(null)) : {}
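
A quick sketch of the mapping; shorthand keys are expanded and unknown keys pass through untouched:

const hlo = require('./lib/high-level-opt.js')

console.log(hlo({ C: '/tmp', z: true, file: 'out.tgz' }))
// cwd: '/tmp', gzip: true, file: 'out.tgz' (on a null-prototype object)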
node_modules/tar/lib/large-numbers.js (104 lines, generated, vendored, Normal file)
@@ -0,0 +1,104 @@
'use strict'
// Tar can encode large and negative numbers using a leading byte of
// 0xff for negative, and 0x80 for positive.

const encode = (num, buf) => {
  if (!Number.isSafeInteger(num)) {
    // The number is so large that javascript cannot represent it with integer
    // precision.
    throw Error('cannot encode number outside of javascript safe integer range')
  } else if (num < 0) {
    encodeNegative(num, buf)
  } else {
    encodePositive(num, buf)
  }
  return buf
}

const encodePositive = (num, buf) => {
  buf[0] = 0x80

  for (var i = buf.length; i > 1; i--) {
    buf[i - 1] = num & 0xff
    num = Math.floor(num / 0x100)
  }
}

const encodeNegative = (num, buf) => {
  buf[0] = 0xff
  var flipped = false
  num = num * -1
  for (var i = buf.length; i > 1; i--) {
    var byte = num & 0xff
    num = Math.floor(num / 0x100)
    if (flipped) {
      buf[i - 1] = onesComp(byte)
    } else if (byte === 0) {
      buf[i - 1] = 0
    } else {
      flipped = true
      buf[i - 1] = twosComp(byte)
    }
  }
}

const parse = (buf) => {
  const pre = buf[0]
  const value = pre === 0x80 ? pos(buf.slice(1, buf.length))
    : pre === 0xff ? twos(buf)
    : null
  if (value === null) {
    throw Error('invalid base256 encoding')
  }

  if (!Number.isSafeInteger(value)) {
    // The number is so large that javascript cannot represent it with integer
    // precision.
    throw Error('parsed number outside of javascript safe integer range')
  }

  return value
}

const twos = (buf) => {
  var len = buf.length
  var sum = 0
  var flipped = false
  for (var i = len - 1; i > -1; i--) {
    var byte = buf[i]
    var f
    if (flipped) {
      f = onesComp(byte)
    } else if (byte === 0) {
      f = byte
    } else {
      flipped = true
      f = twosComp(byte)
    }
    if (f !== 0) {
      sum -= f * Math.pow(256, len - i - 1)
    }
  }
  return sum
}

const pos = (buf) => {
  var len = buf.length
  var sum = 0
  for (var i = len - 1; i > -1; i--) {
    var byte = buf[i]
    if (byte !== 0) {
      sum += byte * Math.pow(256, len - i - 1)
    }
  }
  return sum
}

const onesComp = byte => (0xff ^ byte) & 0xff

const twosComp = byte => ((0xff ^ byte) + 1) & 0xff

module.exports = {
  encode,
  parse,
}
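
A round-trip sketch: a 12-byte octal size field tops out at 0o77777777777 (about 8 GiB), so anything larger is written in base-256 with the 0x80 marker byte:

const large = require('./lib/large-numbers.js')

const buf = large.encode(9 * 1024 * 1024 * 1024, Buffer.alloc(12))
console.log(buf[0].toString(16)) // '80', the positive base-256 marker
console.log(large.parse(buf))    // 9663676416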
node_modules/tar/lib/list.js (139 lines, generated, vendored, Normal file)
@@ -0,0 +1,139 @@
'use strict'

// XXX: This shares a lot in common with extract.js
// maybe some DRY opportunity here?

// tar -t
const hlo = require('./high-level-opt.js')
const Parser = require('./parse.js')
const fs = require('fs')
const fsm = require('fs-minipass')
const path = require('path')
const stripSlash = require('./strip-trailing-slashes.js')

module.exports = (opt_, files, cb) => {
  if (typeof opt_ === 'function') {
    cb = opt_, files = null, opt_ = {}
  } else if (Array.isArray(opt_)) {
    files = opt_, opt_ = {}
  }

  if (typeof files === 'function') {
    cb = files, files = null
  }

  if (!files) {
    files = []
  } else {
    files = Array.from(files)
  }

  const opt = hlo(opt_)

  if (opt.sync && typeof cb === 'function') {
    throw new TypeError('callback not supported for sync tar functions')
  }

  if (!opt.file && typeof cb === 'function') {
    throw new TypeError('callback only supported with file option')
  }

  if (files.length) {
    filesFilter(opt, files)
  }

  if (!opt.noResume) {
    onentryFunction(opt)
  }

  return opt.file && opt.sync ? listFileSync(opt)
    : opt.file ? listFile(opt, cb)
    : list(opt)
}

const onentryFunction = opt => {
  const onentry = opt.onentry
  opt.onentry = onentry ? e => {
    onentry(e)
    e.resume()
  } : e => e.resume()
}

// construct a filter that limits the file entries listed
// include child entries if a dir is included
const filesFilter = (opt, files) => {
  const map = new Map(files.map(f => [stripSlash(f), true]))
  const filter = opt.filter

  const mapHas = (file, r) => {
    const root = r || path.parse(file).root || '.'
    const ret = file === root ? false
      : map.has(file) ? map.get(file)
      : mapHas(path.dirname(file), root)

    map.set(file, ret)
    return ret
  }

  opt.filter = filter
    ? (file, entry) => filter(file, entry) && mapHas(stripSlash(file))
    : file => mapHas(stripSlash(file))
}

const listFileSync = opt => {
  const p = list(opt)
  const file = opt.file
  let threw = true
  let fd
  try {
    const stat = fs.statSync(file)
    const readSize = opt.maxReadSize || 16 * 1024 * 1024
    if (stat.size < readSize) {
      p.end(fs.readFileSync(file))
    } else {
      let pos = 0
      const buf = Buffer.allocUnsafe(readSize)
      fd = fs.openSync(file, 'r')
      while (pos < stat.size) {
        const bytesRead = fs.readSync(fd, buf, 0, readSize, pos)
        pos += bytesRead
        p.write(buf.slice(0, bytesRead))
      }
      p.end()
    }
    threw = false
  } finally {
    if (threw && fd) {
      try {
        fs.closeSync(fd)
      } catch (er) {}
    }
  }
}

const listFile = (opt, cb) => {
  const parse = new Parser(opt)
  const readSize = opt.maxReadSize || 16 * 1024 * 1024

  const file = opt.file
  const p = new Promise((resolve, reject) => {
    parse.on('error', reject)
    parse.on('end', resolve)

    fs.stat(file, (er, stat) => {
      if (er) {
        reject(er)
      } else {
        const stream = new fsm.ReadStream(file, {
          readSize: readSize,
          size: stat.size,
        })
        stream.on('error', reject)
        stream.pipe(parse)
      }
    })
  })
  return cb ? p.then(cb, cb) : p
}

const list = opt => new Parser(opt)
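
A minimal listing sketch (archive name illustrative); entries are resumed automatically after each onentry call unless noResume is set, so the parser never stalls on unread bodies:

const tar = require('tar')

tar.t({
  file: 'pkg.tgz',
  onentry: entry => console.log(entry.path, entry.size),
})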
node_modules/tar/lib/mkdir.js (229 lines, generated, vendored, Normal file)
@@ -0,0 +1,229 @@
'use strict'
// wrapper around mkdirp for tar's needs.

// TODO: This should probably be a class, not functionally
// passing around state in a gazillion args.

const mkdirp = require('mkdirp')
const fs = require('fs')
const path = require('path')
const chownr = require('chownr')
const normPath = require('./normalize-windows-path.js')

class SymlinkError extends Error {
  constructor (symlink, path) {
    super('Cannot extract through symbolic link')
    this.path = path
    this.symlink = symlink
  }

  get name () {
    return 'SymlinkError'
  }
}

class CwdError extends Error {
  constructor (path, code) {
    super(code + ': Cannot cd into \'' + path + '\'')
    this.path = path
    this.code = code
  }

  get name () {
    return 'CwdError'
  }
}

const cGet = (cache, key) => cache.get(normPath(key))
const cSet = (cache, key, val) => cache.set(normPath(key), val)

const checkCwd = (dir, cb) => {
  fs.stat(dir, (er, st) => {
    if (er || !st.isDirectory()) {
      er = new CwdError(dir, er && er.code || 'ENOTDIR')
    }
    cb(er)
  })
}

module.exports = (dir, opt, cb) => {
  dir = normPath(dir)

  // if there's any overlap between mask and mode,
  // then we'll need an explicit chmod
  const umask = opt.umask
  const mode = opt.mode | 0o0700
  const needChmod = (mode & umask) !== 0

  const uid = opt.uid
  const gid = opt.gid
  const doChown = typeof uid === 'number' &&
    typeof gid === 'number' &&
    (uid !== opt.processUid || gid !== opt.processGid)

  const preserve = opt.preserve
  const unlink = opt.unlink
  const cache = opt.cache
  const cwd = normPath(opt.cwd)

  const done = (er, created) => {
    if (er) {
      cb(er)
    } else {
      cSet(cache, dir, true)
      if (created && doChown) {
        chownr(created, uid, gid, er => done(er))
      } else if (needChmod) {
        fs.chmod(dir, mode, cb)
      } else {
        cb()
      }
    }
  }

  if (cache && cGet(cache, dir) === true) {
    return done()
  }

  if (dir === cwd) {
    return checkCwd(dir, done)
  }

  if (preserve) {
    return mkdirp(dir, { mode }).then(made => done(null, made), done)
  }

  const sub = normPath(path.relative(cwd, dir))
  const parts = sub.split('/')
  mkdir_(cwd, parts, mode, cache, unlink, cwd, null, done)
}

const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => {
  if (!parts.length) {
    return cb(null, created)
  }
  const p = parts.shift()
  const part = normPath(path.resolve(base + '/' + p))
  if (cGet(cache, part)) {
    return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
  }
  fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb))
}

const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => er => {
  if (er) {
    fs.lstat(part, (statEr, st) => {
      if (statEr) {
        statEr.path = statEr.path && normPath(statEr.path)
        cb(statEr)
      } else if (st.isDirectory()) {
        mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
      } else if (unlink) {
        fs.unlink(part, er => {
          if (er) {
            return cb(er)
          }
          fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb))
        })
      } else if (st.isSymbolicLink()) {
        return cb(new SymlinkError(part, part + '/' + parts.join('/')))
      } else {
        cb(er)
      }
    })
  } else {
    created = created || part
    mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
  }
}

const checkCwdSync = dir => {
  let ok = false
  let code = 'ENOTDIR'
  try {
    ok = fs.statSync(dir).isDirectory()
  } catch (er) {
    code = er.code
  } finally {
    if (!ok) {
      throw new CwdError(dir, code)
    }
  }
}

module.exports.sync = (dir, opt) => {
  dir = normPath(dir)
  // if there's any overlap between mask and mode,
  // then we'll need an explicit chmod
  const umask = opt.umask
  const mode = opt.mode | 0o0700
  const needChmod = (mode & umask) !== 0

  const uid = opt.uid
  const gid = opt.gid
  const doChown = typeof uid === 'number' &&
    typeof gid === 'number' &&
    (uid !== opt.processUid || gid !== opt.processGid)

  const preserve = opt.preserve
  const unlink = opt.unlink
  const cache = opt.cache
  const cwd = normPath(opt.cwd)

  const done = (created) => {
    cSet(cache, dir, true)
    if (created && doChown) {
      chownr.sync(created, uid, gid)
    }
    if (needChmod) {
      fs.chmodSync(dir, mode)
    }
  }

  if (cache && cGet(cache, dir) === true) {
    return done()
  }

  if (dir === cwd) {
    checkCwdSync(cwd)
    return done()
  }

  if (preserve) {
    return done(mkdirp.sync(dir, mode))
  }

  const sub = normPath(path.relative(cwd, dir))
  const parts = sub.split('/')
  let created = null
  for (let p = parts.shift(), part = cwd;
    p && (part += '/' + p);
    p = parts.shift()) {
    part = normPath(path.resolve(part))
    if (cGet(cache, part)) {
      continue
    }

    try {
      fs.mkdirSync(part, mode)
      created = created || part
      cSet(cache, part, true)
    } catch (er) {
      const st = fs.lstatSync(part)
      if (st.isDirectory()) {
        cSet(cache, part, true)
        continue
      } else if (unlink) {
        fs.unlinkSync(part)
        fs.mkdirSync(part, mode)
        created = created || part
        cSet(cache, part, true)
        continue
      } else if (st.isSymbolicLink()) {
        return new SymlinkError(part, part + '/' + parts.join('/'))
      }
    }
  }

  return done(created)
}
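
A hedged sketch of the async entry point, with option names mirroring what unpack.js passes internally; it assumes /tmp/out already exists, and the cache Map is required because done() writes into it:

const mkdir = require('./lib/mkdir.js')

// create nested dirs under an extraction cwd, refusing to cross symlinks
mkdir('/tmp/out/a/b/c', {
  cwd: '/tmp/out',
  mode: 0o755,
  umask: 0o22,
  cache: new Map(),
}, er => {
  if (er) throw er
})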
node_modules/tar/lib/mode-fix.js (27 lines, generated, vendored, Normal file)
@@ -0,0 +1,27 @@
'use strict'
module.exports = (mode, isDir, portable) => {
  mode &= 0o7777

  // in portable mode, use the minimum reasonable umask
  // if this system creates files with 0o664 by default
  // (as some linux distros do), then we'll write the
  // archive with 0o644 instead. Also, don't ever create
  // a file that is not readable/writable by the owner.
  if (portable) {
    mode = (mode | 0o600) & ~0o22
  }

  // if dirs are readable, then they should be listable
  if (isDir) {
    if (mode & 0o400) {
      mode |= 0o100
    }
    if (mode & 0o40) {
      mode |= 0o10
    }
    if (mode & 0o4) {
      mode |= 0o1
    }
  }
  return mode
}
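
Two worked examples of the bit arithmetic above:

const modeFix = require('./lib/mode-fix.js')

// portable: group/other write bits stripped, owner rw forced
console.log(modeFix(0o666, false, true).toString(8)) // '644'
// directory: each readable class also gets the execute (search) bit
console.log(modeFix(0o640, true, false).toString(8)) // '750'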
node_modules/tar/lib/normalize-unicode.js (12 lines, generated, vendored, Normal file)
@@ -0,0 +1,12 @@
// warning: extremely hot code path.
// This has been meticulously optimized for use
// within npm install on large package trees.
// Do not edit without careful benchmarking.
const normalizeCache = Object.create(null)
const { hasOwnProperty } = Object.prototype
module.exports = s => {
  if (!hasOwnProperty.call(normalizeCache, s)) {
    normalizeCache[s] = s.normalize('NFKD')
  }
  return normalizeCache[s]
}
node_modules/tar/lib/normalize-windows-path.js (8 lines, generated, vendored, Normal file)
@@ -0,0 +1,8 @@
// on windows, either \ or / are valid directory separators.
// on unix, \ is a valid character in filenames.
// so, on windows, and only on windows, we replace all \ chars with /,
// so that we can use / as our one and only directory separator char.

const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform
module.exports = platform !== 'win32' ? p => p
  : p => p && p.replace(/\\/g, '/')
node_modules/tar/lib/pack.js (420 lines, generated, vendored, Normal file)
@@ -0,0 +1,420 @@
'use strict'

// A readable tar stream creator
// Technically, this is a transform stream that you write paths into,
// and tar format comes out of.
// The `add()` method is like `write()` but returns this,
// and end() return `this` as well, so you can
// do `new Pack(opt).add('files').add('dir').end().pipe(output)
// You could also do something like:
// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar'))

class PackJob {
  constructor (path, absolute) {
    this.path = path || './'
    this.absolute = absolute
    this.entry = null
    this.stat = null
    this.readdir = null
    this.pending = false
    this.ignore = false
    this.piped = false
  }
}

const MiniPass = require('minipass')
const zlib = require('minizlib')
const ReadEntry = require('./read-entry.js')
const WriteEntry = require('./write-entry.js')
const WriteEntrySync = WriteEntry.Sync
const WriteEntryTar = WriteEntry.Tar
const Yallist = require('yallist')
const EOF = Buffer.alloc(1024)
const ONSTAT = Symbol('onStat')
const ENDED = Symbol('ended')
const QUEUE = Symbol('queue')
const CURRENT = Symbol('current')
const PROCESS = Symbol('process')
const PROCESSING = Symbol('processing')
const PROCESSJOB = Symbol('processJob')
const JOBS = Symbol('jobs')
const JOBDONE = Symbol('jobDone')
const ADDFSENTRY = Symbol('addFSEntry')
const ADDTARENTRY = Symbol('addTarEntry')
const STAT = Symbol('stat')
const READDIR = Symbol('readdir')
const ONREADDIR = Symbol('onreaddir')
const PIPE = Symbol('pipe')
const ENTRY = Symbol('entry')
const ENTRYOPT = Symbol('entryOpt')
const WRITEENTRYCLASS = Symbol('writeEntryClass')
const WRITE = Symbol('write')
const ONDRAIN = Symbol('ondrain')

const fs = require('fs')
const path = require('path')
const warner = require('./warn-mixin.js')
const normPath = require('./normalize-windows-path.js')

const Pack = warner(class Pack extends MiniPass {
  constructor (opt) {
    super(opt)
    opt = opt || Object.create(null)
    this.opt = opt
    this.file = opt.file || ''
    this.cwd = opt.cwd || process.cwd()
    this.maxReadSize = opt.maxReadSize
    this.preservePaths = !!opt.preservePaths
    this.strict = !!opt.strict
    this.noPax = !!opt.noPax
    this.prefix = normPath(opt.prefix || '')
    this.linkCache = opt.linkCache || new Map()
    this.statCache = opt.statCache || new Map()
    this.readdirCache = opt.readdirCache || new Map()

    this[WRITEENTRYCLASS] = WriteEntry
    if (typeof opt.onwarn === 'function') {
      this.on('warn', opt.onwarn)
    }

    this.portable = !!opt.portable
    this.zip = null
    if (opt.gzip) {
      if (typeof opt.gzip !== 'object') {
        opt.gzip = {}
      }
      if (this.portable) {
        opt.gzip.portable = true
      }
      this.zip = new zlib.Gzip(opt.gzip)
      this.zip.on('data', chunk => super.write(chunk))
      this.zip.on('end', _ => super.end())
      this.zip.on('drain', _ => this[ONDRAIN]())
      this.on('resume', _ => this.zip.resume())
    } else {
      this.on('drain', this[ONDRAIN])
    }

    this.noDirRecurse = !!opt.noDirRecurse
    this.follow = !!opt.follow
    this.noMtime = !!opt.noMtime
    this.mtime = opt.mtime || null

    this.filter = typeof opt.filter === 'function' ? opt.filter : _ => true

    this[QUEUE] = new Yallist()
    this[JOBS] = 0
    this.jobs = +opt.jobs || 4
    this[PROCESSING] = false
    this[ENDED] = false
  }

  [WRITE] (chunk) {
    return super.write(chunk)
  }

  add (path) {
    this.write(path)
    return this
  }

  end (path) {
    if (path) {
      this.write(path)
    }
    this[ENDED] = true
    this[PROCESS]()
    return this
  }

  write (path) {
    if (this[ENDED]) {
      throw new Error('write after end')
    }

    if (path instanceof ReadEntry) {
      this[ADDTARENTRY](path)
    } else {
      this[ADDFSENTRY](path)
    }
    return this.flowing
  }

  [ADDTARENTRY] (p) {
    const absolute = normPath(path.resolve(this.cwd, p.path))
    // in this case, we don't have to wait for the stat
    if (!this.filter(p.path, p)) {
      p.resume()
    } else {
      const job = new PackJob(p.path, absolute, false)
      job.entry = new WriteEntryTar(p, this[ENTRYOPT](job))
      job.entry.on('end', _ => this[JOBDONE](job))
      this[JOBS] += 1
      this[QUEUE].push(job)
    }

    this[PROCESS]()
  }

  [ADDFSENTRY] (p) {
    const absolute = normPath(path.resolve(this.cwd, p))
    this[QUEUE].push(new PackJob(p, absolute))
    this[PROCESS]()
  }

  [STAT] (job) {
    job.pending = true
    this[JOBS] += 1
    const stat = this.follow ? 'stat' : 'lstat'
    fs[stat](job.absolute, (er, stat) => {
      job.pending = false
      this[JOBS] -= 1
      if (er) {
        this.emit('error', er)
      } else {
        this[ONSTAT](job, stat)
      }
    })
  }

  [ONSTAT] (job, stat) {
    this.statCache.set(job.absolute, stat)
    job.stat = stat

    // now we have the stat, we can filter it.
    if (!this.filter(job.path, stat)) {
      job.ignore = true
    }

    this[PROCESS]()
  }

  [READDIR] (job) {
    job.pending = true
    this[JOBS] += 1
    fs.readdir(job.absolute, (er, entries) => {
      job.pending = false
      this[JOBS] -= 1
      if (er) {
        return this.emit('error', er)
      }
      this[ONREADDIR](job, entries)
    })
  }

  [ONREADDIR] (job, entries) {
    this.readdirCache.set(job.absolute, entries)
    job.readdir = entries
    this[PROCESS]()
  }

  [PROCESS] () {
    if (this[PROCESSING]) {
      return
    }

    this[PROCESSING] = true
    for (let w = this[QUEUE].head;
      w !== null && this[JOBS] < this.jobs;
      w = w.next) {
      this[PROCESSJOB](w.value)
      if (w.value.ignore) {
        const p = w.next
        this[QUEUE].removeNode(w)
        w.next = p
      }
    }

    this[PROCESSING] = false

    if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) {
      if (this.zip) {
        this.zip.end(EOF)
      } else {
        super.write(EOF)
        super.end()
      }
    }
  }

  get [CURRENT] () {
    return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value
  }

  [JOBDONE] (job) {
    this[QUEUE].shift()
    this[JOBS] -= 1
    this[PROCESS]()
  }

  [PROCESSJOB] (job) {
    if (job.pending) {
      return
    }

    if (job.entry) {
      if (job === this[CURRENT] && !job.piped) {
        this[PIPE](job)
      }
      return
    }

    if (!job.stat) {
      if (this.statCache.has(job.absolute)) {
        this[ONSTAT](job, this.statCache.get(job.absolute))
      } else {
        this[STAT](job)
      }
    }
    if (!job.stat) {
      return
    }

    // filtered out!
    if (job.ignore) {
      return
    }

    if (!this.noDirRecurse && job.stat.isDirectory() && !job.readdir) {
      if (this.readdirCache.has(job.absolute)) {
        this[ONREADDIR](job, this.readdirCache.get(job.absolute))
      } else {
        this[READDIR](job)
      }
      if (!job.readdir) {
        return
      }
    }

    // we know it doesn't have an entry, because that got checked above
    job.entry = this[ENTRY](job)
    if (!job.entry) {
      job.ignore = true
      return
    }

    if (job === this[CURRENT] && !job.piped) {
      this[PIPE](job)
    }
  }

  [ENTRYOPT] (job) {
    return {
      onwarn: (code, msg, data) => this.warn(code, msg, data),
      noPax: this.noPax,
      cwd: this.cwd,
      absolute: job.absolute,
      preservePaths: this.preservePaths,
      maxReadSize: this.maxReadSize,
      strict: this.strict,
      portable: this.portable,
      linkCache: this.linkCache,
      statCache: this.statCache,
      noMtime: this.noMtime,
      mtime: this.mtime,
      prefix: this.prefix,
    }
  }

  [ENTRY] (job) {
    this[JOBS] += 1
    try {
      return new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job))
        .on('end', () => this[JOBDONE](job))
        .on('error', er => this.emit('error', er))
    } catch (er) {
      this.emit('error', er)
    }
  }

  [ONDRAIN] () {
    if (this[CURRENT] && this[CURRENT].entry) {
      this[CURRENT].entry.resume()
    }
  }

  // like .pipe() but using super, because our write() is special
  [PIPE] (job) {
    job.piped = true

    if (job.readdir) {
      job.readdir.forEach(entry => {
        const p = job.path
        const base = p === './' ? '' : p.replace(/\/*$/, '/')
        this[ADDFSENTRY](base + entry)
      })
    }

    const source = job.entry
    const zip = this.zip

    if (zip) {
      source.on('data', chunk => {
        if (!zip.write(chunk)) {
          source.pause()
        }
      })
    } else {
      source.on('data', chunk => {
        if (!super.write(chunk)) {
          source.pause()
        }
      })
    }
  }

  pause () {
    if (this.zip) {
      this.zip.pause()
    }
    return super.pause()
  }
})

class PackSync extends Pack {
  constructor (opt) {
    super(opt)
    this[WRITEENTRYCLASS] = WriteEntrySync
  }

  // pause/resume are no-ops in sync streams.
  pause () {}
  resume () {}

  [STAT] (job) {
    const stat = this.follow ? 'statSync' : 'lstatSync'
    this[ONSTAT](job, fs[stat](job.absolute))
  }

  [READDIR] (job, stat) {
    this[ONREADDIR](job, fs.readdirSync(job.absolute))
  }

  // gotta get it all in this tick
  [PIPE] (job) {
    const source = job.entry
    const zip = this.zip

    if (job.readdir) {
      job.readdir.forEach(entry => {
        const p = job.path
        const base = p === './' ? '' : p.replace(/\/*$/, '/')
        this[ADDFSENTRY](base + entry)
      })
    }

    if (zip) {
      source.on('data', chunk => {
        zip.write(chunk)
      })
    } else {
      source.on('data', chunk => {
        super[WRITE](chunk)
      })
    }
  }
}

Pack.Sync = PackSync

module.exports = Pack
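
A streaming sketch matching the header comment above, bypassing the high-level create() wrapper (output name illustrative):

const Pack = require('./lib/pack.js')
const fs = require('fs')

// write two paths into a gzipped tarball; add() queues a path and
// returns this, end() flushes the trailing EOF blocks
new Pack({ cwd: process.cwd(), gzip: true })
  .add('index.js')
  .add('lib')
  .end()
  .pipe(fs.createWriteStream('out.tgz'))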
node_modules/tar/lib/parse.js (509 lines, generated, vendored, Normal file)
File diff suppressed because it is too large
node_modules/tar/lib/path-reservations.js (156 lines, generated, vendored, Normal file)
@@ -0,0 +1,156 @@
// A path exclusive reservation system
// reserve([list, of, paths], fn)
// When the fn is first in line for all its paths, it
// is called with a cb that clears the reservation.
//
// Used by async unpack to avoid clobbering paths in use,
// while still allowing maximal safe parallelization.

const assert = require('assert')
const normalize = require('./normalize-unicode.js')
const stripSlashes = require('./strip-trailing-slashes.js')
const { join } = require('path')

const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform
const isWindows = platform === 'win32'

module.exports = () => {
  // path => [function or Set]
  // A Set object means a directory reservation
  // A fn is a direct reservation on that path
  const queues = new Map()

  // fn => {paths:[path,...], dirs:[path, ...]}
  const reservations = new Map()

  // return a set of parent dirs for a given path
  // '/a/b/c/d' -> ['/', '/a', '/a/b', '/a/b/c', '/a/b/c/d']
  const getDirs = path => {
    const dirs = path.split('/').slice(0, -1).reduce((set, path) => {
      if (set.length) {
        path = join(set[set.length - 1], path)
      }
      set.push(path || '/')
      return set
    }, [])
    return dirs
  }

  // functions currently running
  const running = new Set()

  // return the queues for each path the function cares about
  // fn => {paths, dirs}
  const getQueues = fn => {
    const res = reservations.get(fn)
    /* istanbul ignore if - unpossible */
    if (!res) {
      throw new Error('function does not have any path reservations')
    }
    return {
      paths: res.paths.map(path => queues.get(path)),
      dirs: [...res.dirs].map(path => queues.get(path)),
    }
  }

  // check if fn is first in line for all its paths, and is
  // included in the first set for all its dir queues
  const check = fn => {
    const { paths, dirs } = getQueues(fn)
    return paths.every(q => q[0] === fn) &&
      dirs.every(q => q[0] instanceof Set && q[0].has(fn))
  }

  // run the function if it's first in line and not already running
  const run = fn => {
    if (running.has(fn) || !check(fn)) {
      return false
    }
    running.add(fn)
    fn(() => clear(fn))
    return true
  }

  const clear = fn => {
    if (!running.has(fn)) {
      return false
    }

    const { paths, dirs } = reservations.get(fn)
    const next = new Set()

    paths.forEach(path => {
      const q = queues.get(path)
      assert.equal(q[0], fn)
      if (q.length === 1) {
        queues.delete(path)
      } else {
        q.shift()
        if (typeof q[0] === 'function') {
          next.add(q[0])
        } else {
          q[0].forEach(fn => next.add(fn))
        }
      }
    })

    dirs.forEach(dir => {
      const q = queues.get(dir)
      assert(q[0] instanceof Set)
      if (q[0].size === 1 && q.length === 1) {
        queues.delete(dir)
      } else if (q[0].size === 1) {
        q.shift()

        // must be a function or else the Set would've been reused
        next.add(q[0])
      } else {
        q[0].delete(fn)
      }
    })
    running.delete(fn)

    next.forEach(fn => run(fn))
    return true
  }

  const reserve = (paths, fn) => {
    // collide on matches across case and unicode normalization
    // On windows, thanks to the magic of 8.3 shortnames, it is fundamentally
    // impossible to determine whether two paths refer to the same thing on
    // disk, without asking the kernel for a shortname.
    // So, we just pretend that every path matches every other path here,
    // effectively removing all parallelization on windows.
    paths = isWindows ? ['win32 parallelization disabled'] : paths.map(p => {
      // don't need normPath, because we skip this entirely for windows
      return normalize(stripSlashes(join(p))).toLowerCase()
    })

    const dirs = new Set(
      paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b))
    )
    reservations.set(fn, { dirs, paths })
    paths.forEach(path => {
      const q = queues.get(path)
      if (!q) {
        queues.set(path, [fn])
      } else {
        q.push(fn)
      }
    })
    dirs.forEach(dir => {
      const q = queues.get(dir)
      if (!q) {
        queues.set(dir, [new Set([fn])])
      } else if (q[q.length - 1] instanceof Set) {
        q[q.length - 1].add(fn)
      } else {
        q.push(new Set([fn]))
      }
    })

    return run(fn)
  }

  return { check, reserve }
}
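
A sketch of the reservation discipline: two functions reserving the same path run strictly in order, the second starting only when the first calls the clear callback it was handed:

const pathReservations = require('./lib/path-reservations.js')

const reservations = pathReservations()

reservations.reserve(['out/a.txt'], done => {
  console.log('first writer')
  setTimeout(done, 100)
})
reservations.reserve(['out/a.txt'], done => {
  console.log('second writer') // runs only after the first clears
  done()
})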
node_modules/tar/lib/pax.js (150 lines, generated, vendored, Normal file)
@@ -0,0 +1,150 @@
|
||||
'use strict'
|
||||
const Header = require('./header.js')
|
||||
const path = require('path')
|
||||
|
||||
class Pax {
|
||||
constructor (obj, global) {
|
||||
this.atime = obj.atime || null
|
||||
this.charset = obj.charset || null
|
||||
this.comment = obj.comment || null
|
||||
this.ctime = obj.ctime || null
|
||||
this.gid = obj.gid || null
|
||||
this.gname = obj.gname || null
|
||||
this.linkpath = obj.linkpath || null
|
||||
this.mtime = obj.mtime || null
|
||||
this.path = obj.path || null
|
||||
this.size = obj.size || null
|
||||
this.uid = obj.uid || null
|
||||
this.uname = obj.uname || null
|
||||
this.dev = obj.dev || null
|
||||
this.ino = obj.ino || null
|
||||
this.nlink = obj.nlink || null
|
||||
this.global = global || false
|
||||
}
|
||||
|
||||
encode () {
|
||||
const body = this.encodeBody()
|
||||
if (body === '') {
|
||||
return null
|
||||
}
|
||||
|
||||
const bodyLen = Buffer.byteLength(body)
|
||||
// round up to 512 bytes
|
||||
// add 512 for header
|
||||
const bufLen = 512 * Math.ceil(1 + bodyLen / 512)
|
||||
const buf = Buffer.allocUnsafe(bufLen)
|
||||
|
||||
// 0-fill the header section, it might not hit every field
|
||||
for (let i = 0; i < 512; i++) {
|
||||
buf[i] = 0
|
||||
}
|
||||
|
||||
new Header({
|
||||
// XXX split the path
|
||||
// then the path should be PaxHeader + basename, but less than 99,
|
||||
// prepend with the dirname
|
||||
path: ('PaxHeader/' + path.basename(this.path)).slice(0, 99),
|
||||
mode: this.mode || 0o644,
|
||||
uid: this.uid || null,
|
||||
gid: this.gid || null,
|
||||
size: bodyLen,
|
||||
mtime: this.mtime || null,
|
||||
type: this.global ? 'GlobalExtendedHeader' : 'ExtendedHeader',
|
||||
linkpath: '',
|
||||
uname: this.uname || '',
|
||||
gname: this.gname || '',
|
||||
devmaj: 0,
|
||||
devmin: 0,
|
||||
atime: this.atime || null,
|
||||
ctime: this.ctime || null,
|
||||
}).encode(buf)
|
||||
|
||||
buf.write(body, 512, bodyLen, 'utf8')
|
||||
|
||||
// null pad after the body
|
||||
for (let i = bodyLen + 512; i < buf.length; i++) {
|
||||
buf[i] = 0
|
||||
}
|
||||
|
||||
return buf
|
||||
}
|
||||
|
||||
encodeBody () {
|
||||
return (
|
||||
this.encodeField('path') +
|
||||
this.encodeField('ctime') +
|
||||
this.encodeField('atime') +
|
||||
this.encodeField('dev') +
|
||||
this.encodeField('ino') +
|
||||
this.encodeField('nlink') +
|
||||
this.encodeField('charset') +
|
||||
this.encodeField('comment') +
|
||||
this.encodeField('gid') +
|
||||
this.encodeField('gname') +
|
||||
this.encodeField('linkpath') +
|
||||
this.encodeField('mtime') +
|
||||
this.encodeField('size') +
|
||||
this.encodeField('uid') +
|
||||
this.encodeField('uname')
|
||||
)
|
||||
}
|
||||
|
||||
encodeField (field) {
|
||||
if (this[field] === null || this[field] === undefined) {
|
||||
return ''
|
||||
}
|
||||
const v = this[field] instanceof Date ? this[field].getTime() / 1000
|
||||
: this[field]
|
||||
const s = ' ' +
|
||||
(field === 'dev' || field === 'ino' || field === 'nlink'
|
||||
? 'SCHILY.' : '') +
|
||||
field + '=' + v + '\n'
|
||||
const byteLen = Buffer.byteLength(s)
|
||||
// the digits includes the length of the digits in ascii base-10
|
||||
// so if it's 9 characters, then adding 1 for the 9 makes it 10
|
||||
// which makes it 11 chars.
|
||||
let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1
|
||||
if (byteLen + digits >= Math.pow(10, digits)) {
|
||||
digits += 1
|
||||
}
|
||||
const len = digits + byteLen
|
||||
return len + s
|
||||
}
|
||||
}
|
||||
|
||||
Pax.parse = (string, ex, g) => new Pax(merge(parseKV(string), ex), g)
|
||||
|
||||
const merge = (a, b) =>
|
||||
b ? Object.keys(a).reduce((s, k) => (s[k] = a[k], s), b) : a
|
||||
|
||||
const parseKV = string =>
|
||||
string
|
||||
.replace(/\n$/, '')
|
||||
.split('\n')
|
||||
.reduce(parseKVLine, Object.create(null))
|
||||
|
||||
const parseKVLine = (set, line) => {
|
||||
const n = parseInt(line, 10)
|
||||
|
||||
// XXX Values with \n in them will fail this.
|
||||
// Refactor to not be a naive line-by-line parse.
|
||||
if (n !== Buffer.byteLength(line) + 1) {
|
||||
return set
|
||||
}
|
||||
|
||||
line = line.slice((n + ' ').length)
|
||||
const kv = line.split('=')
|
||||
const k = kv.shift().replace(/^SCHILY\.(dev|ino|nlink)/, '$1')
|
||||
if (!k) {
|
||||
return set
|
||||
}
|
||||
|
||||
const v = kv.join('=')
|
||||
set[k] = /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k)
|
||||
? new Date(v * 1000)
|
||||
: /^[0-9]+$/.test(v) ? +v
|
||||
: v
|
||||
return set
|
||||
}
|
||||
|
||||
module.exports = Pax
|
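For orientation, a minimal sketch of how the Pax class above round-trips a record through encodeBody() and Pax.parse(). The property values here are made-up placeholders; Pax is assumed reachable via the package's top-level exports.

const { Pax } = require('tar')

// encodeField() emits "<len> <key>=<value>\n", where <len> counts the
// whole line including its own digits; Date values become epoch seconds
const pax = new Pax({
  path: 'some/deeply/nested/file.txt',
  mtime: new Date(1000000000 * 1000),
})
console.log(pax.encodeBody())
// "36 path=some/deeply/nested/file.txt\n20 mtime=1000000000\n"

// Pax.parse reverses parseKV/parseKVLine: *time keys come back as
// Dates, all-digit values as numbers, everything else as strings
const back = Pax.parse(pax.encodeBody(), null, false)
console.log(back.path)                  // 'some/deeply/nested/file.txt'
console.log(back.mtime instanceof Date) // true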
107
node_modules/tar/lib/read-entry.js
generated
vendored
Normal file
@@ -0,0 +1,107 @@
'use strict'
const MiniPass = require('minipass')
const normPath = require('./normalize-windows-path.js')

const SLURP = Symbol('slurp')
module.exports = class ReadEntry extends MiniPass {
  constructor (header, ex, gex) {
    super()
    // read entries always start life paused. this is to avoid the
    // situation where Minipass's auto-ending empty streams results
    // in an entry ending before we're ready for it.
    this.pause()
    this.extended = ex
    this.globalExtended = gex
    this.header = header
    this.startBlockSize = 512 * Math.ceil(header.size / 512)
    this.blockRemain = this.startBlockSize
    this.remain = header.size
    this.type = header.type
    this.meta = false
    this.ignore = false
    switch (this.type) {
      case 'File':
      case 'OldFile':
      case 'Link':
      case 'SymbolicLink':
      case 'CharacterDevice':
      case 'BlockDevice':
      case 'Directory':
      case 'FIFO':
      case 'ContiguousFile':
      case 'GNUDumpDir':
        break

      case 'NextFileHasLongLinkpath':
      case 'NextFileHasLongPath':
      case 'OldGnuLongPath':
      case 'GlobalExtendedHeader':
      case 'ExtendedHeader':
      case 'OldExtendedHeader':
        this.meta = true
        break

      // NOTE: gnutar and bsdtar treat unrecognized types as 'File'
      // it may be worth doing the same, but with a warning.
      default:
        this.ignore = true
    }

    this.path = normPath(header.path)
    this.mode = header.mode
    if (this.mode) {
      this.mode = this.mode & 0o7777
    }
    this.uid = header.uid
    this.gid = header.gid
    this.uname = header.uname
    this.gname = header.gname
    this.size = header.size
    this.mtime = header.mtime
    this.atime = header.atime
    this.ctime = header.ctime
    this.linkpath = normPath(header.linkpath)
    this.uname = header.uname
    this.gname = header.gname

    if (ex) {
      this[SLURP](ex)
    }
    if (gex) {
      this[SLURP](gex, true)
    }
  }

  write (data) {
    const writeLen = data.length
    if (writeLen > this.blockRemain) {
      throw new Error('writing more to entry than is appropriate')
    }

    const r = this.remain
    const br = this.blockRemain
    this.remain = Math.max(0, r - writeLen)
    this.blockRemain = Math.max(0, br - writeLen)
    if (this.ignore) {
      return true
    }

    if (r >= writeLen) {
      return super.write(data)
    }

    // r < writeLen
    return super.write(data.slice(0, r))
  }

  [SLURP] (ex, global) {
    for (const k in ex) {
      // we slurp in everything except for the path attribute in
      // a global extended header, because that's weird.
      if (ex[k] !== null && ex[k] !== undefined &&
          !(global && k === 'path')) {
        this[k] = k === 'path' || k === 'linkpath' ? normPath(ex[k]) : ex[k]
      }
    }
  }
}
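A minimal sketch of the block accounting in ReadEntry.write() above. In real use these entries are produced by the tar Parse/Unpack machinery rather than constructed by hand; the object literal below is a stand-in for a parsed Header.

const { ReadEntry } = require('tar')

// stand-in for a parsed Header: 5 data bytes padded to one 512B block
const entry = new ReadEntry({ path: 'hello.txt', type: 'File', size: 5 })

entry.on('data', chunk => console.log(chunk.length)) // 5, not 512
entry.on('end', () => console.log('entry done'))
entry.resume() // entries start life paused (see the constructor)

// the parser feeds whole 512-byte blocks; write() trims to `remain`
entry.write(Buffer.alloc(512))
entry.end()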
246
node_modules/tar/lib/replace.js
generated
vendored
Normal file
@@ -0,0 +1,246 @@
'use strict'

// tar -r
const hlo = require('./high-level-opt.js')
const Pack = require('./pack.js')
const fs = require('fs')
const fsm = require('fs-minipass')
const t = require('./list.js')
const path = require('path')

// starting at the head of the file, read a Header
// If the checksum is invalid, that's our position to start writing
// If it is, jump forward by the specified size (round up to 512)
// and try again.
// Write the new Pack stream starting there.

const Header = require('./header.js')

module.exports = (opt_, files, cb) => {
  const opt = hlo(opt_)

  if (!opt.file) {
    throw new TypeError('file is required')
  }

  if (opt.gzip) {
    throw new TypeError('cannot append to compressed archives')
  }

  if (!files || !Array.isArray(files) || !files.length) {
    throw new TypeError('no files or directories specified')
  }

  files = Array.from(files)

  return opt.sync ? replaceSync(opt, files)
    : replace(opt, files, cb)
}

const replaceSync = (opt, files) => {
  const p = new Pack.Sync(opt)

  let threw = true
  let fd
  let position

  try {
    try {
      fd = fs.openSync(opt.file, 'r+')
    } catch (er) {
      if (er.code === 'ENOENT') {
        fd = fs.openSync(opt.file, 'w+')
      } else {
        throw er
      }
    }

    const st = fs.fstatSync(fd)
    const headBuf = Buffer.alloc(512)

    POSITION: for (position = 0; position < st.size; position += 512) {
      for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) {
        bytes = fs.readSync(
          fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos
        )

        if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b) {
          throw new Error('cannot append to compressed archives')
        }

        if (!bytes) {
          break POSITION
        }
      }

      const h = new Header(headBuf)
      if (!h.cksumValid) {
        break
      }
      const entryBlockSize = 512 * Math.ceil(h.size / 512)
      if (position + entryBlockSize + 512 > st.size) {
        break
      }
      // the 512 for the header we just parsed will be added as well
      // also jump ahead all the blocks for the body
      position += entryBlockSize
      if (opt.mtimeCache) {
        opt.mtimeCache.set(h.path, h.mtime)
      }
    }
    threw = false

    streamSync(opt, p, position, fd, files)
  } finally {
    if (threw) {
      try {
        fs.closeSync(fd)
      } catch (er) {}
    }
  }
}

const streamSync = (opt, p, position, fd, files) => {
  const stream = new fsm.WriteStreamSync(opt.file, {
    fd: fd,
    start: position,
  })
  p.pipe(stream)
  addFilesSync(p, files)
}

const replace = (opt, files, cb) => {
  files = Array.from(files)
  const p = new Pack(opt)

  const getPos = (fd, size, cb_) => {
    const cb = (er, pos) => {
      if (er) {
        fs.close(fd, _ => cb_(er))
      } else {
        cb_(null, pos)
      }
    }

    let position = 0
    if (size === 0) {
      return cb(null, 0)
    }

    let bufPos = 0
    const headBuf = Buffer.alloc(512)
    const onread = (er, bytes) => {
      if (er) {
        return cb(er)
      }
      bufPos += bytes
      if (bufPos < 512 && bytes) {
        return fs.read(
          fd, headBuf, bufPos, headBuf.length - bufPos,
          position + bufPos, onread
        )
      }

      if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b) {
        return cb(new Error('cannot append to compressed archives'))
      }

      // truncated header
      if (bufPos < 512) {
        return cb(null, position)
      }

      const h = new Header(headBuf)
      if (!h.cksumValid) {
        return cb(null, position)
      }

      const entryBlockSize = 512 * Math.ceil(h.size / 512)
      if (position + entryBlockSize + 512 > size) {
        return cb(null, position)
      }

      position += entryBlockSize + 512
      if (position >= size) {
        return cb(null, position)
      }

      if (opt.mtimeCache) {
        opt.mtimeCache.set(h.path, h.mtime)
      }
      bufPos = 0
      fs.read(fd, headBuf, 0, 512, position, onread)
    }
    fs.read(fd, headBuf, 0, 512, position, onread)
  }

  const promise = new Promise((resolve, reject) => {
    p.on('error', reject)
    let flag = 'r+'
    const onopen = (er, fd) => {
      if (er && er.code === 'ENOENT' && flag === 'r+') {
        flag = 'w+'
        return fs.open(opt.file, flag, onopen)
      }

      if (er) {
        return reject(er)
      }

      fs.fstat(fd, (er, st) => {
        if (er) {
          return fs.close(fd, () => reject(er))
        }

        getPos(fd, st.size, (er, position) => {
          if (er) {
            return reject(er)
          }
          const stream = new fsm.WriteStream(opt.file, {
            fd: fd,
            start: position,
          })
          p.pipe(stream)
          stream.on('error', reject)
          stream.on('close', resolve)
          addFilesAsync(p, files)
        })
      })
    }
    fs.open(opt.file, flag, onopen)
  })

  return cb ? promise.then(cb, cb) : promise
}

const addFilesSync = (p, files) => {
  files.forEach(file => {
    if (file.charAt(0) === '@') {
      t({
        file: path.resolve(p.cwd, file.slice(1)),
        sync: true,
        noResume: true,
        onentry: entry => p.add(entry),
      })
    } else {
      p.add(file)
    }
  })
  p.end()
}

const addFilesAsync = (p, files) => {
  while (files.length) {
    const file = files.shift()
    if (file.charAt(0) === '@') {
      return t({
        file: path.resolve(p.cwd, file.slice(1)),
        noResume: true,
        onentry: entry => p.add(entry),
      }).then(_ => addFilesAsync(p, files))
    } else {
      p.add(file)
    }
  }
  p.end()
}
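Usage sketch for the module above (tar.r / tar.replace). The archive and file names are placeholders; the gzip and missing-file errors come straight from the option checks at the top of the module.

const tar = require('tar')

// synchronous append: headers are scanned from position 0 and writing
// starts at the first invalid or truncated header (replaceSync above)
tar.replace({ file: 'archive.tar', sync: true }, ['newfile.txt'])

// the async form returns a promise when no callback is passed
tar.replace({ file: 'archive.tar' }, ['another.txt'])
  .then(() => console.log('appended'))

// refused up front, before any I/O:
// tar.replace({ file: 'archive.tgz', gzip: true }, ['x'])
//   -> TypeError: cannot append to compressed archives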
24
node_modules/tar/lib/strip-absolute-path.js
generated
vendored
Normal file
@@ -0,0 +1,24 @@
// unix absolute paths are also absolute on win32, so we use this for both
const { isAbsolute, parse } = require('path').win32

// returns [root, stripped]
// Note that windows will think that //x/y/z/a has a "root" of //x/y, and in
// those cases, we want to sanitize it to x/y/z/a, not z/a, so we strip /
// explicitly if it's the first character.
// drive-specific relative paths on Windows get their root stripped off even
// though they are not absolute, so `c:../foo` becomes ['c:', '../foo']
module.exports = path => {
  let r = ''

  let parsed = parse(path)
  while (isAbsolute(path) || parsed.root) {
    // windows will think that //x/y/z has a "root" of //x/y/
    // but strip the //?/C:/ off of //?/C:/path
    const root = path.charAt(0) === '/' && path.slice(0, 4) !== '//?/' ? '/'
      : parsed.root
    path = path.slice(root.length)
    r += root
    parsed = parse(path)
  }
  return [r, path]
}
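The comments above pin down the edge cases; a quick sketch of the expected outputs, assuming the helper is required by its in-package path:

const stripAbsolutePath = require('tar/lib/strip-absolute-path.js')

console.log(stripAbsolutePath('/foo/bar'))  // [ '/', 'foo/bar' ]
// leading slashes are peeled one at a time, so //x/y/z/a keeps x/y/z/a
console.log(stripAbsolutePath('//x/y/z/a')) // [ '//', 'x/y/z/a' ]
// drive-relative windows paths lose their root despite not being absolute
console.log(stripAbsolutePath('c:../foo'))  // [ 'c:', '../foo' ]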
13
node_modules/tar/lib/strip-trailing-slashes.js
generated
vendored
Normal file
@@ -0,0 +1,13 @@
// warning: extremely hot code path.
// This has been meticulously optimized for use
// within npm install on large package trees.
// Do not edit without careful benchmarking.
module.exports = str => {
  let i = str.length - 1
  let slashesStart = -1
  while (i > -1 && str.charAt(i) === '/') {
    slashesStart = i
    i--
  }
  return slashesStart === -1 ? str : str.slice(0, slashesStart)
}
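Behavior sketch for the hot-path helper above:

const stripTrailingSlashes = require('tar/lib/strip-trailing-slashes.js')

console.log(stripTrailingSlashes('foo/bar///')) // 'foo/bar'
console.log(stripTrailingSlashes('foo/bar'))    // 'foo/bar' (returned as-is)
console.log(stripTrailingSlashes('///'))        // '' (every char is a slash)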
44
node_modules/tar/lib/types.js
generated
vendored
Normal file
@@ -0,0 +1,44 @@
'use strict'
// map types from key to human-friendly name
exports.name = new Map([
  ['0', 'File'],
  // same as File
  ['', 'OldFile'],
  ['1', 'Link'],
  ['2', 'SymbolicLink'],
  // Devices and FIFOs aren't fully supported
  // they are parsed, but skipped when unpacking
  ['3', 'CharacterDevice'],
  ['4', 'BlockDevice'],
  ['5', 'Directory'],
  ['6', 'FIFO'],
  // same as File
  ['7', 'ContiguousFile'],
  // pax headers
  ['g', 'GlobalExtendedHeader'],
  ['x', 'ExtendedHeader'],
  // vendor-specific stuff
  // skip
  ['A', 'SolarisACL'],
  // like 5, but with data, which should be skipped
  ['D', 'GNUDumpDir'],
  // metadata only, skip
  ['I', 'Inode'],
  // data = link path of next file
  ['K', 'NextFileHasLongLinkpath'],
  // data = path of next file
  ['L', 'NextFileHasLongPath'],
  // skip
  ['M', 'ContinuationFile'],
  // like L
  ['N', 'OldGnuLongPath'],
  // skip
  ['S', 'SparseFile'],
  // skip
  ['V', 'TapeVolumeHeader'],
  // like x
  ['X', 'OldExtendedHeader'],
])

// map the other direction
exports.code = new Map(Array.from(exports.name).map(kv => [kv[1], kv[0]]))
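The two maps above invert each other; a quick sketch:

const { types } = require('tar')

console.log(types.name.get('5'))            // 'Directory'
console.log(types.name.get(''))             // 'OldFile' (pre-ustar entries)
console.log(types.code.get('SymbolicLink')) // '2'
// unrecognized type chars are simply absent from the map;
// ReadEntry sets ignore = true for those entries
console.log(types.name.get('Z'))            // undefined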
906
node_modules/tar/lib/unpack.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
40
node_modules/tar/lib/update.js
generated
vendored
Normal file
@@ -0,0 +1,40 @@
'use strict'

// tar -u

const hlo = require('./high-level-opt.js')
const r = require('./replace.js')
// just call tar.r with the filter and mtimeCache

module.exports = (opt_, files, cb) => {
  const opt = hlo(opt_)

  if (!opt.file) {
    throw new TypeError('file is required')
  }

  if (opt.gzip) {
    throw new TypeError('cannot append to compressed archives')
  }

  if (!files || !Array.isArray(files) || !files.length) {
    throw new TypeError('no files or directories specified')
  }

  files = Array.from(files)

  mtimeFilter(opt)
  return r(opt, files, cb)
}

const mtimeFilter = opt => {
  const filter = opt.filter

  if (!opt.mtimeCache) {
    opt.mtimeCache = new Map()
  }

  opt.filter = filter ? (path, stat) =>
    filter(path, stat) && !(opt.mtimeCache.get(path) > stat.mtime)
    : (path, stat) => !(opt.mtimeCache.get(path) > stat.mtime)
}
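Usage sketch for tar.u above. The wrapped filter means a file is skipped only when the archive's copy is strictly newer than the one on disk, using the mtimeCache that replace.js fills in during its header scan; archive and file names are placeholders.

const tar = require('tar')

// appends only files whose archived copy is not newer than on disk
tar.update({ file: 'archive.tar' }, ['maybe-changed.txt'])
  .then(() => console.log('update complete'))

// a user-supplied filter runs first, then the mtime check
tar.update(
  { file: 'archive.tar', filter: path => path.endsWith('.txt') },
  ['docs/']
).then(() => console.log('done'))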
24
node_modules/tar/lib/warn-mixin.js
generated
vendored
Normal file
@@ -0,0 +1,24 @@
'use strict'
module.exports = Base => class extends Base {
  warn (code, message, data = {}) {
    if (this.file) {
      data.file = this.file
    }
    if (this.cwd) {
      data.cwd = this.cwd
    }
    data.code = message instanceof Error && message.code || code
    data.tarCode = code
    if (!this.strict && data.recoverable !== false) {
      if (message instanceof Error) {
        data = Object.assign(message, data)
        message = message.message
      }
      this.emit('warn', data.tarCode, message, data)
    } else if (message instanceof Error) {
      this.emit('error', Object.assign(message, data))
    } else {
      this.emit('error', Object.assign(new Error(`${code}: ${message}`), data))
    }
  }
}
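A minimal sketch of applying the mixin above to a plain EventEmitter, exercising both branches: non-strict recoverable problems emit 'warn', while recoverable: false (or strict mode) upgrades them to 'error'. The codes shown are illustrative.

const warner = require('tar/lib/warn-mixin.js')
const EventEmitter = require('events')

class Demo extends warner(EventEmitter) {}

const d = new Demo()
d.on('warn', (code, message) => console.log('warn:', code, message))
d.on('error', er => console.log('error:', er.message))

// non-strict + recoverable: takes the 'warn' branch
d.warn('TAR_ENTRY_INFO', 'odd but survivable')

// recoverable: false forces the 'error' branch even when not strict
d.warn('TAR_BAD_ARCHIVE', 'cannot continue', { recoverable: false })
// error: TAR_BAD_ARCHIVE: cannot continue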
23
node_modules/tar/lib/winchars.js
generated
vendored
Normal file
@@ -0,0 +1,23 @@
'use strict'

// When writing files on Windows, translate the characters to their
// 0xf000 higher-encoded versions.

const raw = [
  '|',
  '<',
  '>',
  '?',
  ':',
]

const win = raw.map(char =>
  String.fromCharCode(0xf000 + char.charCodeAt(0)))

const toWin = new Map(raw.map((char, i) => [char, win[i]]))
const toRaw = new Map(win.map((char, i) => [char, raw[i]]))

module.exports = {
  encode: s => raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s),
  decode: s => win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s),
}
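Round-trip sketch for the table above: each reserved character maps to its 0xf000-offset private-use codepoint and back.

const winchars = require('tar/lib/winchars.js')

const encoded = winchars.encode('a<b>c:d')
console.log(encoded === 'a<b>c:d')    // false: '<' is now U+F03C, etc.
console.log(winchars.decode(encoded)) // 'a<b>c:d'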
546
node_modules/tar/lib/write-entry.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
15
node_modules/tar/node_modules/minipass/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,15 @@
The ISC License

Copyright (c) 2017-2023 npm, Inc., Isaac Z. Schlueter, and Contributors

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
769
node_modules/tar/node_modules/minipass/README.md
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
150
node_modules/tar/node_modules/minipass/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,150 @@
/// <reference types="node" />

// Note: marking anything protected or private in the exported
// class will limit Minipass's ability to be used as the base
// for mixin classes.
import { EventEmitter } from 'events'
import { Stream } from 'stream'

declare namespace Minipass {
  type Encoding = BufferEncoding | 'buffer' | null

  interface Writable extends EventEmitter {
    end(): any
    write(chunk: any, ...args: any[]): any
  }

  interface Readable extends EventEmitter {
    pause(): any
    resume(): any
    pipe(): any
  }

  type DualIterable<T> = Iterable<T> & AsyncIterable<T>

  type ContiguousData = Buffer | ArrayBufferLike | ArrayBufferView | string

  type BufferOrString = Buffer | string

  interface SharedOptions {
    async?: boolean
    signal?: AbortSignal
  }

  interface StringOptions extends SharedOptions {
    encoding: BufferEncoding
    objectMode?: boolean
  }

  interface BufferOptions extends SharedOptions {
    encoding?: null | 'buffer'
    objectMode?: boolean
  }

  interface ObjectModeOptions extends SharedOptions {
    objectMode: true
  }

  interface PipeOptions {
    end?: boolean
    proxyErrors?: boolean
  }

  type Options<T> = T extends string
    ? StringOptions
    : T extends Buffer
    ? BufferOptions
    : ObjectModeOptions
}

declare class Minipass<
  RType extends any = Buffer,
  WType extends any = RType extends Minipass.BufferOrString
    ? Minipass.ContiguousData
    : RType
>
  extends Stream
  implements Minipass.DualIterable<RType>
{
  static isStream(stream: any): stream is Minipass.Readable | Minipass.Writable

  readonly bufferLength: number
  readonly flowing: boolean
  readonly writable: boolean
  readonly readable: boolean
  readonly aborted: boolean
  readonly paused: boolean
  readonly emittedEnd: boolean
  readonly destroyed: boolean

  /**
   * Technically writable, but mutating it can change the type,
   * so is not safe to do in TypeScript.
   */
  readonly objectMode: boolean
  async: boolean

  /**
   * Note: encoding is not actually read-only, and setEncoding(enc)
   * exists. However, this type definition will insist that TypeScript
   * programs declare the type of a Minipass stream up front, and if
   * that type is string, then an encoding MUST be set in the ctor. If
   * the type is Buffer, then the encoding must be missing, or set to
   * 'buffer' or null. If the type is anything else, then objectMode
   * must be set in the constructor options. So there is effectively
   * no allowed way that a TS program can set the encoding after
   * construction, as doing so will destroy any hope of type safety.
   * TypeScript does not provide many options for changing the type of
   * an object at run-time, which is what changing the encoding does.
   */
  readonly encoding: Minipass.Encoding
  // setEncoding(encoding: Encoding): void

  // Options required if not reading buffers
  constructor(
    ...args: RType extends Buffer
      ? [] | [Minipass.Options<RType>]
      : [Minipass.Options<RType>]
  )

  write(chunk: WType, cb?: () => void): boolean
  write(chunk: WType, encoding?: Minipass.Encoding, cb?: () => void): boolean
  read(size?: number): RType
  end(cb?: () => void): this
  end(chunk: any, cb?: () => void): this
  end(chunk: any, encoding?: Minipass.Encoding, cb?: () => void): this
  pause(): void
  resume(): void
  promise(): Promise<void>
  collect(): Promise<RType[]>

  concat(): RType extends Minipass.BufferOrString ? Promise<RType> : never
  destroy(er?: any): void
  pipe<W extends Minipass.Writable>(dest: W, opts?: Minipass.PipeOptions): W
  unpipe<W extends Minipass.Writable>(dest: W): void

  /**
   * alias for on()
   */
  addEventHandler(event: string, listener: (...args: any[]) => any): this

  on(event: string, listener: (...args: any[]) => any): this
  on(event: 'data', listener: (chunk: RType) => any): this
  on(event: 'error', listener: (error: any) => any): this
  on(
    event:
      | 'readable'
      | 'drain'
      | 'resume'
      | 'end'
      | 'prefinish'
      | 'finish'
      | 'close',
    listener: () => any
  ): this

  [Symbol.iterator](): Generator<RType, void, void>
  [Symbol.asyncIterator](): AsyncGenerator<RType, void, void>
}

export = Minipass
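The typings above encode minipass's rule that a stream's value type is fixed at construction. A small JavaScript sketch of the corresponding runtime behavior, using the collect/concat helpers declared in the class body:

const Minipass = require('minipass')

// default RType = Buffer: chunks buffer until read or piped
const mp = new Minipass()
mp.write('hello, ')
mp.end('world')
mp.concat().then(buf => console.log(buf.toString())) // 'hello, world'

// string streams must pick their encoding in the constructor,
// mirroring the StringOptions branch of the Options<T> type
const sp = new Minipass({ encoding: 'utf8' })
sp.end('abc')
sp.collect().then(chunks => console.log(chunks)) // [ 'abc' ]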
697
node_modules/tar/node_modules/minipass/index.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
697
node_modules/tar/node_modules/minipass/index.mjs
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
76
node_modules/tar/node_modules/minipass/package.json
generated
vendored
Normal file
@@ -0,0 +1,76 @@
{
  "name": "minipass",
  "version": "4.2.4",
  "description": "minimal implementation of a PassThrough stream",
  "main": "./index.js",
  "module": "./index.mjs",
  "types": "./index.d.ts",
  "exports": {
    ".": {
      "import": {
        "types": "./index.d.ts",
        "default": "./index.mjs"
      },
      "require": {
        "types": "./index.d.ts",
        "default": "./index.js"
      }
    },
    "./package.json": "./package.json"
  },
  "devDependencies": {
    "@types/node": "^17.0.41",
    "end-of-stream": "^1.4.0",
    "node-abort-controller": "^3.1.1",
    "prettier": "^2.6.2",
    "tap": "^16.2.0",
    "through2": "^2.0.3",
    "ts-node": "^10.8.1",
    "typedoc": "^0.23.24",
    "typescript": "^4.7.3"
  },
  "scripts": {
    "pretest": "npm run prepare",
    "presnap": "npm run prepare",
    "prepare": "node ./scripts/transpile-to-esm.js",
    "snap": "tap",
    "test": "tap",
    "preversion": "npm test",
    "postversion": "npm publish",
    "postpublish": "git push origin --follow-tags",
    "typedoc": "typedoc ./index.d.ts",
    "format": "prettier --write . --loglevel warn"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/isaacs/minipass.git"
  },
  "keywords": [
    "passthrough",
    "stream"
  ],
  "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
  "license": "ISC",
  "files": [
    "index.d.ts",
    "index.js",
    "index.mjs"
  ],
  "tap": {
    "check-coverage": true
  },
  "engines": {
    "node": ">=8"
  },
  "prettier": {
    "semi": false,
    "printWidth": 80,
    "tabWidth": 2,
    "useTabs": false,
    "singleQuote": true,
    "jsxSingleQuote": false,
    "bracketSameLine": true,
    "arrowParens": "avoid",
    "endOfLine": "lf"
  }
}
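The "exports" map above is what makes the dual CommonJS/ESM build work: the same specifier resolves to index.js under require and to index.mjs under import, with both conditions sharing index.d.ts for types. A sketch of the two consumer sides:

// CommonJS consumers get ./index.js via the "require" condition
const Minipass = require('minipass')

// ESM consumers get ./index.mjs via the "import" condition:
//   import Minipass from 'minipass'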
75
node_modules/tar/package.json
generated
vendored
Normal file
@@ -0,0 +1,75 @@
{
  "author": "GitHub Inc.",
  "name": "tar",
  "description": "tar for node",
  "version": "6.1.13",
  "repository": {
    "type": "git",
    "url": "https://github.com/npm/node-tar.git"
  },
  "scripts": {
    "genparse": "node scripts/generate-parse-fixtures.js",
    "template-oss-apply": "template-oss-apply --force",
    "lint": "eslint \"**/*.js\"",
    "postlint": "template-oss-check",
    "lintfix": "npm run lint -- --fix",
    "snap": "tap",
    "test": "tap",
    "posttest": "npm run lint"
  },
  "dependencies": {
    "chownr": "^2.0.0",
    "fs-minipass": "^2.0.0",
    "minipass": "^4.0.0",
    "minizlib": "^2.1.1",
    "mkdirp": "^1.0.3",
    "yallist": "^4.0.0"
  },
  "devDependencies": {
    "@npmcli/eslint-config": "^4.0.0",
    "@npmcli/template-oss": "4.10.0",
    "chmodr": "^1.2.0",
    "end-of-stream": "^1.4.3",
    "events-to-array": "^2.0.3",
    "mutate-fs": "^2.1.1",
    "nock": "^13.2.9",
    "rimraf": "^3.0.2",
    "tap": "^16.0.1"
  },
  "license": "ISC",
  "engines": {
    "node": ">=10"
  },
  "files": [
    "bin/",
    "lib/",
    "index.js"
  ],
  "tap": {
    "coverage-map": "map.js",
    "timeout": 0,
    "nyc-arg": [
      "--exclude",
      "tap-snapshots/**"
    ]
  },
  "templateOSS": {
    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
    "version": "4.10.0",
    "content": "scripts/template-oss",
    "engines": ">=10",
    "distPaths": [
      "index.js"
    ],
    "allowPaths": [
      "/index.js"
    ],
    "ciVersions": [
      "10.x",
      "12.x",
      "14.x",
      "16.x",
      "18.x"
    ]
  }
}