16 node_modules/cacache/LICENSE.md generated vendored Normal file
@@ -0,0 +1,16 @@
ISC License

Copyright (c) npm, Inc.

Permission to use, copy, modify, and/or distribute this software for
any purpose with or without fee is hereby granted, provided that the
above copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
USE OR PERFORMANCE OF THIS SOFTWARE.
716 node_modules/cacache/README.md generated vendored Normal file
File diff suppressed because it is too large
29 node_modules/cacache/lib/content/path.js generated vendored Normal file
@@ -0,0 +1,29 @@
'use strict'

const contentVer = require('../../package.json')['cache-version'].content
const hashToSegments = require('../util/hash-to-segments')
const path = require('path')
const ssri = require('ssri')

// Current format of content file path:
//
// sha512-BaSE64Hex= ->
// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee
//
module.exports = contentPath

function contentPath (cache, integrity) {
  const sri = ssri.parse(integrity, { single: true })
  // contentPath is the *strongest* algo given
  return path.join(
    contentDir(cache),
    sri.algorithm,
    ...hashToSegments(sri.hexDigest())
  )
}

module.exports.contentDir = contentDir

function contentDir (cache) {
  return path.join(cache, `content-v${contentVer}`)
}
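For orientation, a minimal sketch of the layout contentPath() produces. The cache directory and data are illustrative (not part of this commit), and it assumes deep requires into the vendored package resolve:

// Derive the on-disk home of a piece of content from its integrity value.
const ssri = require('ssri')
const contentPath = require('cacache/lib/content/path')

const integrity = ssri.fromData('hello', { algorithms: ['sha512'] })
// hexDigest() is the hex form of the digest; hashToSegments peels off two
// 2-character directory levels, so the result looks like:
//   /tmp/my-cache/content-v2/sha512/<2 hex chars>/<2 hex chars>/<rest>
console.log(contentPath('/tmp/my-cache', integrity))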
241 node_modules/cacache/lib/content/read.js generated vendored Normal file
@@ -0,0 +1,241 @@
'use strict'

const fs = require('@npmcli/fs')
const fsm = require('fs-minipass')
const ssri = require('ssri')
const contentPath = require('./path')
const Pipeline = require('minipass-pipeline')

module.exports = read

const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024
async function read (cache, integrity, opts = {}) {
  const { size } = opts
  const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
    // get size
    const stat = await fs.stat(cpath)
    return { stat, cpath, sri }
  })
  if (typeof size === 'number' && stat.size !== size) {
    throw sizeError(size, stat.size)
  }

  if (stat.size > MAX_SINGLE_READ_SIZE) {
    return readPipeline(cpath, stat.size, sri, new Pipeline()).concat()
  }

  const data = await fs.readFile(cpath, { encoding: null })
  if (!ssri.checkData(data, sri)) {
    throw integrityError(sri, cpath)
  }

  return data
}

const readPipeline = (cpath, size, sri, stream) => {
  stream.push(
    new fsm.ReadStream(cpath, {
      size,
      readSize: MAX_SINGLE_READ_SIZE,
    }),
    ssri.integrityStream({
      integrity: sri,
      size,
    })
  )
  return stream
}

module.exports.sync = readSync

function readSync (cache, integrity, opts = {}) {
  const { size } = opts
  return withContentSriSync(cache, integrity, (cpath, sri) => {
    const data = fs.readFileSync(cpath, { encoding: null })
    if (typeof size === 'number' && size !== data.length) {
      throw sizeError(size, data.length)
    }

    if (ssri.checkData(data, sri)) {
      return data
    }

    throw integrityError(sri, cpath)
  })
}

module.exports.stream = readStream
module.exports.readStream = readStream

function readStream (cache, integrity, opts = {}) {
  const { size } = opts
  const stream = new Pipeline()
  // Set all this up to run on the stream and then just return the stream
  Promise.resolve().then(async () => {
    const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
      // just stat to ensure it exists
      const stat = await fs.stat(cpath)
      return { stat, cpath, sri }
    })
    if (typeof size === 'number' && size !== stat.size) {
      return stream.emit('error', sizeError(size, stat.size))
    }

    return readPipeline(cpath, stat.size, sri, stream)
  }).catch(err => stream.emit('error', err))

  return stream
}

module.exports.copy = copy
module.exports.copy.sync = copySync

function copy (cache, integrity, dest) {
  return withContentSri(cache, integrity, (cpath, sri) => {
    return fs.copyFile(cpath, dest)
  })
}

function copySync (cache, integrity, dest) {
  return withContentSriSync(cache, integrity, (cpath, sri) => {
    return fs.copyFileSync(cpath, dest)
  })
}

module.exports.hasContent = hasContent

async function hasContent (cache, integrity) {
  if (!integrity) {
    return false
  }

  try {
    return await withContentSri(cache, integrity, async (cpath, sri) => {
      const stat = await fs.stat(cpath)
      return { size: stat.size, sri, stat }
    })
  } catch (err) {
    if (err.code === 'ENOENT') {
      return false
    }

    if (err.code === 'EPERM') {
      /* istanbul ignore else */
      if (process.platform !== 'win32') {
        throw err
      } else {
        return false
      }
    }
  }
}

module.exports.hasContent.sync = hasContentSync

function hasContentSync (cache, integrity) {
  if (!integrity) {
    return false
  }

  return withContentSriSync(cache, integrity, (cpath, sri) => {
    try {
      const stat = fs.statSync(cpath)
      return { size: stat.size, sri, stat }
    } catch (err) {
      if (err.code === 'ENOENT') {
        return false
      }

      if (err.code === 'EPERM') {
        /* istanbul ignore else */
        if (process.platform !== 'win32') {
          throw err
        } else {
          return false
        }
      }
    }
  })
}

async function withContentSri (cache, integrity, fn) {
  const sri = ssri.parse(integrity)
  // If `integrity` has multiple entries, pick the first digest
  // with available local data.
  const algo = sri.pickAlgorithm()
  const digests = sri[algo]

  if (digests.length <= 1) {
    const cpath = contentPath(cache, digests[0])
    return fn(cpath, digests[0])
  } else {
    // Can't use race here because a generic error can happen before
    // a ENOENT error, and can happen before a valid result
    const results = await Promise.all(digests.map(async (meta) => {
      try {
        return await withContentSri(cache, meta, fn)
      } catch (err) {
        if (err.code === 'ENOENT') {
          return Object.assign(
            new Error('No matching content found for ' + sri.toString()),
            { code: 'ENOENT' }
          )
        }
        return err
      }
    }))
    // Return the first non error if it is found
    const result = results.find((r) => !(r instanceof Error))
    if (result) {
      return result
    }

    // Throw the No matching content found error
    const enoentError = results.find((r) => r.code === 'ENOENT')
    if (enoentError) {
      throw enoentError
    }

    // Throw generic error
    throw results.find((r) => r instanceof Error)
  }
}

function withContentSriSync (cache, integrity, fn) {
  const sri = ssri.parse(integrity)
  // If `integrity` has multiple entries, pick the first digest
  // with available local data.
  const algo = sri.pickAlgorithm()
  const digests = sri[algo]
  if (digests.length <= 1) {
    const cpath = contentPath(cache, digests[0])
    return fn(cpath, digests[0])
  } else {
    let lastErr = null
    for (const meta of digests) {
      try {
        return withContentSriSync(cache, meta, fn)
      } catch (err) {
        lastErr = err
      }
    }
    throw lastErr
  }
}

function sizeError (expected, found) {
  /* eslint-disable-next-line max-len */
  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
  err.expected = expected
  err.found = found
  err.code = 'EBADSIZE'
  return err
}

function integrityError (sri, path) {
  const err = new Error(`Integrity verification failed for ${sri} (${path})`)
  err.code = 'EINTEGRITY'
  err.sri = sri
  err.path = path
  return err
}
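Taken together, the read API above can be exercised roughly like this. A sketch only, with an illustrative cache path, assuming the content was previously written:

const read = require('cacache/lib/content/read')

async function demo (integrity) {
  // hasContent() resolves false on ENOENT instead of throwing.
  if (!(await read.hasContent('/tmp/my-cache', integrity))) {
    return null
  }
  // Throws EINTEGRITY if the bytes on disk no longer match the digest,
  // and EBADSIZE when an expected size is supplied and disagrees.
  return read('/tmp/my-cache', integrity)
}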
20 node_modules/cacache/lib/content/rm.js generated vendored Normal file
@@ -0,0 +1,20 @@
'use strict'

const util = require('util')

const contentPath = require('./path')
const { hasContent } = require('./read')
const rimraf = util.promisify(require('rimraf'))

module.exports = rm

async function rm (cache, integrity) {
  const content = await hasContent(cache, integrity)
  // ~pretty~ sure we can't end up with a content lacking sri, but be safe
  if (content && content.sri) {
    await rimraf(contentPath(cache, content.sri))
    return true
  } else {
    return false
  }
}
189 node_modules/cacache/lib/content/write.js generated vendored Normal file
@@ -0,0 +1,189 @@
'use strict'

const events = require('events')
const util = require('util')

const contentPath = require('./path')
const fixOwner = require('../util/fix-owner')
const fs = require('@npmcli/fs')
const moveFile = require('../util/move-file')
const Minipass = require('minipass')
const Pipeline = require('minipass-pipeline')
const Flush = require('minipass-flush')
const path = require('path')
const rimraf = util.promisify(require('rimraf'))
const ssri = require('ssri')
const uniqueFilename = require('unique-filename')
const fsm = require('fs-minipass')

module.exports = write

async function write (cache, data, opts = {}) {
  const { algorithms, size, integrity } = opts
  if (algorithms && algorithms.length > 1) {
    throw new Error('opts.algorithms only supports a single algorithm for now')
  }

  if (typeof size === 'number' && data.length !== size) {
    throw sizeError(size, data.length)
  }

  const sri = ssri.fromData(data, algorithms ? { algorithms } : {})
  if (integrity && !ssri.checkData(data, integrity, opts)) {
    throw checksumError(integrity, sri)
  }

  const tmp = await makeTmp(cache, opts)
  try {
    await fs.writeFile(tmp.target, data, { flag: 'wx' })
    await moveToDestination(tmp, cache, sri, opts)
    return { integrity: sri, size: data.length }
  } finally {
    if (!tmp.moved) {
      await rimraf(tmp.target)
    }
  }
}

module.exports.stream = writeStream

// writes proxied to the 'inputStream' that is passed to the Promise
// 'end' is deferred until content is handled.
class CacacheWriteStream extends Flush {
  constructor (cache, opts) {
    super()
    this.opts = opts
    this.cache = cache
    this.inputStream = new Minipass()
    this.inputStream.on('error', er => this.emit('error', er))
    this.inputStream.on('drain', () => this.emit('drain'))
    this.handleContentP = null
  }

  write (chunk, encoding, cb) {
    if (!this.handleContentP) {
      this.handleContentP = handleContent(
        this.inputStream,
        this.cache,
        this.opts
      )
    }
    return this.inputStream.write(chunk, encoding, cb)
  }

  flush (cb) {
    this.inputStream.end(() => {
      if (!this.handleContentP) {
        const e = new Error('Cache input stream was empty')
        e.code = 'ENODATA'
        // empty streams are probably emitting end right away.
        // defer this one tick by rejecting a promise on it.
        return Promise.reject(e).catch(cb)
      }
      // eslint-disable-next-line promise/catch-or-return
      this.handleContentP.then(
        (res) => {
          res.integrity && this.emit('integrity', res.integrity)
          // eslint-disable-next-line promise/always-return
          res.size !== null && this.emit('size', res.size)
          cb()
        },
        (er) => cb(er)
      )
    })
  }
}

function writeStream (cache, opts = {}) {
  return new CacacheWriteStream(cache, opts)
}

async function handleContent (inputStream, cache, opts) {
  const tmp = await makeTmp(cache, opts)
  try {
    const res = await pipeToTmp(inputStream, cache, tmp.target, opts)
    await moveToDestination(
      tmp,
      cache,
      res.integrity,
      opts
    )
    return res
  } finally {
    if (!tmp.moved) {
      await rimraf(tmp.target)
    }
  }
}

async function pipeToTmp (inputStream, cache, tmpTarget, opts) {
  const outStream = new fsm.WriteStream(tmpTarget, {
    flags: 'wx',
  })

  if (opts.integrityEmitter) {
    // we need to create these all simultaneously since they can fire in any order
    const [integrity, size] = await Promise.all([
      events.once(opts.integrityEmitter, 'integrity').then(res => res[0]),
      events.once(opts.integrityEmitter, 'size').then(res => res[0]),
      new Pipeline(inputStream, outStream).promise(),
    ])
    return { integrity, size }
  }

  let integrity
  let size
  const hashStream = ssri.integrityStream({
    integrity: opts.integrity,
    algorithms: opts.algorithms,
    size: opts.size,
  })
  hashStream.on('integrity', i => {
    integrity = i
  })
  hashStream.on('size', s => {
    size = s
  })

  const pipeline = new Pipeline(inputStream, hashStream, outStream)
  await pipeline.promise()
  return { integrity, size }
}

async function makeTmp (cache, opts) {
  const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
  await fixOwner.mkdirfix(cache, path.dirname(tmpTarget))
  return {
    target: tmpTarget,
    moved: false,
  }
}

async function moveToDestination (tmp, cache, sri, opts) {
  const destination = contentPath(cache, sri)
  const destDir = path.dirname(destination)

  await fixOwner.mkdirfix(cache, destDir)
  await moveFile(tmp.target, destination)
  tmp.moved = true
  await fixOwner.chownr(cache, destination)
}

function sizeError (expected, found) {
  /* eslint-disable-next-line max-len */
  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
  err.expected = expected
  err.found = found
  err.code = 'EBADSIZE'
  return err
}

function checksumError (expected, found) {
  const err = new Error(`Integrity check failed:
  Wanted: ${expected}
   Found: ${found}`)
  err.code = 'EINTEGRITY'
  err.expected = expected
  err.found = found
  return err
}
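A sketch of the non-streaming entry point above, with an illustrative path and data: write() hashes the buffer, stages it under the cache's tmp directory with an exclusive flag, then links it into its content address and cleans up the temp file:

const write = require('cacache/lib/content/write')

async function demo () {
  const { integrity, size } = await write('/tmp/my-cache', Buffer.from('hi'))
  // integrity is an ssri Integrity object; toString() gives 'sha512-...'
  console.log(integrity.toString(), size)
}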
404 node_modules/cacache/lib/entry-index.js generated vendored Normal file
@@ -0,0 +1,404 @@
'use strict'

const util = require('util')
const crypto = require('crypto')
const fs = require('@npmcli/fs')
const Minipass = require('minipass')
const path = require('path')
const ssri = require('ssri')
const uniqueFilename = require('unique-filename')

const contentPath = require('./content/path')
const fixOwner = require('./util/fix-owner')
const hashToSegments = require('./util/hash-to-segments')
const indexV = require('../package.json')['cache-version'].index
const moveFile = require('@npmcli/move-file')
const _rimraf = require('rimraf')
const rimraf = util.promisify(_rimraf)
rimraf.sync = _rimraf.sync

module.exports.NotFoundError = class NotFoundError extends Error {
  constructor (cache, key) {
    super(`No cache entry for ${key} found in ${cache}`)
    this.code = 'ENOENT'
    this.cache = cache
    this.key = key
  }
}

module.exports.compact = compact

async function compact (cache, key, matchFn, opts = {}) {
  const bucket = bucketPath(cache, key)
  const entries = await bucketEntries(bucket)
  const newEntries = []
  // we loop backwards because the bottom-most result is the newest
  // since we add new entries with appendFile
  for (let i = entries.length - 1; i >= 0; --i) {
    const entry = entries[i]
    // a null integrity could mean either a delete was appended
    // or the user has simply stored an index that does not map
    // to any content. we determine if the user wants to keep the
    // null integrity based on the validateEntry function passed in options.
    // if the integrity is null and no validateEntry is provided, we break
    // as we consider the null integrity to be a deletion of everything
    // that came before it.
    if (entry.integrity === null && !opts.validateEntry) {
      break
    }

    // if this entry is valid, and it is either the first entry or
    // the newEntries array doesn't already include an entry that
    // matches this one based on the provided matchFn, then we add
    // it to the beginning of our list
    if ((!opts.validateEntry || opts.validateEntry(entry) === true) &&
      (newEntries.length === 0 ||
        !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) {
      newEntries.unshift(entry)
    }
  }

  const newIndex = '\n' + newEntries.map((entry) => {
    const stringified = JSON.stringify(entry)
    const hash = hashEntry(stringified)
    return `${hash}\t${stringified}`
  }).join('\n')

  const setup = async () => {
    const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
    await fixOwner.mkdirfix(cache, path.dirname(target))
    return {
      target,
      moved: false,
    }
  }

  const teardown = async (tmp) => {
    if (!tmp.moved) {
      return rimraf(tmp.target)
    }
  }

  const write = async (tmp) => {
    await fs.writeFile(tmp.target, newIndex, { flag: 'wx' })
    await fixOwner.mkdirfix(cache, path.dirname(bucket))
    // we use @npmcli/move-file directly here because we
    // want to overwrite the existing file
    await moveFile(tmp.target, bucket)
    tmp.moved = true
    try {
      await fixOwner.chownr(cache, bucket)
    } catch (err) {
      if (err.code !== 'ENOENT') {
        throw err
      }
    }
  }

  // write the file atomically
  const tmp = await setup()
  try {
    await write(tmp)
  } finally {
    await teardown(tmp)
  }

  // we reverse the list we generated such that the newest
  // entries come first in order to make looping through them easier
  // the true passed to formatEntry tells it to keep null
  // integrity values, if they made it this far it's because
  // validateEntry returned true, and as such we should return it
  return newEntries.reverse().map((entry) => formatEntry(cache, entry, true))
}

module.exports.insert = insert

async function insert (cache, key, integrity, opts = {}) {
  const { metadata, size } = opts
  const bucket = bucketPath(cache, key)
  const entry = {
    key,
    integrity: integrity && ssri.stringify(integrity),
    time: Date.now(),
    size,
    metadata,
  }
  try {
    await fixOwner.mkdirfix(cache, path.dirname(bucket))
    const stringified = JSON.stringify(entry)
    // NOTE - Cleverness ahoy!
    //
    // This works because it's tremendously unlikely for an entry to corrupt
    // another while still preserving the string length of the JSON in
    // question. So, we just slap the length in there and verify it on read.
    //
    // Thanks to @isaacs for the whiteboarding session that ended up with
    // this.
    await fs.appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
    await fixOwner.chownr(cache, bucket)
  } catch (err) {
    if (err.code === 'ENOENT') {
      return undefined
    }

    throw err
    // There's a class of race conditions that happen when things get deleted
    // during fixOwner, or between the two mkdirfix/chownr calls.
    //
    // It's perfectly fine to just not bother in those cases and lie
    // that the index entry was written. Because it's a cache.
  }
  return formatEntry(cache, entry)
}

module.exports.insert.sync = insertSync

function insertSync (cache, key, integrity, opts = {}) {
  const { metadata, size } = opts
  const bucket = bucketPath(cache, key)
  const entry = {
    key,
    integrity: integrity && ssri.stringify(integrity),
    time: Date.now(),
    size,
    metadata,
  }
  fixOwner.mkdirfix.sync(cache, path.dirname(bucket))
  const stringified = JSON.stringify(entry)
  fs.appendFileSync(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
  try {
    fixOwner.chownr.sync(cache, bucket)
  } catch (err) {
    if (err.code !== 'ENOENT') {
      throw err
    }
  }
  return formatEntry(cache, entry)
}

module.exports.find = find

async function find (cache, key) {
  const bucket = bucketPath(cache, key)
  try {
    const entries = await bucketEntries(bucket)
    return entries.reduce((latest, next) => {
      if (next && next.key === key) {
        return formatEntry(cache, next)
      } else {
        return latest
      }
    }, null)
  } catch (err) {
    if (err.code === 'ENOENT') {
      return null
    } else {
      throw err
    }
  }
}

module.exports.find.sync = findSync

function findSync (cache, key) {
  const bucket = bucketPath(cache, key)
  try {
    return bucketEntriesSync(bucket).reduce((latest, next) => {
      if (next && next.key === key) {
        return formatEntry(cache, next)
      } else {
        return latest
      }
    }, null)
  } catch (err) {
    if (err.code === 'ENOENT') {
      return null
    } else {
      throw err
    }
  }
}

module.exports.delete = del

function del (cache, key, opts = {}) {
  if (!opts.removeFully) {
    return insert(cache, key, null, opts)
  }

  const bucket = bucketPath(cache, key)
  return rimraf(bucket)
}

module.exports.delete.sync = delSync

function delSync (cache, key, opts = {}) {
  if (!opts.removeFully) {
    return insertSync(cache, key, null, opts)
  }

  const bucket = bucketPath(cache, key)
  return rimraf.sync(bucket)
}

module.exports.lsStream = lsStream

function lsStream (cache) {
  const indexDir = bucketDir(cache)
  const stream = new Minipass({ objectMode: true })

  // Set all this up to run on the stream and then just return the stream
  Promise.resolve().then(async () => {
    const buckets = await readdirOrEmpty(indexDir)
    await Promise.all(buckets.map(async (bucket) => {
      const bucketPath = path.join(indexDir, bucket)
      const subbuckets = await readdirOrEmpty(bucketPath)
      await Promise.all(subbuckets.map(async (subbucket) => {
        const subbucketPath = path.join(bucketPath, subbucket)

        // "/cachename/<bucket 0xFF>/<bucket 0xFF>./*"
        const subbucketEntries = await readdirOrEmpty(subbucketPath)
        await Promise.all(subbucketEntries.map(async (entry) => {
          const entryPath = path.join(subbucketPath, entry)
          try {
            const entries = await bucketEntries(entryPath)
            // using a Map here prevents duplicate keys from showing up
            // twice, I guess?
            const reduced = entries.reduce((acc, entry) => {
              acc.set(entry.key, entry)
              return acc
            }, new Map())
            // reduced is a map of key => entry
            for (const entry of reduced.values()) {
              const formatted = formatEntry(cache, entry)
              if (formatted) {
                stream.write(formatted)
              }
            }
          } catch (err) {
            if (err.code === 'ENOENT') {
              return undefined
            }
            throw err
          }
        }))
      }))
    }))
    stream.end()
    return stream
  }).catch(err => stream.emit('error', err))

  return stream
}

module.exports.ls = ls

async function ls (cache) {
  const entries = await lsStream(cache).collect()
  return entries.reduce((acc, xs) => {
    acc[xs.key] = xs
    return acc
  }, {})
}

module.exports.bucketEntries = bucketEntries

async function bucketEntries (bucket, filter) {
  const data = await fs.readFile(bucket, 'utf8')
  return _bucketEntries(data, filter)
}

module.exports.bucketEntries.sync = bucketEntriesSync

function bucketEntriesSync (bucket, filter) {
  const data = fs.readFileSync(bucket, 'utf8')
  return _bucketEntries(data, filter)
}

function _bucketEntries (data, filter) {
  const entries = []
  data.split('\n').forEach((entry) => {
    if (!entry) {
      return
    }

    const pieces = entry.split('\t')
    if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) {
      // Hash is no good! Corruption or malice? Doesn't matter!
      // EJECT EJECT
      return
    }
    let obj
    try {
      obj = JSON.parse(pieces[1])
    } catch (e) {
      // Entry is corrupted!
      return
    }
    if (obj) {
      entries.push(obj)
    }
  })
  return entries
}

module.exports.bucketDir = bucketDir

function bucketDir (cache) {
  return path.join(cache, `index-v${indexV}`)
}

module.exports.bucketPath = bucketPath

function bucketPath (cache, key) {
  const hashed = hashKey(key)
  return path.join.apply(
    path,
    [bucketDir(cache)].concat(hashToSegments(hashed))
  )
}

module.exports.hashKey = hashKey

function hashKey (key) {
  return hash(key, 'sha256')
}

module.exports.hashEntry = hashEntry

function hashEntry (str) {
  return hash(str, 'sha1')
}

function hash (str, digest) {
  return crypto
    .createHash(digest)
    .update(str)
    .digest('hex')
}

function formatEntry (cache, entry, keepAll) {
  // Treat null digests as deletions. They'll shadow any previous entries.
  if (!entry.integrity && !keepAll) {
    return null
  }

  return {
    key: entry.key,
    integrity: entry.integrity,
    path: entry.integrity ? contentPath(cache, entry.integrity) : undefined,
    size: entry.size,
    time: entry.time,
    metadata: entry.metadata,
  }
}

function readdirOrEmpty (dir) {
  return fs.readdir(dir).catch((err) => {
    if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {
      return []
    }

    throw err
  })
}
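The append format insert() uses above is worth spelling out: each bucket line is the sha1 of the JSON payload, a tab, then the payload, and _bucketEntries() recomputes the hash on read and silently drops any line that fails, so torn concurrent appends degrade into cache misses rather than errors. A hand-rolled sketch with an illustrative entry:

const crypto = require('crypto')

const entry = { key: 'my-key', integrity: 'sha512-deadbeef', time: Date.now(), size: 2 }
const stringified = JSON.stringify(entry)
const hash = crypto.createHash('sha1').update(stringified).digest('hex')
// Exactly the line insert() appends to the bucket file:
console.log(`\n${hash}\t${stringified}`)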
225 node_modules/cacache/lib/get.js generated vendored Normal file
@@ -0,0 +1,225 @@
'use strict'

const Collect = require('minipass-collect')
const Minipass = require('minipass')
const Pipeline = require('minipass-pipeline')

const index = require('./entry-index')
const memo = require('./memoization')
const read = require('./content/read')

async function getData (cache, key, opts = {}) {
  const { integrity, memoize, size } = opts
  const memoized = memo.get(cache, key, opts)
  if (memoized && memoize !== false) {
    return {
      metadata: memoized.entry.metadata,
      data: memoized.data,
      integrity: memoized.entry.integrity,
      size: memoized.entry.size,
    }
  }

  const entry = await index.find(cache, key, opts)
  if (!entry) {
    throw new index.NotFoundError(cache, key)
  }
  const data = await read(cache, entry.integrity, { integrity, size })
  if (memoize) {
    memo.put(cache, entry, data, opts)
  }

  return {
    data,
    metadata: entry.metadata,
    size: entry.size,
    integrity: entry.integrity,
  }
}
module.exports = getData

async function getDataByDigest (cache, key, opts = {}) {
  const { integrity, memoize, size } = opts
  const memoized = memo.get.byDigest(cache, key, opts)
  if (memoized && memoize !== false) {
    return memoized
  }

  const res = await read(cache, key, { integrity, size })
  if (memoize) {
    memo.put.byDigest(cache, key, res, opts)
  }
  return res
}
module.exports.byDigest = getDataByDigest

function getDataSync (cache, key, opts = {}) {
  const { integrity, memoize, size } = opts
  const memoized = memo.get(cache, key, opts)

  if (memoized && memoize !== false) {
    return {
      metadata: memoized.entry.metadata,
      data: memoized.data,
      integrity: memoized.entry.integrity,
      size: memoized.entry.size,
    }
  }
  const entry = index.find.sync(cache, key, opts)
  if (!entry) {
    throw new index.NotFoundError(cache, key)
  }
  const data = read.sync(cache, entry.integrity, {
    integrity: integrity,
    size: size,
  })
  const res = {
    metadata: entry.metadata,
    data: data,
    size: entry.size,
    integrity: entry.integrity,
  }
  if (memoize) {
    memo.put(cache, entry, res.data, opts)
  }

  return res
}

module.exports.sync = getDataSync

function getDataByDigestSync (cache, digest, opts = {}) {
  const { integrity, memoize, size } = opts
  const memoized = memo.get.byDigest(cache, digest, opts)

  if (memoized && memoize !== false) {
    return memoized
  }

  const res = read.sync(cache, digest, {
    integrity: integrity,
    size: size,
  })
  if (memoize) {
    memo.put.byDigest(cache, digest, res, opts)
  }

  return res
}
module.exports.sync.byDigest = getDataByDigestSync

const getMemoizedStream = (memoized) => {
  const stream = new Minipass()
  stream.on('newListener', function (ev, cb) {
    ev === 'metadata' && cb(memoized.entry.metadata)
    ev === 'integrity' && cb(memoized.entry.integrity)
    ev === 'size' && cb(memoized.entry.size)
  })
  stream.end(memoized.data)
  return stream
}

function getStream (cache, key, opts = {}) {
  const { memoize, size } = opts
  const memoized = memo.get(cache, key, opts)
  if (memoized && memoize !== false) {
    return getMemoizedStream(memoized)
  }

  const stream = new Pipeline()
  // Set all this up to run on the stream and then just return the stream
  Promise.resolve().then(async () => {
    const entry = await index.find(cache, key)
    if (!entry) {
      throw new index.NotFoundError(cache, key)
    }

    stream.emit('metadata', entry.metadata)
    stream.emit('integrity', entry.integrity)
    stream.emit('size', entry.size)
    stream.on('newListener', function (ev, cb) {
      ev === 'metadata' && cb(entry.metadata)
      ev === 'integrity' && cb(entry.integrity)
      ev === 'size' && cb(entry.size)
    })

    const src = read.readStream(
      cache,
      entry.integrity,
      { ...opts, size: typeof size !== 'number' ? entry.size : size }
    )

    if (memoize) {
      const memoStream = new Collect.PassThrough()
      memoStream.on('collect', data => memo.put(cache, entry, data, opts))
      stream.unshift(memoStream)
    }
    stream.unshift(src)
    return stream
  }).catch((err) => stream.emit('error', err))

  return stream
}

module.exports.stream = getStream

function getStreamDigest (cache, integrity, opts = {}) {
  const { memoize } = opts
  const memoized = memo.get.byDigest(cache, integrity, opts)
  if (memoized && memoize !== false) {
    const stream = new Minipass()
    stream.end(memoized)
    return stream
  } else {
    const stream = read.readStream(cache, integrity, opts)
    if (!memoize) {
      return stream
    }

    const memoStream = new Collect.PassThrough()
    memoStream.on('collect', data => memo.put.byDigest(
      cache,
      integrity,
      data,
      opts
    ))
    return new Pipeline(stream, memoStream)
  }
}

module.exports.stream.byDigest = getStreamDigest

function info (cache, key, opts = {}) {
  const { memoize } = opts
  const memoized = memo.get(cache, key, opts)
  if (memoized && memoize !== false) {
    return Promise.resolve(memoized.entry)
  } else {
    return index.find(cache, key)
  }
}
module.exports.info = info

async function copy (cache, key, dest, opts = {}) {
  const entry = await index.find(cache, key, opts)
  if (!entry) {
    throw new index.NotFoundError(cache, key)
  }
  await read.copy(cache, entry.integrity, dest, opts)
  return {
    metadata: entry.metadata,
    size: entry.size,
    integrity: entry.integrity,
  }
}

module.exports.copy = copy

async function copyByDigest (cache, key, dest, opts = {}) {
  await read.copy(cache, key, dest, opts)
  return key
}

module.exports.copy.byDigest = copyByDigest

module.exports.hasContent = read.hasContent
45 node_modules/cacache/lib/index.js generated vendored Normal file
@@ -0,0 +1,45 @@
'use strict'

const get = require('./get.js')
const put = require('./put.js')
const rm = require('./rm.js')
const verify = require('./verify.js')
const { clearMemoized } = require('./memoization.js')
const tmp = require('./util/tmp.js')
const index = require('./entry-index.js')

module.exports.index = {}
module.exports.index.compact = index.compact
module.exports.index.insert = index.insert

module.exports.ls = index.ls
module.exports.ls.stream = index.lsStream

module.exports.get = get
module.exports.get.byDigest = get.byDigest
module.exports.get.sync = get.sync
module.exports.get.sync.byDigest = get.sync.byDigest
module.exports.get.stream = get.stream
module.exports.get.stream.byDigest = get.stream.byDigest
module.exports.get.copy = get.copy
module.exports.get.copy.byDigest = get.copy.byDigest
module.exports.get.info = get.info
module.exports.get.hasContent = get.hasContent
module.exports.get.hasContent.sync = get.hasContent.sync

module.exports.put = put
module.exports.put.stream = put.stream

module.exports.rm = rm.entry
module.exports.rm.all = rm.all
module.exports.rm.entry = module.exports.rm
module.exports.rm.content = rm.content

module.exports.clearMemoized = clearMemoized

module.exports.tmp = {}
module.exports.tmp.mkdir = tmp.mkdir
module.exports.tmp.withTmp = tmp.withTmp

module.exports.verify = verify
module.exports.verify.lastRun = verify.lastRun
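Since this file wires up the whole public surface, a minimal end-to-end sketch of the API it exposes (cache path, key, and data are illustrative):

const cacache = require('cacache')

async function demo () {
  const integrity = await cacache.put('/tmp/my-cache', 'my-key', 'hello')
  const { data, metadata, size } = await cacache.get('/tmp/my-cache', 'my-key')
  console.log(integrity, data.toString(), size, metadata)
  await cacache.rm.entry('/tmp/my-cache', 'my-key')
}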
72 node_modules/cacache/lib/memoization.js generated vendored Normal file
@@ -0,0 +1,72 @@
'use strict'

const LRU = require('lru-cache')

const MEMOIZED = new LRU({
  max: 500,
  maxSize: 50 * 1024 * 1024, // 50MB
  ttl: 3 * 60 * 1000, // 3 minutes
  sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length,
})

module.exports.clearMemoized = clearMemoized

function clearMemoized () {
  const old = {}
  MEMOIZED.forEach((v, k) => {
    old[k] = v
  })
  MEMOIZED.clear()
  return old
}

module.exports.put = put

function put (cache, entry, data, opts) {
  pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data })
  putDigest(cache, entry.integrity, data, opts)
}

module.exports.put.byDigest = putDigest

function putDigest (cache, integrity, data, opts) {
  pickMem(opts).set(`digest:${cache}:${integrity}`, data)
}

module.exports.get = get

function get (cache, key, opts) {
  return pickMem(opts).get(`key:${cache}:${key}`)
}

module.exports.get.byDigest = getDigest

function getDigest (cache, integrity, opts) {
  return pickMem(opts).get(`digest:${cache}:${integrity}`)
}

class ObjProxy {
  constructor (obj) {
    this.obj = obj
  }

  get (key) {
    return this.obj[key]
  }

  set (key, val) {
    this.obj[key] = val
  }
}

function pickMem (opts) {
  if (!opts || !opts.memoize) {
    return MEMOIZED
  } else if (opts.memoize.get && opts.memoize.set) {
    return opts.memoize
  } else if (typeof opts.memoize === 'object') {
    return new ObjProxy(opts.memoize)
  } else {
    return MEMOIZED
  }
}
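pickMem() above means opts.memoize can be anything with get/set methods (or a plain object, via ObjProxy), letting callers scope memoization to their own container instead of the shared LRU. A sketch with illustrative values:

const memo = require('cacache/lib/memoization')

const myStore = new Map() // exposes get/set, so pickMem() uses it directly
const entry = { key: 'k', integrity: 'sha512-abc' }
memo.put('/tmp/my-cache', entry, Buffer.from('hi'), { memoize: myStore })
console.log(memo.get('/tmp/my-cache', 'k', { memoize: myStore }).data.toString())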
80 node_modules/cacache/lib/put.js generated vendored Normal file
@@ -0,0 +1,80 @@
'use strict'

const index = require('./entry-index')
const memo = require('./memoization')
const write = require('./content/write')
const Flush = require('minipass-flush')
const { PassThrough } = require('minipass-collect')
const Pipeline = require('minipass-pipeline')

const putOpts = (opts) => ({
  algorithms: ['sha512'],
  ...opts,
})

module.exports = putData

async function putData (cache, key, data, opts = {}) {
  const { memoize } = opts
  opts = putOpts(opts)
  const res = await write(cache, data, opts)
  const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size })
  if (memoize) {
    memo.put(cache, entry, data, opts)
  }

  return res.integrity
}

module.exports.stream = putStream

function putStream (cache, key, opts = {}) {
  const { memoize } = opts
  opts = putOpts(opts)
  let integrity
  let size
  let error

  let memoData
  const pipeline = new Pipeline()
  // first item in the pipeline is the memoizer, because we need
  // that to end first and get the collected data.
  if (memoize) {
    const memoizer = new PassThrough().on('collect', data => {
      memoData = data
    })
    pipeline.push(memoizer)
  }

  // contentStream is a write-only, not a passthrough
  // no data comes out of it.
  const contentStream = write.stream(cache, opts)
    .on('integrity', (int) => {
      integrity = int
    })
    .on('size', (s) => {
      size = s
    })
    .on('error', (err) => {
      error = err
    })

  pipeline.push(contentStream)

  // last but not least, we write the index and emit hash and size,
  // and memoize if we're doing that
  pipeline.push(new Flush({
    async flush () {
      if (!error) {
        const entry = await index.insert(cache, key, integrity, { ...opts, size })
        if (memoize && memoData) {
          memo.put(cache, entry, memoData, opts)
        }
        pipeline.emit('integrity', integrity)
        pipeline.emit('size', size)
      }
    },
  }))

  return pipeline
}
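A sketch of the streaming entry point above, with illustrative paths: the returned pipeline emits 'integrity' and 'size' once the index entry has been written by the trailing Flush stage:

const fs = require('fs')
const put = require('cacache/lib/put')

fs.createReadStream('./some-local-file')
  .pipe(put.stream('/tmp/my-cache', 'my-key'))
  .on('integrity', (i) => console.log('stored as', i.toString()))
  .on('error', console.error)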
31 node_modules/cacache/lib/rm.js generated vendored Normal file
@@ -0,0 +1,31 @@
'use strict'

const util = require('util')

const index = require('./entry-index')
const memo = require('./memoization')
const path = require('path')
const rimraf = util.promisify(require('rimraf'))
const rmContent = require('./content/rm')

module.exports = entry
module.exports.entry = entry

function entry (cache, key, opts) {
  memo.clearMemoized()
  return index.delete(cache, key, opts)
}

module.exports.content = content

function content (cache, integrity) {
  memo.clearMemoized()
  return rmContent(cache, integrity)
}

module.exports.all = all

function all (cache) {
  memo.clearMemoized()
  return rimraf(path.join(cache, '*(content-*|index-*)'))
}
145 node_modules/cacache/lib/util/fix-owner.js generated vendored Normal file
@@ -0,0 +1,145 @@
'use strict'

const util = require('util')

const chownr = util.promisify(require('chownr'))
const mkdirp = require('mkdirp')
const inflight = require('promise-inflight')
const inferOwner = require('infer-owner')

// Memoize getuid()/getgid() calls.
// patch process.setuid/setgid to invalidate cached value on change
const self = { uid: null, gid: null }
const getSelf = () => {
  if (typeof self.uid !== 'number') {
    self.uid = process.getuid()
    const setuid = process.setuid
    process.setuid = (uid) => {
      self.uid = null
      process.setuid = setuid
      return process.setuid(uid)
    }
  }
  if (typeof self.gid !== 'number') {
    self.gid = process.getgid()
    const setgid = process.setgid
    process.setgid = (gid) => {
      self.gid = null
      process.setgid = setgid
      return process.setgid(gid)
    }
  }
}

module.exports.chownr = fixOwner

async function fixOwner (cache, filepath) {
  if (!process.getuid) {
    // This platform doesn't need ownership fixing
    return
  }

  getSelf()
  if (self.uid !== 0) {
    // almost certainly can't chown anyway
    return
  }

  const { uid, gid } = await inferOwner(cache)

  // No need to override if it's already what we used.
  if (self.uid === uid && self.gid === gid) {
    return
  }

  return inflight('fixOwner: fixing ownership on ' + filepath, () =>
    chownr(
      filepath,
      typeof uid === 'number' ? uid : self.uid,
      typeof gid === 'number' ? gid : self.gid
    ).catch((err) => {
      if (err.code === 'ENOENT') {
        return null
      }

      throw err
    })
  )
}

module.exports.chownr.sync = fixOwnerSync

function fixOwnerSync (cache, filepath) {
  if (!process.getuid) {
    // This platform doesn't need ownership fixing
    return
  }
  const { uid, gid } = inferOwner.sync(cache)
  getSelf()
  if (self.uid !== 0) {
    // almost certainly can't chown anyway
    return
  }

  if (self.uid === uid && self.gid === gid) {
    // No need to override if it's already what we used.
    return
  }
  try {
    chownr.sync(
      filepath,
      typeof uid === 'number' ? uid : self.uid,
      typeof gid === 'number' ? gid : self.gid
    )
  } catch (err) {
    // only catch ENOENT, any other error is a problem.
    if (err.code === 'ENOENT') {
      return null
    }

    throw err
  }
}

module.exports.mkdirfix = mkdirfix

async function mkdirfix (cache, p, cb) {
  // we have to infer the owner _before_ making the directory, even though
  // we aren't going to use the results, since the cache itself might not
  // exist yet. If we mkdirp it, then our current uid/gid will be assumed
  // to be correct if it creates the cache folder in the process.
  await inferOwner(cache)
  try {
    const made = await mkdirp(p)
    if (made) {
      await fixOwner(cache, made)
      return made
    }
  } catch (err) {
    if (err.code === 'EEXIST') {
      await fixOwner(cache, p)
      return null
    }
    throw err
  }
}

module.exports.mkdirfix.sync = mkdirfixSync

function mkdirfixSync (cache, p) {
  try {
    inferOwner.sync(cache)
    const made = mkdirp.sync(p)
    if (made) {
      fixOwnerSync(cache, made)
      return made
    }
  } catch (err) {
    if (err.code === 'EEXIST') {
      fixOwnerSync(cache, p)
      return null
    } else {
      throw err
    }
  }
}
7 node_modules/cacache/lib/util/hash-to-segments.js generated vendored Normal file
@@ -0,0 +1,7 @@
'use strict'

module.exports = hashToSegments

function hashToSegments (hash) {
  return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)]
}
56 node_modules/cacache/lib/util/move-file.js generated vendored Normal file
@@ -0,0 +1,56 @@
'use strict'

const fs = require('@npmcli/fs')
const move = require('@npmcli/move-file')
const pinflight = require('promise-inflight')

module.exports = moveFile

async function moveFile (src, dest) {
  const isWindows = process.platform === 'win32'

  // This isn't quite an fs.rename -- the assumption is that
  // if `dest` already exists, and we get certain errors while
  // trying to move it, we should just not bother.
  //
  // In the case of cache corruption, users will receive an
  // EINTEGRITY error elsewhere, and can remove the offending
  // content their own way.
  //
  // Note that, as the name suggests, this strictly only supports file moves.
  try {
    await fs.link(src, dest)
  } catch (err) {
    if (isWindows && err.code === 'EPERM') {
      // XXX This is a really weird way to handle this situation, as it
      // results in the src file being deleted even though the dest
      // might not exist. Since we pretty much always write files to
      // deterministic locations based on content hash, this is likely
      // ok (or at worst, just ends in a future cache miss). But it would
      // be worth investigating at some time in the future if this is
      // really what we want to do here.
    } else if (err.code === 'EEXIST' || err.code === 'EBUSY') {
      // file already exists, so whatever
    } else {
      throw err
    }
  }
  try {
    await Promise.all([
      fs.unlink(src),
      !isWindows && fs.chmod(dest, '0444'),
    ])
  } catch (e) {
    return pinflight('cacache-move-file:' + dest, async () => {
      await fs.stat(dest).catch((err) => {
        if (err.code !== 'ENOENT') {
          // Something else is wrong here. Bail bail bail
          throw err
        }
      })
      // file doesn't already exist! let's try a rename -> copy fallback
      // only delete if it successfully copies
      return move(src, dest)
    })
  }
}
33 node_modules/cacache/lib/util/tmp.js generated vendored Normal file
@@ -0,0 +1,33 @@
'use strict'

const fs = require('@npmcli/fs')

const fixOwner = require('./fix-owner')
const path = require('path')

module.exports.mkdir = mktmpdir

async function mktmpdir (cache, opts = {}) {
  const { tmpPrefix } = opts
  const tmpDir = path.join(cache, 'tmp')
  await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' })
  // do not use path.join(), it drops the trailing / if tmpPrefix is unset
  const target = `${tmpDir}${path.sep}${tmpPrefix || ''}`
  return fs.mkdtemp(target, { owner: 'inherit' })
}

module.exports.withTmp = withTmp

function withTmp (cache, opts, cb) {
  if (!cb) {
    cb = opts
    opts = {}
  }
  return fs.withTempDir(path.join(cache, 'tmp'), cb, opts)
}

module.exports.fix = fixtmpdir

function fixtmpdir (cache) {
  // fix-owner exports its chownr helper rather than a callable module,
  // so go through .chownr here.
  return fixOwner.chownr(cache, path.join(cache, 'tmp'))
}
257 node_modules/cacache/lib/verify.js generated vendored Normal file
@@ -0,0 +1,257 @@
'use strict'

const util = require('util')

const pMap = require('p-map')
const contentPath = require('./content/path')
const fixOwner = require('./util/fix-owner')
const fs = require('@npmcli/fs')
const fsm = require('fs-minipass')
const glob = util.promisify(require('glob'))
const index = require('./entry-index')
const path = require('path')
const rimraf = util.promisify(require('rimraf'))
const ssri = require('ssri')

const globify = pattern => pattern.split('\\').join('/')

const hasOwnProperty = (obj, key) =>
  Object.prototype.hasOwnProperty.call(obj, key)

const verifyOpts = (opts) => ({
  concurrency: 20,
  log: { silly () {} },
  ...opts,
})

module.exports = verify

async function verify (cache, opts) {
  opts = verifyOpts(opts)
  opts.log.silly('verify', 'verifying cache at', cache)

  const steps = [
    markStartTime,
    fixPerms,
    garbageCollect,
    rebuildIndex,
    cleanTmp,
    writeVerifile,
    markEndTime,
  ]

  const stats = {}
  for (const step of steps) {
    const label = step.name
    const start = new Date()
    const s = await step(cache, opts)
    if (s) {
      Object.keys(s).forEach((k) => {
        stats[k] = s[k]
      })
    }
    const end = new Date()
    if (!stats.runTime) {
      stats.runTime = {}
    }
    stats.runTime[label] = end - start
  }
  stats.runTime.total = stats.endTime - stats.startTime
  opts.log.silly(
    'verify',
    'verification finished for',
    cache,
    'in',
    `${stats.runTime.total}ms`
  )
  return stats
}

async function markStartTime (cache, opts) {
  return { startTime: new Date() }
}

async function markEndTime (cache, opts) {
  return { endTime: new Date() }
}

async function fixPerms (cache, opts) {
  opts.log.silly('verify', 'fixing cache permissions')
  await fixOwner.mkdirfix(cache, cache)
  // TODO - fix file permissions too
  await fixOwner.chownr(cache, cache)
  return null
}

// Implements a naive mark-and-sweep tracing garbage collector.
//
// The algorithm is basically as follows:
// 1. Read (and filter) all index entries ("pointers")
// 2. Mark each integrity value as "live"
// 3. Read entire filesystem tree in `content-vX/` dir
// 4. If content is live, verify its checksum and delete it if it fails
// 5. If content is not marked as live, rimraf it.
//
async function garbageCollect (cache, opts) {
  opts.log.silly('verify', 'garbage collecting content')
  const indexStream = index.lsStream(cache)
  const liveContent = new Set()
  indexStream.on('data', (entry) => {
    if (opts.filter && !opts.filter(entry)) {
      return
    }

    liveContent.add(entry.integrity.toString())
  })
  await new Promise((resolve, reject) => {
    indexStream.on('end', resolve).on('error', reject)
  })
  const contentDir = contentPath.contentDir(cache)
  const files = await glob(globify(path.join(contentDir, '**')), {
    follow: false,
    nodir: true,
    nosort: true,
  })
  const stats = {
    verifiedContent: 0,
    reclaimedCount: 0,
    reclaimedSize: 0,
    badContentCount: 0,
    keptSize: 0,
  }
  await pMap(
    files,
    async (f) => {
      const split = f.split(/[/\\]/)
      const digest = split.slice(split.length - 3).join('')
      const algo = split[split.length - 4]
      const integrity = ssri.fromHex(digest, algo)
      if (liveContent.has(integrity.toString())) {
        const info = await verifyContent(f, integrity)
        if (!info.valid) {
          stats.reclaimedCount++
          stats.badContentCount++
          stats.reclaimedSize += info.size
        } else {
          stats.verifiedContent++
          stats.keptSize += info.size
        }
      } else {
        // No entries refer to this content. We can delete.
        stats.reclaimedCount++
        const s = await fs.stat(f)
        await rimraf(f)
        stats.reclaimedSize += s.size
      }
      return stats
    },
    { concurrency: opts.concurrency }
  )
  return stats
}

async function verifyContent (filepath, sri) {
  const contentInfo = {}
  try {
    const { size } = await fs.stat(filepath)
    contentInfo.size = size
    contentInfo.valid = true
    await ssri.checkStream(new fsm.ReadStream(filepath), sri)
  } catch (err) {
    if (err.code === 'ENOENT') {
      return { size: 0, valid: false }
    }
    if (err.code !== 'EINTEGRITY') {
      throw err
    }

    await rimraf(filepath)
    contentInfo.valid = false
  }
  return contentInfo
}

async function rebuildIndex (cache, opts) {
  opts.log.silly('verify', 'rebuilding index')
  const entries = await index.ls(cache)
  const stats = {
    missingContent: 0,
    rejectedEntries: 0,
    totalEntries: 0,
  }
  const buckets = {}
  for (const k in entries) {
    /* istanbul ignore else */
    if (hasOwnProperty(entries, k)) {
      const hashed = index.hashKey(k)
      const entry = entries[k]
      const excluded = opts.filter && !opts.filter(entry)
      excluded && stats.rejectedEntries++
      if (buckets[hashed] && !excluded) {
        buckets[hashed].push(entry)
      } else if (buckets[hashed] && excluded) {
        // skip
      } else if (excluded) {
        buckets[hashed] = []
        buckets[hashed]._path = index.bucketPath(cache, k)
      } else {
        buckets[hashed] = [entry]
        buckets[hashed]._path = index.bucketPath(cache, k)
      }
    }
  }
  await pMap(
    Object.keys(buckets),
    (key) => {
      return rebuildBucket(cache, buckets[key], stats, opts)
    },
    { concurrency: opts.concurrency }
  )
  return stats
}

async function rebuildBucket (cache, bucket, stats, opts) {
  await fs.truncate(bucket._path)
  // This needs to be serialized because cacache explicitly
  // lets very racy bucket conflicts clobber each other.
  for (const entry of bucket) {
    const content = contentPath(cache, entry.integrity)
    try {
      await fs.stat(content)
      await index.insert(cache, entry.key, entry.integrity, {
        metadata: entry.metadata,
        size: entry.size,
      })
      stats.totalEntries++
    } catch (err) {
      if (err.code === 'ENOENT') {
        stats.rejectedEntries++
        stats.missingContent++
      } else {
        throw err
      }
    }
  }
}

function cleanTmp (cache, opts) {
  opts.log.silly('verify', 'cleaning tmp directory')
  return rimraf(path.join(cache, 'tmp'))
}

function writeVerifile (cache, opts) {
  const verifile = path.join(cache, '_lastverified')
  opts.log.silly('verify', 'writing verifile to ' + verifile)
  try {
    return fs.writeFile(verifile, `${Date.now()}`)
  } finally {
    fixOwner.chownr.sync(cache, verifile)
  }
}

module.exports.lastRun = lastRun

async function lastRun (cache) {
  const data = await fs.readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' })
  return new Date(+data)
}
2
node_modules/cacache/node_modules/brace-expansion/.github/FUNDING.yml
generated
vendored
Normal file
@@ -0,0 +1,2 @@
tidelift: "npm/brace-expansion"
patreon: juliangruber
21
node_modules/cacache/node_modules/brace-expansion/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2013 Julian Gruber <julian@juliangruber.com>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
135
node_modules/cacache/node_modules/brace-expansion/README.md
generated
vendored
Normal file
@@ -0,0 +1,135 @@
# brace-expansion

[Brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html),
as known from sh/bash, in JavaScript.

[build status](http://travis-ci.org/juliangruber/brace-expansion)
[downloads](https://www.npmjs.org/package/brace-expansion)
[Greenkeeper](https://greenkeeper.io/)

[testling](https://ci.testling.com/juliangruber/brace-expansion)

## Example

```js
var expand = require('brace-expansion');

expand('file-{a,b,c}.jpg')
// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg']

expand('-v{,,}')
// => ['-v', '-v', '-v']

expand('file{0..2}.jpg')
// => ['file0.jpg', 'file1.jpg', 'file2.jpg']

expand('file-{a..c}.jpg')
// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg']

expand('file{2..0}.jpg')
// => ['file2.jpg', 'file1.jpg', 'file0.jpg']

expand('file{0..4..2}.jpg')
// => ['file0.jpg', 'file2.jpg', 'file4.jpg']

expand('file-{a..e..2}.jpg')
// => ['file-a.jpg', 'file-c.jpg', 'file-e.jpg']

expand('file{00..10..5}.jpg')
// => ['file00.jpg', 'file05.jpg', 'file10.jpg']

expand('{{A..C},{a..c}}')
// => ['A', 'B', 'C', 'a', 'b', 'c']

expand('ppp{,config,oe{,conf}}')
// => ['ppp', 'pppconfig', 'pppoe', 'pppoeconf']
```

## API

```js
var expand = require('brace-expansion');
```

### var expanded = expand(str)

Return an array of all possible and valid expansions of `str`. If none are
found, `[str]` is returned.

Valid expansions are:

```js
/^(.*,)+(.+)?$/
// {a,b,...}
```

A comma separated list of options, like `{a,b}` or `{a,{b,c}}` or `{,a,}`.

```js
/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/
// {x..y[..incr]}
```

A numeric sequence from `x` to `y` inclusive, with optional increment.
If `x` or `y` start with a leading `0`, all the numbers will be padded
to have equal length. Negative numbers and backwards iteration work too.

```js
/^[a-zA-Z]\.\.[a-zA-Z](\.\.-?\d+)?$/
// {x..y[..incr]}
```

An alphabetic sequence from `x` to `y` inclusive, with optional increment.
`x` and `y` must be exactly one character, and if given, `incr` must be a
number.

For compatibility reasons, the string `${` is not eligible for brace expansion.
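
To illustrate (a sketch traced against this package's `expand`): the
`${...}` section is passed through verbatim while neighboring brace
sets still expand.

```js
expand('${a,b}/{c,d}')
// => ['${a,b}/c', '${a,b}/d']
```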

## Installation

With [npm](https://npmjs.org) do:

```bash
npm install brace-expansion
```

## Contributors

- [Julian Gruber](https://github.com/juliangruber)
- [Isaac Z. Schlueter](https://github.com/isaacs)

## Sponsors

This module is proudly supported by my [Sponsors](https://github.com/juliangruber/sponsors)!

Do you want to support modules like this to improve their quality, stability and weigh in on new features? Then please consider donating to my [Patreon](https://www.patreon.com/juliangruber). Not sure how much of my modules you're using? Try [feross/thanks](https://github.com/feross/thanks)!

## Security contact information

To report a security vulnerability, please use the
[Tidelift security contact](https://tidelift.com/security).
Tidelift will coordinate the fix and disclosure.

## License

(MIT)

Copyright (c) 2013 Julian Gruber <julian@juliangruber.com>

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
203
node_modules/cacache/node_modules/brace-expansion/index.js
generated
vendored
Normal file
@@ -0,0 +1,203 @@
var balanced = require('balanced-match');

module.exports = expandTop;

var escSlash = '\0SLASH'+Math.random()+'\0';
var escOpen = '\0OPEN'+Math.random()+'\0';
var escClose = '\0CLOSE'+Math.random()+'\0';
var escComma = '\0COMMA'+Math.random()+'\0';
var escPeriod = '\0PERIOD'+Math.random()+'\0';

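// Interpret a range endpoint: a decimal string parses to its integer
// value, anything else (a single letter) falls back to its char code.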
function numeric(str) {
  return parseInt(str, 10) == str
    ? parseInt(str, 10)
    : str.charCodeAt(0);
}

function escapeBraces(str) {
  return str.split('\\\\').join(escSlash)
            .split('\\{').join(escOpen)
            .split('\\}').join(escClose)
            .split('\\,').join(escComma)
            .split('\\.').join(escPeriod);
}

function unescapeBraces(str) {
  return str.split(escSlash).join('\\')
            .split(escOpen).join('{')
            .split(escClose).join('}')
            .split(escComma).join(',')
            .split(escPeriod).join('.');
}


// Basically just str.split(","), but handling cases
// where we have nested braced sections, which should be
// treated as individual members, like {a,{b,c},d}
function parseCommaParts(str) {
  if (!str)
    return [''];

  var parts = [];
  var m = balanced('{', '}', str);

  if (!m)
    return str.split(',');

  var pre = m.pre;
  var body = m.body;
  var post = m.post;
  var p = pre.split(',');

  p[p.length-1] += '{' + body + '}';
  var postParts = parseCommaParts(post);
  if (post.length) {
    p[p.length-1] += postParts.shift();
    p.push.apply(p, postParts);
  }

  parts.push.apply(parts, p);

  return parts;
}

function expandTop(str) {
  if (!str)
    return [];

  // I don't know why Bash 4.3 does this, but it does.
  // Anything starting with {} will have the first two bytes preserved
  // but *only* at the top level, so {},a}b will not expand to anything,
  // but a{},b}c will be expanded to [a}c,abc].
  // One could argue that this is a bug in Bash, but since the goal of
  // this module is to match Bash's rules, we escape a leading {}
  if (str.substr(0, 2) === '{}') {
    str = '\\{\\}' + str.substr(2);
  }

  return expand(escapeBraces(str), true).map(unescapeBraces);
}

function embrace(str) {
  return '{' + str + '}';
}
function isPadded(el) {
  return /^-?0\d/.test(el);
}

function lte(i, y) {
  return i <= y;
}
function gte(i, y) {
  return i >= y;
}

function expand(str, isTop) {
  var expansions = [];

  var m = balanced('{', '}', str);
  if (!m) return [str];

  // no need to expand pre, since it is guaranteed to be free of brace-sets
  var pre = m.pre;
  var post = m.post.length
    ? expand(m.post, false)
    : [''];

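  // a brace section immediately preceded by `$` is a `${}` expression:
  // it is not expanded, but re-emitted verbatim in front of every
  // expansion of the remainder (see the README's compatibility note)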
  if (/\$$/.test(m.pre)) {
    for (var k = 0; k < post.length; k++) {
      var expansion = pre+ '{' + m.body + '}' + post[k];
      expansions.push(expansion);
    }
  } else {
    var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
    var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
    var isSequence = isNumericSequence || isAlphaSequence;
    var isOptions = m.body.indexOf(',') >= 0;
    if (!isSequence && !isOptions) {
      // {a},b}
      if (m.post.match(/,.*\}/)) {
        str = m.pre + '{' + m.body + escClose + m.post;
        return expand(str);
      }
      return [str];
    }

    var n;
    if (isSequence) {
      n = m.body.split(/\.\./);
    } else {
      n = parseCommaParts(m.body);
      if (n.length === 1) {
        // x{{a,b}}y ==> x{a}y x{b}y
        n = expand(n[0], false).map(embrace);
        if (n.length === 1) {
          return post.map(function(p) {
            return m.pre + n[0] + p;
          });
        }
      }
    }

    // at this point, n is the parts, and we know it's not a comma set
    // with a single entry.
    var N;

    if (isSequence) {
      var x = numeric(n[0]);
      var y = numeric(n[1]);
      var width = Math.max(n[0].length, n[1].length)
      var incr = n.length == 3
        ? Math.abs(numeric(n[2]))
        : 1;
      var test = lte;
      var reverse = y < x;
      if (reverse) {
        incr *= -1;
        test = gte;
      }
      var pad = n.some(isPadded);

      N = [];

      for (var i = x; test(i, y); i += incr) {
        var c;
        if (isAlphaSequence) {
          c = String.fromCharCode(i);
          if (c === '\\')
            c = '';
        } else {
          c = String(i);
          if (pad) {
            var need = width - c.length;
            if (need > 0) {
              var z = new Array(need + 1).join('0');
              if (i < 0)
                c = '-' + z + c.slice(1);
              else
                c = z + c;
            }
          }
        }
        N.push(c);
      }
    } else {
      N = [];

      for (var j = 0; j < n.length; j++) {
        N.push.apply(N, expand(n[j], false));
      }
    }

    for (var j = 0; j < N.length; j++) {
      for (var k = 0; k < post.length; k++) {
        var expansion = pre + N[j] + post[k];
        if (!isTop || isSequence || expansion)
          expansions.push(expansion);
      }
    }
  }

  return expansions;
}
46
node_modules/cacache/node_modules/brace-expansion/package.json
generated
vendored
Normal file
@@ -0,0 +1,46 @@
{
  "name": "brace-expansion",
  "description": "Brace expansion as known from sh/bash",
  "version": "2.0.1",
  "repository": {
    "type": "git",
    "url": "git://github.com/juliangruber/brace-expansion.git"
  },
  "homepage": "https://github.com/juliangruber/brace-expansion",
  "main": "index.js",
  "scripts": {
    "test": "tape test/*.js",
    "gentest": "bash test/generate.sh",
    "bench": "matcha test/perf/bench.js"
  },
  "dependencies": {
    "balanced-match": "^1.0.0"
  },
  "devDependencies": {
    "@c4312/matcha": "^1.3.1",
    "tape": "^4.6.0"
  },
  "keywords": [],
  "author": {
    "name": "Julian Gruber",
    "email": "mail@juliangruber.com",
    "url": "http://juliangruber.com"
  },
  "license": "MIT",
  "testling": {
    "files": "test/*.js",
    "browsers": [
      "ie/8..latest",
      "firefox/20..latest",
      "firefox/nightly",
      "chrome/25..latest",
      "chrome/canary",
      "opera/12..latest",
      "opera/next",
      "safari/5.1..latest",
      "ipad/6.0..latest",
      "iphone/6.0..latest",
      "android-browser/4.2..latest"
    ]
  }
}
15
node_modules/cacache/node_modules/glob/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,15 @@
The ISC License

Copyright (c) 2009-2022 Isaac Z. Schlueter and Contributors

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
399
node_modules/cacache/node_modules/glob/README.md
generated
vendored
Normal file
@@ -0,0 +1,399 @@
# Glob

Match files using the patterns the shell uses, like stars and stuff.

[build status](https://travis-ci.org/isaacs/node-glob/) [appveyor build](https://ci.appveyor.com/project/isaacs/node-glob) [coverage](https://coveralls.io/github/isaacs/node-glob?branch=master)

This is a glob implementation in JavaScript. It uses the `minimatch`
library to do its matching.

## Usage

Install with npm

```
npm i glob
```

```javascript
var glob = require("glob")

// options is optional
glob("**/*.js", options, function (er, files) {
  // files is an array of filenames.
  // If the `nonull` option is set, and nothing
  // was found, then files is ["**/*.js"]
  // er is an error object or null.
})
```

## Glob Primer

"Globs" are the patterns you type when you do stuff like `ls *.js` on
the command line, or put `build/*` in a `.gitignore` file.

Before parsing the path part patterns, braced sections are expanded
into a set. Braced sections start with `{` and end with `}`, with any
number of comma-delimited sections within. Braced sections may contain
slash characters, so `a{/b/c,bcd}` would expand into `a/b/c` and `abcd`.

The following characters have special magic meaning when used in a
path portion:

* `*` Matches 0 or more characters in a single path portion
* `?` Matches 1 character
* `[...]` Matches a range of characters, similar to a RegExp range.
  If the first character of the range is `!` or `^` then it matches
  any character not in the range.
* `!(pattern|pattern|pattern)` Matches anything that does not match
  any of the patterns provided.
* `?(pattern|pattern|pattern)` Matches zero or one occurrence of the
  patterns provided.
* `+(pattern|pattern|pattern)` Matches one or more occurrences of the
  patterns provided.
* `*(a|b|c)` Matches zero or more occurrences of the patterns provided
* `@(pattern|pat*|pat?erN)` Matches exactly one of the patterns
  provided
* `**` If a "globstar" is alone in a path portion, then it matches
  zero or more directories and subdirectories searching for matches.
  It does not crawl symlinked directories.

### Dots

If a file or directory path portion has a `.` as the first character,
then it will not match any glob pattern unless that pattern's
corresponding path part also has a `.` as its first character.

For example, the pattern `a/.*/c` would match the file at `a/.b/c`.
However the pattern `a/*/c` would not, because `*` does not start with
a dot character.

You can make glob treat dots as normal characters by setting
`dot:true` in the options.
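
To make that concrete, a small sketch (assuming the `a/.b/c` layout from
the paragraph above exists on disk):

```javascript
glob.sync("a/*/c")                // => [] ("*" will not match ".b")
glob.sync("a/.*/c")               // => ["a/.b/c"]
glob.sync("a/*/c", { dot: true }) // => ["a/.b/c"]
```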

### Basename Matching

If you set `matchBase:true` in the options, and the pattern has no
slashes in it, then it will seek for any file anywhere in the tree
with a matching basename. For example, `*.js` would match
`test/simple/basic.js`.
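
For instance (a hedged sketch; the file layout is an assumption):

```javascript
glob("*.js", { matchBase: true }, function (er, files) {
  // matches e.g. "test/simple/basic.js" as well as top-level "index.js"
})
```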

### Empty Sets

If no matching files are found, then an empty array is returned. This
differs from the shell, where the pattern itself is returned. For
example:

    $ echo a*s*d*f
    a*s*d*f

To get the bash-style behavior, set the `nonull:true` in the options.
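
A minimal sketch of the difference:

```javascript
glob.sync("a*s*d*f")                   // => []
glob.sync("a*s*d*f", { nonull: true }) // => ["a*s*d*f"]
```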

### See Also:

* `man sh`
* `man bash` (Search for "Pattern Matching")
* `man 3 fnmatch`
* `man 5 gitignore`
* [minimatch documentation](https://github.com/isaacs/minimatch)

## glob.hasMagic(pattern, [options])

Returns `true` if there are any special characters in the pattern, and
`false` otherwise.

Note that the options affect the results. If `noext:true` is set in
the options object, then `+(a|b)` will not be considered a magic
pattern. If the pattern has a brace expansion, like `a/{b/c,x/y}`
then that is considered magical, unless `nobrace:true` is set in the
options.
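
Restating those rules as code:

```javascript
glob.hasMagic("a/b/c")                          // false - plain path
glob.hasMagic("a/{b/c,x/y}")                    // true  - brace expansion
glob.hasMagic("a/{b/c,x/y}", { nobrace: true }) // false
glob.hasMagic("+(a|b)", { noext: true })        // false
```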

## glob(pattern, [options], cb)

* `pattern` `{String}` Pattern to be matched
* `options` `{Object}`
* `cb` `{Function}`
  * `err` `{Error | null}`
  * `matches` `{Array<String>}` filenames found matching the pattern

Perform an asynchronous glob search.

## glob.sync(pattern, [options])

* `pattern` `{String}` Pattern to be matched
* `options` `{Object}`
* return: `{Array<String>}` filenames found matching the pattern

Perform a synchronous glob search.
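
For example:

```javascript
var files = glob.sync("**/*.js", { ignore: "node_modules/**" })
```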

## Class: glob.Glob

Create a Glob object by instantiating the `glob.Glob` class.

```javascript
var Glob = require("glob").Glob
var mg = new Glob(pattern, options, cb)
```

It's an EventEmitter, and starts walking the filesystem to find matches
immediately.

### new glob.Glob(pattern, [options], [cb])

* `pattern` `{String}` pattern to search for
* `options` `{Object}`
* `cb` `{Function}` Called when an error occurs, or matches are found
  * `err` `{Error | null}`
  * `matches` `{Array<String>}` filenames found matching the pattern

Note that if the `sync` flag is set in the options, then matches will
be immediately available on the `g.found` member.

### Properties

* `minimatch` The minimatch object that the glob uses.
* `options` The options object passed in.
* `aborted` Boolean which is set to true when calling `abort()`. There
  is no way at this time to continue a glob search after aborting, but
  you can re-use the statCache to avoid having to duplicate syscalls.
* `cache` Convenience object. Each field has the following possible
  values:
  * `false` - Path does not exist
  * `true` - Path exists
  * `'FILE'` - Path exists, and is not a directory
  * `'DIR'` - Path exists, and is a directory
  * `[file, entries, ...]` - Path exists, is a directory, and the
    array value is the results of `fs.readdir`
* `statCache` Cache of `fs.stat` results, to prevent statting the same
  path multiple times.
* `symlinks` A record of which paths are symbolic links, which is
  relevant in resolving `**` patterns.
* `realpathCache` An optional object which is passed to `fs.realpath`
  to minimize unnecessary syscalls. It is stored on the instantiated
  Glob object, and may be re-used.

### Events

* `end` When the matching is finished, this is emitted with all the
  matches found. If the `nonull` option is set, and no match was found,
  then the `matches` list contains the original pattern. The matches
  are sorted, unless the `nosort` flag is set.
* `match` Every time a match is found, this is emitted with the specific
  thing that matched. It is not deduplicated or resolved to a realpath.
* `error` Emitted when an unexpected error is encountered, or whenever
  any fs error occurs if `options.strict` is set.
* `abort` When `abort()` is called, this event is raised.

### Methods

* `pause` Temporarily stop the search
* `resume` Resume the search
* `abort` Stop the search forever

### Options

All the options that can be passed to Minimatch can also be passed to
Glob to change pattern matching behavior. Also, some have been added,
or have glob-specific ramifications.

All options are false by default, unless otherwise noted.

All options are added to the Glob object, as well.

If you are running many `glob` operations, you can pass a Glob object
as the `options` argument to a subsequent operation to shortcut some
`stat` and `readdir` calls. At the very least, you may pass in shared
`symlinks`, `statCache`, `realpathCache`, and `cache` options, so that
parallel glob operations will be sped up by sharing information about
the filesystem.

* `cwd` The current working directory in which to search. Defaults
  to `process.cwd()`. This option is always coerced to use
  forward-slashes as a path separator, because it is not tested
  as a glob pattern, so there is no need to escape anything.
* `root` The place where patterns starting with `/` will be mounted
  onto. Defaults to `path.resolve(options.cwd, "/")` (`/` on Unix
  systems, and `C:\` or some such on Windows.) This option is
  always coerced to use forward-slashes as a path separator,
  because it is not tested as a glob pattern, so there is no need
  to escape anything.
* `windowsPathsNoEscape` Use `\\` as a path separator _only_, and
  _never_ as an escape character. If set, all `\\` characters
  are replaced with `/` in the pattern. Note that this makes it
  **impossible** to match against paths containing literal glob
  pattern characters, but allows matching with patterns constructed
  using `path.join()` and `path.resolve()` on Windows platforms,
  mimicking the (buggy!) behavior of Glob v7 and before on
  Windows. Please use with caution, and be mindful of [the caveat
  below about Windows paths](#windows). (For legacy reasons,
  this is also set if `allowWindowsEscape` is set to the exact
  value `false`.)
* `dot` Include `.dot` files in normal matches and `globstar` matches.
  Note that an explicit dot in a portion of the pattern will always
  match dot files.
* `nomount` By default, a pattern starting with a forward-slash will be
  "mounted" onto the root setting, so that a valid filesystem path is
  returned. Set this flag to disable that behavior.
* `mark` Add a `/` character to directory matches. Note that this
  requires additional stat calls.
* `nosort` Don't sort the results.
* `stat` Set to true to stat *all* results. This reduces performance
  somewhat, and is completely unnecessary, unless `readdir` is presumed
  to be an untrustworthy indicator of file existence.
* `silent` When an unusual error is encountered when attempting to
  read a directory, a warning will be printed to stderr. Set the
  `silent` option to true to suppress these warnings.
* `strict` When an unusual error is encountered when attempting to
  read a directory, the process will just continue on in search of
  other matches. Set the `strict` option to raise an error in these
  cases.
* `cache` See `cache` property above. Pass in a previously generated
  cache object to save some fs calls.
* `statCache` A cache of results of filesystem information, to prevent
  unnecessary stat calls. While it should not normally be necessary
  to set this, you may pass the statCache from one glob() call to the
  options object of another, if you know that the filesystem will not
  change between calls. (See "Race Conditions" below.)
* `symlinks` A cache of known symbolic links. You may pass in a
  previously generated `symlinks` object to save `lstat` calls when
  resolving `**` matches.
* `sync` DEPRECATED: use `glob.sync(pattern, opts)` instead.
* `nounique` In some cases, brace-expanded patterns can result in the
  same file showing up multiple times in the result set. By default,
  this implementation prevents duplicates in the result set. Set this
  flag to disable that behavior.
* `nonull` Set to never return an empty set, instead returning a set
  containing the pattern itself. This is the default in glob(3).
* `debug` Set to enable debug logging in minimatch and glob.
* `nobrace` Do not expand `{a,b}` and `{1..3}` brace sets.
* `noglobstar` Do not match `**` against multiple filenames. (Ie,
  treat it as a normal `*` instead.)
* `noext` Do not match `+(a|b)` "extglob" patterns.
* `nocase` Perform a case-insensitive match. Note: on
  case-insensitive filesystems, non-magic patterns will match by
  default, since `stat` and `readdir` will not raise errors.
* `matchBase` Perform a basename-only match if the pattern does not
  contain any slash characters. That is, `*.js` would be treated as
  equivalent to `**/*.js`, matching all js files in all directories.
* `nodir` Do not match directories, only files. (Note: to match
  *only* directories, simply put a `/` at the end of the pattern.)
* `ignore` Add a pattern or an array of glob patterns to exclude matches.
  Note: `ignore` patterns are *always* in `dot:true` mode, regardless
  of any other settings.
* `follow` Follow symlinked directories when expanding `**` patterns.
  Note that this can result in a lot of duplicate references in the
  presence of cyclic links.
* `realpath` Set to true to call `fs.realpath` on all of the results.
  In the case of a symlink that cannot be resolved, the full absolute
  path to the matched entry is returned (though it will usually be a
  broken symlink)
* `absolute` Set to true to always receive absolute paths for matched
  files. Unlike `realpath`, this also affects the values returned in
  the `match` event.
* `fs` File-system object with Node's `fs` API. By default, the built-in
  `fs` module will be used. Set to a volume provided by a library like
  `memfs` to avoid using the "real" file-system.
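
Tying a few of these together, a hedged sketch (the project layout and
`cwd` value are assumptions, not part of the API):

```javascript
glob("**/*.js", {
  cwd: "/some/project",      // search relative to this directory
  ignore: "node_modules/**", // ignore patterns are always dot:true
  nodir: true,               // report only files, never directories
  nosort: true,              // skip the final sort
}, function (er, files) {
  // `files` holds paths relative to `cwd`
})
```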

## Comparisons to other fnmatch/glob implementations

While strict compliance with the existing standards is a worthwhile
goal, some discrepancies exist between node-glob and other
implementations, and are intentional.

The double-star character `**` is supported by default, unless the
`noglobstar` flag is set. This is supported in the manner of bsdglob
and bash 4.3, where `**` only has special significance if it is the only
thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but
`a/**b` will not.

Note that symlinked directories are not crawled as part of a `**`,
though their contents may match against subsequent portions of the
pattern. This prevents infinite loops and duplicates and the like.

If an escaped pattern has no matches, and the `nonull` flag is set,
then glob returns the pattern as-provided, rather than
interpreting the character escapes. For example,
`glob.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than
`"*a?"`. This is akin to setting the `nullglob` option in bash, except
that it does not resolve escaped pattern characters.

If brace expansion is not disabled, then it is performed before any
other interpretation of the glob pattern. Thus, a pattern like
`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded
**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are
checked for validity. Since those two are valid, matching proceeds.

### Comments and Negation

Previously, this module let you mark a pattern as a "comment" if it
started with a `#` character, or a "negated" pattern if it started
with a `!` character.

These options were deprecated in version 5, and removed in version 6.

To specify things that should not match, use the `ignore` option.

## Windows

**Please only use forward-slashes in glob expressions.**

Though windows uses either `/` or `\` as its path separator, only `/`
characters are used by this glob implementation. You must use
forward-slashes **only** in glob expressions. Back-slashes will always
be interpreted as escape characters, not path separators.

Results from absolute patterns such as `/foo/*` are mounted onto the
root setting using `path.join`. On windows, this will by default result
in `/foo/*` matching `C:\foo\bar.txt`.

To automatically coerce all `\` characters to `/` in pattern
strings, **thus making it impossible to escape literal glob
characters**, you may set the `windowsPathsNoEscape` option to
`true`.
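
For instance, a hedged sketch of building a Windows-safe pattern (the
drive and directory are assumptions):

```javascript
var path = require("path")

// path.join produces back-slashes on Windows; windowsPathsNoEscape
// coerces them to forward-slashes instead of treating them as escapes.
glob(path.join("C:", "foo", "*"), { windowsPathsNoEscape: true }, function (er, files) {
  // matches e.g. "C:/foo/bar.txt"
})
```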

## Race Conditions

Glob searching, by its very nature, is susceptible to race conditions,
since it relies on directory walking and such.

As a result, it is possible that a file that exists when glob looks for
it may have been deleted or modified by the time it returns the result.

As part of its internal implementation, this program caches all stat
and readdir calls that it makes, in order to cut down on system
overhead. However, this also makes it even more susceptible to races,
especially if the cache or statCache objects are reused between glob
calls.

Users are thus advised not to use a glob result as a guarantee of
filesystem state in the face of rapid changes. For the vast majority
of operations, this is never a problem.

## Glob Logo
Glob's logo was created by [Tanya Brassie](http://tanyabrassie.com/). Logo files can be found [here](https://github.com/isaacs/node-glob/tree/master/logo).

The logo is licensed under a [Creative Commons Attribution-ShareAlike 4.0 International License](https://creativecommons.org/licenses/by-sa/4.0/).

## Contributing

Any change to behavior (including bugfixes) must come with a test.

Patches that fail tests or reduce performance will be rejected.

```
# to run tests
npm test

# to re-generate test fixtures
npm run test-regen

# to benchmark against bash/zsh
npm run bench

# to profile javascript
npm run prof
```
244
node_modules/cacache/node_modules/glob/common.js
generated
vendored
Normal file
@@ -0,0 +1,244 @@
exports.setopts = setopts
exports.ownProp = ownProp
exports.makeAbs = makeAbs
exports.finish = finish
exports.mark = mark
exports.isIgnored = isIgnored
exports.childrenIgnored = childrenIgnored

function ownProp (obj, field) {
  return Object.prototype.hasOwnProperty.call(obj, field)
}

var fs = require("fs")
var path = require("path")
var minimatch = require("minimatch")
var isAbsolute = require("path").isAbsolute
var Minimatch = minimatch.Minimatch

function alphasort (a, b) {
  return a.localeCompare(b, 'en')
}

function setupIgnores (self, options) {
  self.ignore = options.ignore || []

  if (!Array.isArray(self.ignore))
    self.ignore = [self.ignore]

  if (self.ignore.length) {
    self.ignore = self.ignore.map(ignoreMap)
  }
}

// ignore patterns are always in dot:true mode.
function ignoreMap (pattern) {
  var gmatcher = null
  if (pattern.slice(-3) === '/**') {
    var gpattern = pattern.replace(/(\/\*\*)+$/, '')
    gmatcher = new Minimatch(gpattern, { dot: true })
  }

  return {
    matcher: new Minimatch(pattern, { dot: true }),
    gmatcher: gmatcher
  }
}

function setopts (self, pattern, options) {
  if (!options)
    options = {}

  // base-matching: just use globstar for that.
  if (options.matchBase && -1 === pattern.indexOf("/")) {
    if (options.noglobstar) {
      throw new Error("base matching requires globstar")
    }
    pattern = "**/" + pattern
  }

  self.windowsPathsNoEscape = !!options.windowsPathsNoEscape ||
    options.allowWindowsEscape === false
  if (self.windowsPathsNoEscape) {
    pattern = pattern.replace(/\\/g, '/')
  }

  self.silent = !!options.silent
  self.pattern = pattern
  self.strict = options.strict !== false
  self.realpath = !!options.realpath
  self.realpathCache = options.realpathCache || Object.create(null)
  self.follow = !!options.follow
  self.dot = !!options.dot
  self.mark = !!options.mark
  self.nodir = !!options.nodir
  if (self.nodir)
    self.mark = true
  self.sync = !!options.sync
  self.nounique = !!options.nounique
  self.nonull = !!options.nonull
  self.nosort = !!options.nosort
  self.nocase = !!options.nocase
  self.stat = !!options.stat
  self.noprocess = !!options.noprocess
  self.absolute = !!options.absolute
  self.fs = options.fs || fs

  self.maxLength = options.maxLength || Infinity
  self.cache = options.cache || Object.create(null)
  self.statCache = options.statCache || Object.create(null)
  self.symlinks = options.symlinks || Object.create(null)

  setupIgnores(self, options)

  self.changedCwd = false
  var cwd = process.cwd()
  if (!ownProp(options, "cwd"))
    self.cwd = path.resolve(cwd)
  else {
    self.cwd = path.resolve(options.cwd)
    self.changedCwd = self.cwd !== cwd
  }

  self.root = options.root || path.resolve(self.cwd, "/")
  self.root = path.resolve(self.root)

  // TODO: is an absolute `cwd` supposed to be resolved against `root`?
  // e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test')
  self.cwdAbs = isAbsolute(self.cwd) ? self.cwd : makeAbs(self, self.cwd)
  self.nomount = !!options.nomount

  if (process.platform === "win32") {
    self.root = self.root.replace(/\\/g, "/")
    self.cwd = self.cwd.replace(/\\/g, "/")
    self.cwdAbs = self.cwdAbs.replace(/\\/g, "/")
  }

  // disable comments and negation in Minimatch.
  // Note that they are not supported in Glob itself anyway.
  options.nonegate = true
  options.nocomment = true

  self.minimatch = new Minimatch(pattern, options)
  self.options = self.minimatch.options
}

function finish (self) {
  var nou = self.nounique
  var all = nou ? [] : Object.create(null)

  for (var i = 0, l = self.matches.length; i < l; i ++) {
    var matches = self.matches[i]
    if (!matches || Object.keys(matches).length === 0) {
      if (self.nonull) {
        // do like the shell, and spit out the literal glob
        var literal = self.minimatch.globSet[i]
        if (nou)
          all.push(literal)
        else
          all[literal] = true
      }
    } else {
      // had matches
      var m = Object.keys(matches)
      if (nou)
        all.push.apply(all, m)
      else
        m.forEach(function (m) {
          all[m] = true
        })
    }
  }

  if (!nou)
    all = Object.keys(all)

  if (!self.nosort)
    all = all.sort(alphasort)

  // at *some* point we statted all of these
  if (self.mark) {
    for (var i = 0; i < all.length; i++) {
      all[i] = self._mark(all[i])
    }
    if (self.nodir) {
      all = all.filter(function (e) {
        var notDir = !(/\/$/.test(e))
        var c = self.cache[e] || self.cache[makeAbs(self, e)]
        if (notDir && c)
          notDir = c !== 'DIR' && !Array.isArray(c)
        return notDir
      })
    }
  }

  if (self.ignore.length)
    all = all.filter(function(m) {
      return !isIgnored(self, m)
    })

  self.found = all
}

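// Decorate a match per the `mark` option: append a trailing slash to
// directories (using the readdir/stat cache), strip it from files, and
// keep both caches consistent under the renamed key.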
function mark (self, p) {
  var abs = makeAbs(self, p)
  var c = self.cache[abs]
  var m = p
  if (c) {
    var isDir = c === 'DIR' || Array.isArray(c)
    var slash = p.slice(-1) === '/'

    if (isDir && !slash)
      m += '/'
    else if (!isDir && slash)
      m = m.slice(0, -1)

    if (m !== p) {
      var mabs = makeAbs(self, m)
      self.statCache[mabs] = self.statCache[abs]
      self.cache[mabs] = self.cache[abs]
    }
  }

  return m
}

// lotta situps...
function makeAbs (self, f) {
  var abs = f
  if (f.charAt(0) === '/') {
    abs = path.join(self.root, f)
  } else if (isAbsolute(f) || f === '') {
    abs = f
  } else if (self.changedCwd) {
    abs = path.resolve(self.cwd, f)
  } else {
    abs = path.resolve(f)
  }

  if (process.platform === 'win32')
    abs = abs.replace(/\\/g, '/')

  return abs
}


// Return true, if pattern ends with globstar '**', for the accompanying parent directory.
// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with its contents
function isIgnored (self, path) {
  if (!self.ignore.length)
    return false

  return self.ignore.some(function(item) {
    return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path))
  })
}

function childrenIgnored (self, path) {
  if (!self.ignore.length)
    return false

  return self.ignore.some(function(item) {
    return !!(item.gmatcher && item.gmatcher.match(path))
  })
}
790
node_modules/cacache/node_modules/glob/glob.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
55
node_modules/cacache/node_modules/glob/package.json
generated
vendored
Normal file
@@ -0,0 +1,55 @@
{
  "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
  "name": "glob",
  "description": "a little globber",
  "version": "8.1.0",
  "repository": {
    "type": "git",
    "url": "git://github.com/isaacs/node-glob.git"
  },
  "main": "glob.js",
  "files": [
    "glob.js",
    "sync.js",
    "common.js"
  ],
  "engines": {
    "node": ">=12"
  },
  "dependencies": {
    "fs.realpath": "^1.0.0",
    "inflight": "^1.0.4",
    "inherits": "2",
    "minimatch": "^5.0.1",
    "once": "^1.3.0"
  },
  "devDependencies": {
    "memfs": "^3.2.0",
    "mkdirp": "0",
    "rimraf": "^2.2.8",
    "tap": "^16.0.1",
    "tick": "0.0.6"
  },
  "tap": {
    "before": "test/00-setup.js",
    "after": "test/zz-cleanup.js",
    "statements": 90,
    "branches": 90,
    "functions": 90,
    "lines": 90,
    "jobs": 1
  },
  "scripts": {
    "prepublish": "npm run benchclean",
    "profclean": "rm -f v8.log profile.txt",
    "test": "tap",
    "test-regen": "npm run profclean && TEST_REGEN=1 node test/00-setup.js",
    "bench": "bash benchmark.sh",
    "prof": "bash prof.sh && cat profile.txt",
    "benchclean": "node benchclean.js"
  },
  "license": "ISC",
  "funding": {
    "url": "https://github.com/sponsors/isaacs"
  }
}
486
node_modules/cacache/node_modules/glob/sync.js
generated
vendored
Normal file
@@ -0,0 +1,486 @@
module.exports = globSync
globSync.GlobSync = GlobSync

var rp = require('fs.realpath')
var minimatch = require('minimatch')
var Minimatch = minimatch.Minimatch
var Glob = require('./glob.js').Glob
var util = require('util')
var path = require('path')
var assert = require('assert')
var isAbsolute = require('path').isAbsolute
var common = require('./common.js')
var setopts = common.setopts
var ownProp = common.ownProp
var childrenIgnored = common.childrenIgnored
var isIgnored = common.isIgnored

function globSync (pattern, options) {
  if (typeof options === 'function' || arguments.length === 3)
    throw new TypeError('callback provided to sync glob\n'+
                        'See: https://github.com/isaacs/node-glob/issues/167')

  return new GlobSync(pattern, options).found
}

function GlobSync (pattern, options) {
  if (!pattern)
    throw new Error('must provide pattern')

  if (typeof options === 'function' || arguments.length === 3)
    throw new TypeError('callback provided to sync glob\n'+
                        'See: https://github.com/isaacs/node-glob/issues/167')

  if (!(this instanceof GlobSync))
    return new GlobSync(pattern, options)

  setopts(this, pattern, options)

  if (this.noprocess)
    return this

  var n = this.minimatch.set.length
  this.matches = new Array(n)
  for (var i = 0; i < n; i ++) {
    this._process(this.minimatch.set[i], i, false)
  }
  this._finish()
}

GlobSync.prototype._finish = function () {
  assert.ok(this instanceof GlobSync)
  if (this.realpath) {
    var self = this
    this.matches.forEach(function (matchset, index) {
      var set = self.matches[index] = Object.create(null)
      for (var p in matchset) {
        try {
          p = self._makeAbs(p)
          var real = rp.realpathSync(p, self.realpathCache)
          set[real] = true
        } catch (er) {
          if (er.syscall === 'stat')
            set[self._makeAbs(p)] = true
          else
            throw er
        }
      }
    })
  }
  common.finish(this)
}


GlobSync.prototype._process = function (pattern, index, inGlobStar) {
  assert.ok(this instanceof GlobSync)

  // Get the first [n] parts of pattern that are all strings.
  var n = 0
  while (typeof pattern[n] === 'string') {
    n ++
  }
  // now n is the index of the first one that is *not* a string.

  // See if there's anything else
  var prefix
  switch (n) {
    // if not, then this is rather simple
    case pattern.length:
      this._processSimple(pattern.join('/'), index)
      return

    case 0:
      // pattern *starts* with some non-trivial item.
      // going to readdir(cwd), but not include the prefix in matches.
      prefix = null
      break

    default:
      // pattern has some string bits in the front.
      // whatever it starts with, whether that's 'absolute' like /foo/bar,
      // or 'relative' like '../baz'
      prefix = pattern.slice(0, n).join('/')
      break
  }

  var remain = pattern.slice(n)

  // get the list of entries.
  var read
  if (prefix === null)
    read = '.'
  else if (isAbsolute(prefix) ||
      isAbsolute(pattern.map(function (p) {
        return typeof p === 'string' ? p : '[*]'
      }).join('/'))) {
    if (!prefix || !isAbsolute(prefix))
      prefix = '/' + prefix
    read = prefix
  } else
    read = prefix

  var abs = this._makeAbs(read)

  //if ignored, skip processing
  if (childrenIgnored(this, read))
    return

  var isGlobStar = remain[0] === minimatch.GLOBSTAR
  if (isGlobStar)
    this._processGlobStar(prefix, read, abs, remain, index, inGlobStar)
  else
    this._processReaddir(prefix, read, abs, remain, index, inGlobStar)
}


GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) {
  var entries = this._readdir(abs, inGlobStar)

  // if the abs isn't a dir, then nothing can match!
  if (!entries)
    return

  // It will only match dot entries if it starts with a dot, or if
  // dot is set.  Stuff like @(.foo|.bar) isn't allowed.
  var pn = remain[0]
  var negate = !!this.minimatch.negate
  var rawGlob = pn._glob
  var dotOk = this.dot || rawGlob.charAt(0) === '.'

  var matchedEntries = []
  for (var i = 0; i < entries.length; i++) {
    var e = entries[i]
    if (e.charAt(0) !== '.' || dotOk) {
      var m
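      // a negated minimatch pattern flips the per-entry test: keep
      // entries that do NOT match this pattern part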
      if (negate && !prefix) {
        m = !e.match(pn)
      } else {
        m = e.match(pn)
      }
      if (m)
        matchedEntries.push(e)
    }
  }

  var len = matchedEntries.length
  // If there are no matched entries, then nothing matches.
  if (len === 0)
    return

  // if this is the last remaining pattern bit, then no need for
  // an additional stat *unless* the user has specified mark or
  // stat explicitly.  We know they exist, since readdir returned
  // them.

  if (remain.length === 1 && !this.mark && !this.stat) {
    if (!this.matches[index])
      this.matches[index] = Object.create(null)

    for (var i = 0; i < len; i ++) {
      var e = matchedEntries[i]
      if (prefix) {
        if (prefix.slice(-1) !== '/')
          e = prefix + '/' + e
        else
          e = prefix + e
      }

      if (e.charAt(0) === '/' && !this.nomount) {
        e = path.join(this.root, e)
      }
      this._emitMatch(index, e)
    }
    // This was the last one, and no stats were needed
    return
  }

  // now test all matched entries as stand-ins for that part
  // of the pattern.
  remain.shift()
  for (var i = 0; i < len; i ++) {
    var e = matchedEntries[i]
    var newPattern
    if (prefix)
      newPattern = [prefix, e]
    else
      newPattern = [e]
    this._process(newPattern.concat(remain), index, inGlobStar)
  }
}


GlobSync.prototype._emitMatch = function (index, e) {
  if (isIgnored(this, e))
    return

  var abs = this._makeAbs(e)

  if (this.mark)
    e = this._mark(e)

  if (this.absolute) {
    e = abs
  }

  if (this.matches[index][e])
    return

  if (this.nodir) {
    var c = this.cache[abs]
    if (c === 'DIR' || Array.isArray(c))
      return
  }

  this.matches[index][e] = true

  if (this.stat)
    this._stat(e)
}


GlobSync.prototype._readdirInGlobStar = function (abs) {
  // follow all symlinked directories forever
  // just proceed as if this is a non-globstar situation
  if (this.follow)
    return this._readdir(abs, false)

  var entries
  var lstat
  var stat
  try {
    lstat = this.fs.lstatSync(abs)
  } catch (er) {
    if (er.code === 'ENOENT') {
      // lstat failed, doesn't exist
      return null
    }
  }

  var isSym = lstat && lstat.isSymbolicLink()
  this.symlinks[abs] = isSym

  // If it's not a symlink or a dir, then it's definitely a regular file.
  // don't bother doing a readdir in that case.
  if (!isSym && lstat && !lstat.isDirectory())
    this.cache[abs] = 'FILE'
  else
    entries = this._readdir(abs, false)

  return entries
}

GlobSync.prototype._readdir = function (abs, inGlobStar) {
  var entries

  if (inGlobStar && !ownProp(this.symlinks, abs))
    return this._readdirInGlobStar(abs)

  if (ownProp(this.cache, abs)) {
    var c = this.cache[abs]
    if (!c || c === 'FILE')
      return null

    if (Array.isArray(c))
      return c
  }

  try {
    return this._readdirEntries(abs, this.fs.readdirSync(abs))
  } catch (er) {
    this._readdirError(abs, er)
    return null
  }
}

GlobSync.prototype._readdirEntries = function (abs, entries) {
  // if we haven't asked to stat everything, then just
  // assume that everything in there exists, so we can avoid
  // having to stat it a second time.
  if (!this.mark && !this.stat) {
    for (var i = 0; i < entries.length; i ++) {
      var e = entries[i]
      if (abs === '/')
        e = abs + e
      else
        e = abs + '/' + e
      this.cache[e] = true
    }
  }

  this.cache[abs] = entries

  // mark and cache dir-ness
  return entries
}

GlobSync.prototype._readdirError = function (f, er) {
  // handle errors, and cache the information
  switch (er.code) {
    case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
    case 'ENOTDIR': // totally normal. means it *does* exist.
      var abs = this._makeAbs(f)
      this.cache[abs] = 'FILE'
      if (abs === this.cwdAbs) {
        var error = new Error(er.code + ' invalid cwd ' + this.cwd)
        error.path = this.cwd
        error.code = er.code
        throw error
      }
      break

    case 'ENOENT': // not terribly unusual
    case 'ELOOP':
    case 'ENAMETOOLONG':
    case 'UNKNOWN':
      this.cache[this._makeAbs(f)] = false
      break

    default: // some unusual error.  Treat as failure.
      this.cache[this._makeAbs(f)] = false
      if (this.strict)
        throw er
      if (!this.silent)
        console.error('glob error', er)
      break
  }
}

GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) {

  var entries = this._readdir(abs, inGlobStar)

  // no entries means not a dir, so it can never have matches
  // foo.txt/** doesn't match foo.txt
  if (!entries)
    return

  // test without the globstar, and with every child both below
  // and replacing the globstar.
  var remainWithoutGlobStar = remain.slice(1)
  var gspref = prefix ? [ prefix ] : []
  var noGlobStar = gspref.concat(remainWithoutGlobStar)

  // the noGlobStar pattern exits the inGlobStar state
  this._process(noGlobStar, index, false)

  var len = entries.length
  var isSym = this.symlinks[abs]

  // If it's a symlink, and we're in a globstar, then stop
  if (isSym && inGlobStar)
    return

  for (var i = 0; i < len; i++) {
    var e = entries[i]
    if (e.charAt(0) === '.' && !this.dot)
      continue

    // these two cases enter the inGlobStar state
    var instead = gspref.concat(entries[i], remainWithoutGlobStar)
    this._process(instead, index, true)

    var below = gspref.concat(entries[i], remain)
    this._process(below, index, true)
  }
}

GlobSync.prototype._processSimple = function (prefix, index) {
  // XXX review this.  Shouldn't it be doing the mounting etc
  // before doing stat?  kinda weird?
  var exists = this._stat(prefix)

  if (!this.matches[index])
    this.matches[index] = Object.create(null)

  // If it doesn't exist, then just mark the lack of results
  if (!exists)
    return

  if (prefix && isAbsolute(prefix) && !this.nomount) {
    var trail = /[\/\\]$/.test(prefix)
    if (prefix.charAt(0) === '/') {
      prefix = path.join(this.root, prefix)
    } else {
      prefix = path.resolve(this.root, prefix)
      if (trail)
        prefix += '/'
    }
  }

  if (process.platform === 'win32')
    prefix = prefix.replace(/\\/g, '/')

  // Mark this as a match
  this._emitMatch(index, prefix)
}

// Returns either 'DIR', 'FILE', or false
GlobSync.prototype._stat = function (f) {
  var abs = this._makeAbs(f)
  var needDir = f.slice(-1) === '/'

  if (f.length > this.maxLength)
    return false

  if (!this.stat && ownProp(this.cache, abs)) {
    var c = this.cache[abs]

    if (Array.isArray(c))
      c = 'DIR'

    // It exists, but maybe not how we need it
    if (!needDir || c === 'DIR')
      return c

    if (needDir && c === 'FILE')
      return false

    // otherwise we have to stat, because maybe c=true
    // if we know it exists, but not what it is.
  }

  var exists
  var stat = this.statCache[abs]
  if (!stat) {
    var lstat
    try {
      lstat = this.fs.lstatSync(abs)
    } catch (er) {
      if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {
        this.statCache[abs] = false
        return false
      }
    }

    if (lstat && lstat.isSymbolicLink()) {
      try {
        stat = this.fs.statSync(abs)
      } catch (er) {
        stat = lstat
      }
    } else {
      stat = lstat
    }
  }

  this.statCache[abs] = stat

  var c = true
  if (stat)
    c = stat.isDirectory() ? 'DIR' : 'FILE'

  this.cache[abs] = this.cache[abs] || c

  if (needDir && c === 'FILE')
    return false

  return c
}

GlobSync.prototype._mark = function (p) {
  return common.mark(this, p)
}

GlobSync.prototype._makeAbs = function (f) {
  return common.makeAbs(this, f)
}
15
node_modules/cacache/node_modules/minimatch/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,15 @@
The ISC License

Copyright (c) 2011-2023 Isaac Z. Schlueter and Contributors

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
259
node_modules/cacache/node_modules/minimatch/README.md
generated
vendored
Normal file
@@ -0,0 +1,259 @@
# minimatch

A minimal matching utility.

[![Build Status](https://travis-ci.org/isaacs/minimatch.svg?branch=master)](http://travis-ci.org/isaacs/minimatch)

This is the matching library used internally by npm.

It works by converting glob expressions into JavaScript `RegExp`
objects.

## Usage

```javascript
var minimatch = require("minimatch")

minimatch("bar.foo", "*.foo") // true!
minimatch("bar.foo", "*.bar") // false!
minimatch("bar.foo", "*.+(bar|foo)", { debug: true }) // true, and noisy!
```

## Features

Supports these glob features:

* Brace Expansion
* Extended glob matching
* "Globstar" `**` matching

See:

* `man sh`
* `man bash`
* `man 3 fnmatch`
* `man 5 gitignore`

## Windows

**Please only use forward-slashes in glob expressions.**

Though windows uses either `/` or `\` as its path separator, only `/`
characters are used by this glob implementation. You must use
forward-slashes **only** in glob expressions. Back-slashes in patterns
will always be interpreted as escape characters, not path separators.

Note that `\` or `/` _will_ be interpreted as path separators in paths on
Windows, and will match against `/` in glob expressions.

So just always use `/` in patterns.

## Minimatch Class

Create a minimatch object by instantiating the `minimatch.Minimatch` class.

```javascript
var Minimatch = require("minimatch").Minimatch
var mm = new Minimatch(pattern, options)
```

### Properties

* `pattern` The original pattern the minimatch object represents.
* `options` The options supplied to the constructor.
* `set` A 2-dimensional array of regexp or string expressions.
  Each row in the
  array corresponds to a brace-expanded pattern. Each item in the row
  corresponds to a single path-part. For example, the pattern
  `{a,b/c}/d` would expand to a set of patterns like:

        [ [ a, d ]
        , [ b, c, d ] ]

  If a portion of the pattern doesn't have any "magic" in it
  (that is, it's something like `"foo"` rather than `fo*o?`), then it
  will be left as a string rather than converted to a regular
  expression.

* `regexp` Created by the `makeRe` method. A single regular expression
  expressing the entire pattern. This is useful in cases where you wish
  to use the pattern somewhat like `fnmatch(3)` with `FNM_PATH` enabled.
* `negate` True if the pattern is negated.
* `comment` True if the pattern is a comment.
* `empty` True if the pattern is `""`.

### Methods

* `makeRe` Generate the `regexp` member if necessary, and return it.
  Will return `false` if the pattern is invalid.
* `match(fname)` Return true if the filename matches the pattern, or
  false otherwise.
* `matchOne(fileArray, patternArray, partial)` Take a `/`-split
  filename, and match it against a single row in the `regExpSet`. This
  method is mainly for internal use, but is exposed so that it can be
  used by a glob-walker that needs to avoid excessive filesystem calls.

All other methods are internal, and will be called as necessary.

### minimatch(path, pattern, options)

Main export. Tests a path against the pattern using the options.

```javascript
var isJS = minimatch(file, "*.js", { matchBase: true })
```

### minimatch.filter(pattern, options)

Returns a function that tests its
supplied argument, suitable for use with `Array.filter`. Example:

```javascript
var javascripts = fileList.filter(minimatch.filter("*.js", {matchBase: true}))
```

### minimatch.match(list, pattern, options)

Match against the list of
files, in the style of fnmatch or glob. If nothing is matched, and
options.nonull is set, then return a list containing the pattern itself.

```javascript
var javascripts = minimatch.match(fileList, "*.js", {matchBase: true})
```

### minimatch.makeRe(pattern, options)

Make a regular expression object from the pattern.
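For instance, a small illustrative sketch (expected results shown as comments):

```javascript
var minimatch = require("minimatch")

var re = minimatch.makeRe("*.js") // a RegExp, or false if the pattern is invalid
re.test("foo.js") // true
re.test("foo.txt") // false
```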
## Options

All options are `false` by default.

### debug

Dump a ton of stuff to stderr.

### nobrace

Do not expand `{a,b}` and `{1..3}` brace sets.

### noglobstar

Disable `**` matching against multiple folder names.

### dot

Allow patterns to match filenames starting with a period, even if
the pattern does not explicitly have a period in that spot.

Note that by default, `a/**/b` will **not** match `a/.d/b`, unless `dot`
is set.

### noext

Disable "extglob" style patterns like `+(a|b)`.

### nocase

Perform a case-insensitive match.

### nonull

When a match is not found by `minimatch.match`, return a list containing
the pattern itself if this option is set. When not set, an empty list
is returned if there are no matches.
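An illustrative sketch of that behavior:

```javascript
var minimatch = require("minimatch")

minimatch.match(["a.txt"], "*.js")                   // => []
minimatch.match(["a.txt"], "*.js", { nonull: true }) // => ["*.js"]
```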
### matchBase

If set, then patterns without slashes will be matched
against the basename of the path if it contains slashes. For example,
`a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`.
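Or, as code (a sketch of the example above):

```javascript
var minimatch = require("minimatch")

minimatch("/xyz/123/acb", "a?b", { matchBase: true }) // true
minimatch("/xyz/acb/123", "a?b", { matchBase: true }) // false
```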
### nocomment

Suppress the behavior of treating `#` at the start of a pattern as a
comment.

### nonegate

Suppress the behavior of treating a leading `!` character as negation.

### flipNegate

Returns from negate expressions the same as if they were not negated.
(Ie, true on a hit, false on a miss.)

### partial

Compare a partial path to a pattern. As long as the parts of the path that
are present are not contradicted by the pattern, it will be treated as a
match. This is useful in applications where you're walking through a
folder structure, and don't yet have the full path, but want to ensure that
you do not walk down paths that can never be a match.

For example,

```js
minimatch('/a/b', '/a/*/c/d', { partial: true }) // true, might be /a/b/c/d
minimatch('/a/b', '/**/d', { partial: true }) // true, might be /a/b/.../d
minimatch('/x/y/z', '/a/**/z', { partial: true }) // false, because x !== a
```

### windowsPathsNoEscape

Use `\\` as a path separator _only_, and _never_ as an escape
character. If set, all `\\` characters are replaced with `/` in
the pattern. Note that this makes it **impossible** to match
against paths containing literal glob pattern characters, but
allows matching with patterns constructed using `path.join()` and
`path.resolve()` on Windows platforms, mimicking the (buggy!)
behavior of earlier versions on Windows. Please use with
caution, and be mindful of [the caveat about Windows
paths](#windows).

For legacy reasons, this is also set if
`options.allowWindowsEscape` is set to the exact value `false`.
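For example, a sketch of matching against a pattern built with `path.win32.join()`
(used here so the snippet behaves the same on any platform):

```javascript
var path = require("path")
var minimatch = require("minimatch")

// "logs\\*.txt" -- the backslash would normally be an escape character
var pattern = path.win32.join("logs", "*.txt")

minimatch("logs/error.txt", pattern)                                 // false
minimatch("logs/error.txt", pattern, { windowsPathsNoEscape: true }) // true
```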
## Comparisons to other fnmatch/glob implementations

While strict compliance with the existing standards is a worthwhile
goal, some discrepancies exist between minimatch and other
implementations, and are intentional.

If the pattern starts with a `!` character, then it is negated. Set the
`nonegate` flag to suppress this behavior, and treat leading `!`
characters normally. This is perhaps relevant if you wish to start the
pattern with a negative extglob pattern like `!(a|B)`. Multiple `!`
characters at the start of a pattern will negate the pattern multiple
times.
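A short sketch of the negation rules described above:

```javascript
var minimatch = require("minimatch")

minimatch("bar.foo", "!*.foo")  // false: negated, and bar.foo matches *.foo
minimatch("bar.txt", "!*.foo")  // true: bar.txt does not match *.foo
minimatch("bar.foo", "!!*.foo") // true: double negation cancels out
```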
If a pattern starts with `#`, then it is treated as a comment, and
will not match anything. Use `\#` to match a literal `#` at the
start of a line, or set the `nocomment` flag to suppress this behavior.

The double-star character `**` is supported by default, unless the
`noglobstar` flag is set. This is supported in the manner of bsdglob
and bash 4.1, where `**` only has special significance if it is the only
thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but
`a/**b` will not.
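As a sketch of the globstar rule:

```javascript
var minimatch = require("minimatch")

minimatch("a/x/y/b", "a/**/b") // true: ** spans any number of path parts
minimatch("a/x/y/b", "a/**b")  // false: ** is only special as a whole path part
```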
If an escaped pattern has no matches, and the `nonull` flag is set,
then minimatch.match returns the pattern as-provided, rather than
interpreting the character escapes. For example,
`minimatch.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than
`"*a?"`. This is akin to setting the `nullglob` option in bash, except
that it does not resolve escaped pattern characters.

If brace expansion is not disabled, then it is performed before any
other interpretation of the glob pattern. Thus, a pattern like
`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded
**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are
checked for validity. Since those two are valid, matching proceeds.
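A minimal sketch of brace expansion happening before anything else:

```javascript
var minimatch = require("minimatch")

// "a{b,c}" expands to a set containing "ab" and "ac" before matching
minimatch("ab", "a{b,c}") // true
minimatch("ac", "a{b,c}") // true
minimatch("ad", "a{b,c}") // false
```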
Note that `fnmatch(3)` in libc is an extremely naive string comparison
matcher, which does not do anything special for slashes. This library is
designed to be used in glob searching and file walkers, and so it does do
special things with `/`. Thus, `foo*` will not match `foo/bar` in this
library, even though it would in `fnmatch(3)`.
4
node_modules/cacache/node_modules/minimatch/lib/path.js
generated
vendored
Normal file
@@ -0,0 +1,4 @@
const isWindows = typeof process === 'object' &&
  process &&
  process.platform === 'win32'
module.exports = isWindows ? { sep: '\\' } : { sep: '/' }
944
node_modules/cacache/node_modules/minimatch/minimatch.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
35
node_modules/cacache/node_modules/minimatch/package.json
generated
vendored
Normal file
@@ -0,0 +1,35 @@
{
  "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me)",
  "name": "minimatch",
  "description": "a glob matcher in javascript",
  "publishConfig": {
    "tag": "legacy-v5"
  },
  "version": "5.1.6",
  "repository": {
    "type": "git",
    "url": "git://github.com/isaacs/minimatch.git"
  },
  "main": "minimatch.js",
  "scripts": {
    "test": "tap",
    "snap": "tap",
    "preversion": "npm test",
    "postversion": "npm publish",
    "prepublishOnly": "git push origin --follow-tags"
  },
  "engines": {
    "node": ">=10"
  },
  "dependencies": {
    "brace-expansion": "^2.0.1"
  },
  "devDependencies": {
    "tap": "^16.3.2"
  },
  "license": "ISC",
  "files": [
    "minimatch.js",
    "lib"
  ]
}
67
node_modules/cacache/node_modules/p-map/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,67 @@
declare namespace pMap {
	interface Options {
		/**
		Number of concurrently pending promises returned by `mapper`.

		Must be an integer from 1 and up or `Infinity`.

		@default Infinity
		*/
		readonly concurrency?: number;

		/**
		When set to `false`, instead of stopping when a promise rejects, it will wait for all the promises to settle and then reject with an [aggregated error](https://github.com/sindresorhus/aggregate-error) containing all the errors from the rejected promises.

		@default true
		*/
		readonly stopOnError?: boolean;
	}

	/**
	Function which is called for every item in `input`. Expected to return a `Promise` or value.

	@param element - Iterated element.
	@param index - Index of the element in the source array.
	*/
	type Mapper<Element = any, NewElement = unknown> = (
		element: Element,
		index: number
	) => NewElement | Promise<NewElement>;
}

/**
@param input - Iterated over concurrently in the `mapper` function.
@param mapper - Function which is called for every item in `input`. Expected to return a `Promise` or value.
@returns A `Promise` that is fulfilled when all promises in `input` and ones returned from `mapper` are fulfilled, or rejects if any of the promises reject. The fulfilled value is an `Array` of the fulfilled values returned from `mapper` in `input` order.

@example
```
import pMap = require('p-map');
import got = require('got');

const sites = [
	getWebsiteFromUsername('https://sindresorhus'), //=> Promise
	'https://ava.li',
	'https://github.com'
];

(async () => {
	const mapper = async site => {
		const {requestUrl} = await got.head(site);
		return requestUrl;
	};

	const result = await pMap(sites, mapper, {concurrency: 2});

	console.log(result);
	//=> ['https://sindresorhus.com/', 'https://ava.li/', 'https://github.com/']
})();
```
*/
declare function pMap<Element, NewElement>(
	input: Iterable<Element>,
	mapper: pMap.Mapper<Element, NewElement>,
	options?: pMap.Options
): Promise<NewElement[]>;

export = pMap;
81
node_modules/cacache/node_modules/p-map/index.js
generated
vendored
Normal file
@@ -0,0 +1,81 @@
'use strict';
const AggregateError = require('aggregate-error');

module.exports = async (
	iterable,
	mapper,
	{
		concurrency = Infinity,
		stopOnError = true
	} = {}
) => {
	return new Promise((resolve, reject) => {
		if (typeof mapper !== 'function') {
			throw new TypeError('Mapper function is required');
		}

		if (!((Number.isSafeInteger(concurrency) || concurrency === Infinity) && concurrency >= 1)) {
			throw new TypeError(`Expected \`concurrency\` to be an integer from 1 and up or \`Infinity\`, got \`${concurrency}\` (${typeof concurrency})`);
		}

		const result = [];
		const errors = [];
		const iterator = iterable[Symbol.iterator]();
		let isRejected = false;
		let isIterableDone = false;
		let resolvingCount = 0;
		let currentIndex = 0;

		// Pull the next item off the iterator and start mapping it. Each
		// completed mapper call invokes `next` again, so the number of
		// in-flight promises stays at the concurrency limit.
		const next = () => {
			if (isRejected) {
				return;
			}

			const nextItem = iterator.next();
			const index = currentIndex;
			currentIndex++;

			if (nextItem.done) {
				isIterableDone = true;

				// Only settle once the last in-flight mapper has finished.
				if (resolvingCount === 0) {
					if (!stopOnError && errors.length !== 0) {
						reject(new AggregateError(errors));
					} else {
						resolve(result);
					}
				}

				return;
			}

			resolvingCount++;

			(async () => {
				try {
					const element = await nextItem.value;
					result[index] = await mapper(element, index);
					resolvingCount--;
					next();
				} catch (error) {
					if (stopOnError) {
						isRejected = true;
						reject(error);
					} else {
						errors.push(error);
						resolvingCount--;
						next();
					}
				}
			})();
		};

		// Prime the pump: start up to `concurrency` mapper calls, stopping
		// early if the iterable runs out first.
		for (let i = 0; i < concurrency; i++) {
			next();

			if (isIterableDone) {
				break;
			}
		}
	});
};
9
node_modules/cacache/node_modules/p-map/license
generated
vendored
Normal file
@@ -0,0 +1,9 @@
MIT License

Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
53
node_modules/cacache/node_modules/p-map/package.json
generated
vendored
Normal file
@@ -0,0 +1,53 @@
{
	"name": "p-map",
	"version": "4.0.0",
	"description": "Map over promises concurrently",
	"license": "MIT",
	"repository": "sindresorhus/p-map",
	"funding": "https://github.com/sponsors/sindresorhus",
	"author": {
		"name": "Sindre Sorhus",
		"email": "sindresorhus@gmail.com",
		"url": "https://sindresorhus.com"
	},
	"engines": {
		"node": ">=10"
	},
	"scripts": {
		"test": "xo && ava && tsd"
	},
	"files": [
		"index.js",
		"index.d.ts"
	],
	"keywords": [
		"promise",
		"map",
		"resolved",
		"wait",
		"collection",
		"iterable",
		"iterator",
		"race",
		"fulfilled",
		"async",
		"await",
		"promises",
		"concurrently",
		"concurrency",
		"parallel",
		"bluebird"
	],
	"dependencies": {
		"aggregate-error": "^3.0.0"
	},
	"devDependencies": {
		"ava": "^2.2.0",
		"delay": "^4.1.0",
		"in-range": "^2.0.0",
		"random-int": "^2.0.0",
		"time-span": "^3.1.0",
		"tsd": "^0.7.4",
		"xo": "^0.27.2"
	}
}
89
node_modules/cacache/node_modules/p-map/readme.md
generated
vendored
Normal file
@@ -0,0 +1,89 @@
# p-map [![Build Status](https://travis-ci.org/sindresorhus/p-map.svg?branch=master)](https://travis-ci.org/sindresorhus/p-map)

> Map over promises concurrently

Useful when you need to run promise-returning & async functions multiple times with different inputs concurrently.

## Install

```
$ npm install p-map
```

## Usage

```js
const pMap = require('p-map');
const got = require('got');

const sites = [
	getWebsiteFromUsername('https://sindresorhus'), //=> Promise
	'https://ava.li',
	'https://github.com'
];

(async () => {
	const mapper = async site => {
		const {requestUrl} = await got.head(site);
		return requestUrl;
	};

	const result = await pMap(sites, mapper, {concurrency: 2});

	console.log(result);
	//=> ['https://sindresorhus.com/', 'https://ava.li/', 'https://github.com/']
})();
```

## API

### pMap(input, mapper, options?)

Returns a `Promise` that is fulfilled when all promises in `input` and ones returned from `mapper` are fulfilled, or rejects if any of the promises reject. The fulfilled value is an `Array` of the fulfilled values returned from `mapper` in `input` order.

#### input

Type: `Iterable<Promise | unknown>`

Iterated over concurrently in the `mapper` function.

#### mapper(element, index)

Type: `Function`

Expected to return a `Promise` or value.

#### options

Type: `object`

##### concurrency

Type: `number` (Integer)\
Default: `Infinity`\
Minimum: `1`

Number of concurrently pending promises returned by `mapper`.

##### stopOnError

Type: `boolean`\
Default: `true`

When set to `false`, instead of stopping when a promise rejects, it will wait for all the promises to settle and then reject with an [aggregated error](https://github.com/sindresorhus/aggregate-error) containing all the errors from the rejected promises.
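For example, a small sketch (with a made-up mapper) of collecting every failure instead of rejecting on the first one:

```js
const pMap = require('p-map');

(async () => {
	try {
		await pMap([1, 2, 3], async n => {
			if (n % 2) {
				throw new Error('odd: ' + n);
			}

			return n;
		}, {stopOnError: false});
	} catch (error) {
		// AggregateError wrapping the errors for 1 and 3
		console.error(error);
	}
})();
```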
## p-map for enterprise

Available as part of the Tidelift Subscription.

The maintainers of p-map and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source dependencies you use to build your applications. Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. [Learn more.](https://tidelift.com/subscription/pkg/npm-p-map?utm_source=npm-p-map&utm_medium=referral&utm_campaign=enterprise&utm_term=repo)

## Related

- [p-all](https://github.com/sindresorhus/p-all) - Run promise-returning & async functions concurrently with optional limited concurrency
- [p-filter](https://github.com/sindresorhus/p-filter) - Filter promises concurrently
- [p-times](https://github.com/sindresorhus/p-times) - Run promise-returning & async functions a specific number of times concurrently
- [p-props](https://github.com/sindresorhus/p-props) - Like `Promise.all()` but for `Map` and `Object`
- [p-map-series](https://github.com/sindresorhus/p-map-series) - Map over promises serially
- [p-queue](https://github.com/sindresorhus/p-queue) - Promise queue with concurrency control
- [More…](https://github.com/sindresorhus/promise-fun)
84
node_modules/cacache/package.json
generated
vendored
Normal file
@@ -0,0 +1,84 @@
{
  "name": "cacache",
  "version": "16.1.3",
  "cache-version": {
    "content": "2",
    "index": "5"
  },
  "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.",
  "main": "lib/index.js",
  "files": [
    "bin/",
    "lib/"
  ],
  "scripts": {
    "preversion": "npm test",
    "postversion": "npm publish",
    "prepublishOnly": "git push origin --follow-tags",
    "test": "tap",
    "snap": "tap",
    "coverage": "tap",
    "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
    "lint": "eslint \"**/*.js\"",
    "npmclilint": "npmcli-lint",
    "lintfix": "npm run lint -- --fix",
    "postsnap": "npm run lintfix --",
    "postlint": "template-oss-check",
    "posttest": "npm run lint",
    "template-oss-apply": "template-oss-apply --force"
  },
  "repository": {
    "type": "git",
    "url": "https://github.com/npm/cacache.git"
  },
  "keywords": [
    "cache",
    "caching",
    "content-addressable",
    "sri",
    "sri hash",
    "subresource integrity",
    "cache",
    "storage",
    "store",
    "file store",
    "filesystem",
    "disk cache",
    "disk storage"
  ],
  "license": "ISC",
  "dependencies": {
    "@npmcli/fs": "^2.1.0",
    "@npmcli/move-file": "^2.0.0",
    "chownr": "^2.0.0",
    "fs-minipass": "^2.1.0",
    "glob": "^8.0.1",
    "infer-owner": "^1.0.4",
    "lru-cache": "^7.7.1",
    "minipass": "^3.1.6",
    "minipass-collect": "^1.0.2",
    "minipass-flush": "^1.0.5",
    "minipass-pipeline": "^1.2.4",
    "mkdirp": "^1.0.4",
    "p-map": "^4.0.0",
    "promise-inflight": "^1.0.1",
    "rimraf": "^3.0.2",
    "ssri": "^9.0.0",
    "tar": "^6.1.11",
    "unique-filename": "^2.0.0"
  },
  "devDependencies": {
    "@npmcli/eslint-config": "^3.0.1",
    "@npmcli/template-oss": "3.5.0",
    "tap": "^16.0.0"
  },
  "engines": {
    "node": "^12.13.0 || ^14.15.0 || >=16.0.0"
  },
  "templateOSS": {
    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
    "windowsCI": false,
    "version": "3.5.0"
  },
  "author": "GitHub Inc."
}