$
This commit is contained in:
29
node_modules/cacache/lib/content/path.js
generated
vendored
Normal file
29
node_modules/cacache/lib/content/path.js
generated
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
'use strict'
|
||||
|
||||
const contentVer = require('../../package.json')['cache-version'].content
|
||||
const hashToSegments = require('../util/hash-to-segments')
|
||||
const path = require('path')
|
||||
const ssri = require('ssri')
|
||||
|
||||
// Current format of content file path:
//
// sha512-BaSE64Hex= ->
// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee
//
module.exports = contentPath

// Resolve the on-disk path for the content addressed by `integrity`.
// The *strongest* algorithm present in the integrity string is used,
// and the hex digest is split into directory segments (see format above).
function contentPath (cache, integrity) {
  // single: true collapses a multi-entry integrity down to one hash
  const sri = ssri.parse(integrity, { single: true })
  const segments = hashToSegments(sri.hexDigest())
  return path.join(contentDir(cache), sri.algorithm, ...segments)
}
|
||||
|
||||
module.exports.contentDir = contentDir

// Root directory holding all content files for this cache-format version.
function contentDir (cache) {
  const versionedName = `content-v${contentVer}`
  return path.join(cache, versionedName)
}
|
241
node_modules/cacache/lib/content/read.js
generated
vendored
Normal file
241
node_modules/cacache/lib/content/read.js
generated
vendored
Normal file
@@ -0,0 +1,241 @@
|
||||
'use strict'
|
||||
|
||||
const fs = require('@npmcli/fs')
|
||||
const fsm = require('fs-minipass')
|
||||
const ssri = require('ssri')
|
||||
const contentPath = require('./path')
|
||||
const Pipeline = require('minipass-pipeline')
|
||||
|
||||
module.exports = read

// Content at or below this size is read with a single readFile call;
// anything larger is streamed through a verifying pipeline instead.
const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024

// Read the entire cached content for `integrity`, validating its size
// (when opts.size is provided) and its integrity before resolving with
// the content Buffer. Throws EBADSIZE / EINTEGRITY errors on mismatch.
async function read (cache, integrity, opts = {}) {
  const { size } = opts
  const found = await withContentSri(cache, integrity, async (cpath, sri) => {
    // stat up front so we know how large the content is
    const stat = await fs.stat(cpath)
    return { stat, cpath, sri }
  })
  const { stat, cpath, sri } = found

  const sizeMismatch = typeof size === 'number' && stat.size !== size
  if (sizeMismatch) {
    throw sizeError(size, stat.size)
  }

  // Large content: stream it so we never buffer more than one read at a time.
  if (stat.size > MAX_SINGLE_READ_SIZE) {
    return readPipeline(cpath, stat.size, sri, new Pipeline()).concat()
  }

  const buffer = await fs.readFile(cpath, { encoding: null })
  const valid = ssri.checkData(buffer, sri)
  if (!valid) {
    throw integrityError(sri, cpath)
  }

  return buffer
}
|
||||
|
||||
// Wire `stream` (a minipass Pipeline) to read the file at `cpath` and
// verify it against `sri` as the bytes flow through. Returns the same
// pipeline so callers can chain on it.
const readPipeline = (cpath, size, sri, stream) => {
  const source = new fsm.ReadStream(cpath, {
    size,
    readSize: MAX_SINGLE_READ_SIZE,
  })
  const verifier = ssri.integrityStream({
    integrity: sri,
    size,
  })
  stream.push(source, verifier)
  return stream
}
|
||||
|
||||
module.exports.sync = readSync

// Synchronous variant of read(): returns the content Buffer for
// `integrity`, throwing EBADSIZE / EINTEGRITY errors on mismatch.
function readSync (cache, integrity, opts = {}) {
  const { size } = opts
  return withContentSriSync(cache, integrity, (cpath, sri) => {
    const data = fs.readFileSync(cpath, { encoding: null })

    const sizeMismatch = typeof size === 'number' && size !== data.length
    if (sizeMismatch) {
      throw sizeError(size, data.length)
    }

    if (!ssri.checkData(data, sri)) {
      throw integrityError(sri, cpath)
    }

    return data
  })
}
|
||||
|
||||
module.exports.stream = readStream
module.exports.readStream = readStream

// Return a stream of the cached content for `integrity`. Any failure
// (missing content, size mismatch, integrity failure) is emitted as an
// 'error' event on the returned stream rather than thrown.
function readStream (cache, integrity, opts = {}) {
  const { size } = opts
  const stream = new Pipeline()
  // Kick off the lookup asynchronously; the pipeline is returned to the
  // caller immediately and is fed (or errored) once the content is found.
  Promise.resolve().then(async () => {
    const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
      // just stat to ensure it exists
      const stat = await fs.stat(cpath)
      return { stat, cpath, sri }
    })

    const sizeMismatch = typeof size === 'number' && size !== stat.size
    if (sizeMismatch) {
      return stream.emit('error', sizeError(size, stat.size))
    }

    return readPipeline(cpath, stat.size, sri, stream)
  }).catch(err => stream.emit('error', err))

  return stream
}
|
||||
|
||||
module.exports.copy = copy
module.exports.copy.sync = copySync

// Copy the cached content for `integrity` out to `dest`.
// Resolves when the copy completes.
function copy (cache, integrity, dest) {
  return withContentSri(cache, integrity, (cpath) => fs.copyFile(cpath, dest))
}
|
||||
|
||||
// Synchronous variant of copy(): copy cached content for `integrity` to `dest`.
function copySync (cache, integrity, dest) {
  return withContentSriSync(cache, integrity, (cpath) => fs.copyFileSync(cpath, dest))
}
|
||||
|
||||
module.exports.hasContent = hasContent

// Check whether content for `integrity` exists locally. Resolves to
// { size, sri, stat } when present, and false when the content is
// missing (or on Windows EPERM, which can happen during writes).
async function hasContent (cache, integrity) {
  if (!integrity) {
    return false
  }

  try {
    const found = await withContentSri(cache, integrity, async (cpath, sri) => {
      const stat = await fs.stat(cpath)
      return { size: stat.size, sri, stat }
    })
    return found
  } catch (err) {
    if (err.code === 'ENOENT') {
      return false
    }

    if (err.code === 'EPERM') {
      /* istanbul ignore else */
      if (process.platform !== 'win32') {
        throw err
      } else {
        return false
      }
    }
    // NOTE(review): any other error code falls through here, so the
    // promise resolves to undefined (falsy) — confirm callers only rely
    // on truthiness of the result.
  }
}
|
||||
|
||||
module.exports.hasContent.sync = hasContentSync

// Synchronous variant of hasContent(): returns { size, sri, stat } when
// the content exists locally, false when it does not.
function hasContentSync (cache, integrity) {
  if (!integrity) {
    return false
  }

  return withContentSriSync(cache, integrity, (cpath, sri) => {
    try {
      const stat = fs.statSync(cpath)
      return { size: stat.size, sri, stat }
    } catch (err) {
      if (err.code === 'ENOENT') {
        return false
      }

      if (err.code === 'EPERM') {
        /* istanbul ignore else */
        if (process.platform !== 'win32') {
          throw err
        } else {
          return false
        }
      }
      // NOTE(review): other error codes fall through, so the callback
      // returns undefined (falsy) — confirm callers only test truthiness.
    }
  })
}
|
||||
|
||||
// Resolve `integrity` to a concrete content path and invoke
// `fn(cpath, sri)` with it. When `integrity` contains multiple digests
// for the strongest algorithm, every digest is tried (in parallel) and
// the first non-error result wins; errors are collected rather than
// raced so a fast generic failure cannot mask a later success.
async function withContentSri (cache, integrity, fn) {
  const sri = ssri.parse(integrity)
  // If `integrity` has multiple entries, pick the first digest
  // with available local data.
  const algo = sri.pickAlgorithm()
  const digests = sri[algo]

  if (digests.length <= 1) {
    // Single digest: call fn directly with its content path.
    const cpath = contentPath(cache, digests[0])
    return fn(cpath, digests[0])
  } else {
    // Can't use race here because a generic error can happen before
    // a ENOENT error, and can happen before a valid result
    const results = await Promise.all(digests.map(async (meta) => {
      try {
        // Recurse with a single-digest integrity; errors are returned
        // (not thrown) so Promise.all collects every outcome.
        return await withContentSri(cache, meta, fn)
      } catch (err) {
        if (err.code === 'ENOENT') {
          // Normalize "file missing" into a uniform ENOENT error that
          // names the full integrity string being searched for.
          return Object.assign(
            new Error('No matching content found for ' + sri.toString()),
            { code: 'ENOENT' }
          )
        }
        return err
      }
    }))
    // Return the first non error if it is found
    const result = results.find((r) => !(r instanceof Error))
    if (result) {
      return result
    }

    // Throw the No matching content found error
    const enoentError = results.find((r) => r.code === 'ENOENT')
    if (enoentError) {
      throw enoentError
    }

    // Throw generic error
    throw results.find((r) => r instanceof Error)
  }
}
|
||||
|
||||
// Synchronous variant of withContentSri(): resolve `integrity` to a
// content path and invoke `fn(cpath, sri)`. With multiple digests for
// the chosen algorithm, each is tried in order and the last error is
// thrown if none succeeds.
function withContentSriSync (cache, integrity, fn) {
  const sri = ssri.parse(integrity)
  // If `integrity` has multiple entries, pick the first digest
  // with available local data.
  const algo = sri.pickAlgorithm()
  const digests = sri[algo]

  if (digests.length <= 1) {
    const cpath = contentPath(cache, digests[0])
    return fn(cpath, digests[0])
  }

  // Multiple candidates: try them one at a time, remembering the most
  // recent failure so it can be rethrown if every digest fails.
  let lastErr = null
  for (const meta of digests) {
    try {
      return withContentSriSync(cache, meta, fn)
    } catch (err) {
      lastErr = err
    }
  }
  throw lastErr
}
|
||||
|
||||
// Build an EBADSIZE error describing an expected-vs-actual size mismatch.
function sizeError (expected, found) {
  const message = `Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`
  const err = new Error(message)
  err.expected = expected
  err.found = found
  err.code = 'EBADSIZE'
  return err
}
|
||||
|
||||
// Build an EINTEGRITY error for content whose bytes did not match `sri`.
function integrityError (sri, path) {
  const message = `Integrity verification failed for ${sri} (${path})`
  return Object.assign(new Error(message), {
    code: 'EINTEGRITY',
    sri,
    path,
  })
}
|
20
node_modules/cacache/lib/content/rm.js
generated
vendored
Normal file
20
node_modules/cacache/lib/content/rm.js
generated
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
'use strict'
|
||||
|
||||
const util = require('util')
|
||||
|
||||
const contentPath = require('./path')
|
||||
const { hasContent } = require('./read')
|
||||
const rimraf = util.promisify(require('rimraf'))
|
||||
|
||||
module.exports = rm

// Remove the content (if any) addressed by `integrity` from the cache.
// Resolves true when something was removed, false otherwise.
async function rm (cache, integrity) {
  const content = await hasContent(cache, integrity)
  // ~pretty~ sure we can't end up with a content lacking sri, but be safe
  if (!content || !content.sri) {
    return false
  }

  await rimraf(contentPath(cache, content.sri))
  return true
}
|
189
node_modules/cacache/lib/content/write.js
generated
vendored
Normal file
189
node_modules/cacache/lib/content/write.js
generated
vendored
Normal file
@@ -0,0 +1,189 @@
|
||||
'use strict'
|
||||
|
||||
const events = require('events')
|
||||
const util = require('util')
|
||||
|
||||
const contentPath = require('./path')
|
||||
const fixOwner = require('../util/fix-owner')
|
||||
const fs = require('@npmcli/fs')
|
||||
const moveFile = require('../util/move-file')
|
||||
const Minipass = require('minipass')
|
||||
const Pipeline = require('minipass-pipeline')
|
||||
const Flush = require('minipass-flush')
|
||||
const path = require('path')
|
||||
const rimraf = util.promisify(require('rimraf'))
|
||||
const ssri = require('ssri')
|
||||
const uniqueFilename = require('unique-filename')
|
||||
const fsm = require('fs-minipass')
|
||||
|
||||
module.exports = write

// Write `data` into the cache. Validates opts.size and opts.integrity
// (when provided) before touching disk, writes to a tmp file first, then
// moves it into its content-addressed location. Resolves with
// { integrity, size } describing what was stored.
async function write (cache, data, opts = {}) {
  const { algorithms, size, integrity } = opts

  const multipleAlgorithms = Boolean(algorithms && algorithms.length > 1)
  if (multipleAlgorithms) {
    throw new Error('opts.algorithms only supports a single algorithm for now')
  }

  const badSize = typeof size === 'number' && data.length !== size
  if (badSize) {
    throw sizeError(size, data.length)
  }

  const sri = ssri.fromData(data, algorithms ? { algorithms } : {})
  const integrityMismatch = integrity && !ssri.checkData(data, integrity, opts)
  if (integrityMismatch) {
    throw checksumError(integrity, sri)
  }

  const tmp = await makeTmp(cache, opts)
  try {
    // 'wx' flag: fail rather than clobber if the tmp file already exists
    await fs.writeFile(tmp.target, data, { flag: 'wx' })
    await moveToDestination(tmp, cache, sri, opts)
    return { integrity: sri, size: data.length }
  } finally {
    // if the move never happened, don't leave the tmp file behind
    if (!tmp.moved) {
      await rimraf(tmp.target)
    }
  }
}
|
||||
|
||||
module.exports.stream = writeStream

// writes proxied to the 'inputStream' that is passed to the Promise
// 'end' is deferred until content is handled.
//
// Write-side stream adapter: bytes written to this stream are proxied
// into an internal Minipass, which feeds handleContent(). The stream
// emits 'integrity' and 'size' events once the content has been hashed
// and moved into place.
class CacacheWriteStream extends Flush {
  // cache: cache directory root; opts: forwarded to handleContent().
  constructor (cache, opts) {
    super()
    this.opts = opts
    this.cache = cache
    // inner stream that actually carries the written bytes
    this.inputStream = new Minipass()
    // forward inner-stream errors and backpressure signals to consumers
    this.inputStream.on('error', er => this.emit('error', er))
    this.inputStream.on('drain', () => this.emit('drain'))
    // promise for the cache-write, started lazily on first write()
    this.handleContentP = null
  }

  // Lazily start the cache-write pipeline on the first chunk, then proxy
  // the chunk into the inner stream. Returns the inner stream's
  // backpressure result.
  write (chunk, encoding, cb) {
    if (!this.handleContentP) {
      this.handleContentP = handleContent(
        this.inputStream,
        this.cache,
        this.opts
      )
    }
    return this.inputStream.write(chunk, encoding, cb)
  }

  // Called by minipass-flush on end(): finish the inner stream, wait for
  // the content to be fully handled, then surface integrity/size events
  // before signaling completion via cb.
  flush (cb) {
    this.inputStream.end(() => {
      if (!this.handleContentP) {
        // nothing was ever written, so no content pipeline exists
        const e = new Error('Cache input stream was empty')
        e.code = 'ENODATA'
        // empty streams are probably emitting end right away.
        // defer this one tick by rejecting a promise on it.
        return Promise.reject(e).catch(cb)
      }
      // eslint-disable-next-line promise/catch-or-return
      this.handleContentP.then(
        (res) => {
          res.integrity && this.emit('integrity', res.integrity)
          // eslint-disable-next-line promise/always-return
          res.size !== null && this.emit('size', res.size)
          cb()
        },
        (er) => cb(er)
      )
    })
  }
}
|
||||
|
||||
// Create a writable stream that stores everything written to it in the
// cache. Emits 'integrity' and 'size' once the content is committed.
function writeStream (cache, opts = {}) {
  const stream = new CacacheWriteStream(cache, opts)
  return stream
}
|
||||
|
||||
// Drain `inputStream` into a tmp file, then move the verified result
// into its final content-addressed path. The tmp file is removed if the
// move never happens. Resolves with { integrity, size } from pipeToTmp.
async function handleContent (inputStream, cache, opts) {
  const tmp = await makeTmp(cache, opts)
  try {
    const res = await pipeToTmp(inputStream, cache, tmp.target, opts)
    await moveToDestination(tmp, cache, res.integrity, opts)
    return res
  } finally {
    // clean up the tmp file unless moveToDestination claimed it
    if (!tmp.moved) {
      await rimraf(tmp.target)
    }
  }
}
|
||||
|
||||
// Pipe `inputStream` into the tmp file at `tmpTarget`, computing (or
// receiving) the content's integrity and size along the way. Resolves
// with { integrity, size } once the tmp file is fully written.
async function pipeToTmp (inputStream, cache, tmpTarget, opts) {
  // 'wx': fail instead of clobbering an existing tmp file
  const outStream = new fsm.WriteStream(tmpTarget, {
    flags: 'wx',
  })

  if (opts.integrityEmitter) {
    // an external emitter (e.g. an upstream integrity stream) will
    // report integrity/size, so we don't hash here ourselves
    // we need to create these all simultaneously since they can fire in any order
    const [integrity, size] = await Promise.all([
      events.once(opts.integrityEmitter, 'integrity').then(res => res[0]),
      events.once(opts.integrityEmitter, 'size').then(res => res[0]),
      new Pipeline(inputStream, outStream).promise(),
    ])
    return { integrity, size }
  }

  // no external emitter: hash the bytes in-line with an ssri stream
  let integrity
  let size
  const hashStream = ssri.integrityStream({
    integrity: opts.integrity,
    algorithms: opts.algorithms,
    size: opts.size,
  })
  hashStream.on('integrity', i => {
    integrity = i
  })
  hashStream.on('size', s => {
    size = s
  })

  const pipeline = new Pipeline(inputStream, hashStream, outStream)
  await pipeline.promise()
  return { integrity, size }
}
|
||||
|
||||
// Create a unique tmp file path under <cache>/tmp (honoring
// opts.tmpPrefix) and ensure its parent directory exists. Returns
// { target, moved } — callers flip `moved` after relocating the file.
async function makeTmp (cache, opts) {
  const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
  await fixOwner.mkdirfix(cache, path.dirname(tmpTarget))
  return { target: tmpTarget, moved: false }
}
|
||||
|
||||
// Move a finished tmp file into its content-addressed final location,
// marking tmp.moved so the caller's cleanup skips it, and fixing
// ownership of the destination.
async function moveToDestination (tmp, cache, sri, opts) {
  const destination = contentPath(cache, sri)

  await fixOwner.mkdirfix(cache, path.dirname(destination))
  await moveFile(tmp.target, destination)
  tmp.moved = true
  await fixOwner.chownr(cache, destination)
}
|
||||
|
||||
// Build an EBADSIZE error describing an expected-vs-actual size mismatch.
function sizeError (expected, found) {
  const message = `Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`
  const err = new Error(message)
  err.expected = expected
  err.found = found
  err.code = 'EBADSIZE'
  return err
}
|
||||
|
||||
// Build an EINTEGRITY error reporting the integrity the caller wanted
// versus the integrity actually computed from the data.
function checksumError (expected, found) {
  const err = new Error(`Integrity check failed:
  Wanted: ${expected}
   Found: ${found}`)
  err.code = 'EINTEGRITY'
  err.expected = expected
  err.found = found
  return err
}
|
Reference in New Issue
Block a user