lalBi94
2023-03-05 13:23:23 +01:00
commit 7bc56c09b5
14034 changed files with 1834369 additions and 0 deletions

131
node_modules/readable-stream/lib/_stream_duplex.js generated vendored Normal file

@@ -0,0 +1,131 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a duplex stream is just a stream that is both readable and writable.
// Since JS doesn't have multiple prototypal inheritance, this class
// prototypally inherits from Readable, and then parasitically from
// Writable.
'use strict';
/*<replacement>*/
var pna = require('process-nextick-args');
/*</replacement>*/
/*<replacement>*/
var objectKeys = Object.keys || function (obj) {
var keys = [];
for (var key in obj) {
keys.push(key);
}
return keys;
};
/*</replacement>*/
module.exports = Duplex;
/*<replacement>*/
var util = Object.create(require('core-util-is'));
util.inherits = require('inherits');
/*</replacement>*/
var Readable = require('./_stream_readable');
var Writable = require('./_stream_writable');
util.inherits(Duplex, Readable);
{
// avoid scope creep, the keys array can then be collected
var keys = objectKeys(Writable.prototype);
for (var v = 0; v < keys.length; v++) {
var method = keys[v];
if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];
}
}
function Duplex(options) {
if (!(this instanceof Duplex)) return new Duplex(options);
Readable.call(this, options);
Writable.call(this, options);
if (options && options.readable === false) this.readable = false;
if (options && options.writable === false) this.writable = false;
this.allowHalfOpen = true;
if (options && options.allowHalfOpen === false) this.allowHalfOpen = false;
this.once('end', onend);
}
Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', {
// making it explicit this property is not enumerable
// because otherwise some prototype manipulation in
// userland will fail
enumerable: false,
get: function () {
return this._writableState.highWaterMark;
}
});
// the no-half-open enforcer
function onend() {
// if we allow half-open state, or if the writable side ended,
// then we're ok.
if (this.allowHalfOpen || this._writableState.ended) return;
// no more data can be written.
// But allow more writes to happen in this tick.
pna.nextTick(onEndNT, this);
}
function onEndNT(self) {
self.end();
}
Object.defineProperty(Duplex.prototype, 'destroyed', {
get: function () {
if (this._readableState === undefined || this._writableState === undefined) {
return false;
}
return this._readableState.destroyed && this._writableState.destroyed;
},
set: function (value) {
// we ignore the value if the stream
// has not been initialized yet
if (this._readableState === undefined || this._writableState === undefined) {
return;
}
// backward compatibility, the user is explicitly
// managing destroyed
this._readableState.destroyed = value;
this._writableState.destroyed = value;
}
});
Duplex.prototype._destroy = function (err, cb) {
this.push(null);
this.end();
pna.nextTick(cb, err);
};
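A minimal usage sketch of the Duplex class above, assuming the readable-stream v2 API exported by this package; the subclass name and the sample data are illustrative, not part of the committed file.

// Illustrative sketch only: a Duplex that serves three chunks on its readable
// side and collects writes into an array on its writable side.
var Duplex = require('readable-stream').Duplex;
var util = require('util');

function EchoDuplex(options) {
  if (!(this instanceof EchoDuplex)) return new EchoDuplex(options);
  Duplex.call(this, options);
  this._source = ['a', 'b', 'c']; // data for the readable side
  this.written = [];              // chunks captured by the writable side
}
util.inherits(EchoDuplex, Duplex);

EchoDuplex.prototype._read = function () {
  var next = this._source.shift();
  this.push(next == null ? null : Buffer.from(next));
};

EchoDuplex.prototype._write = function (chunk, encoding, cb) {
  this.written.push(chunk);
  cb();
};

var d = new EchoDuplex();
d.on('data', function (chunk) { d.write(chunk); });
d.on('end', function () {
  d.end();
  console.log(d.written.join('')); // "abc"
});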


@@ -0,0 +1,47 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a passthrough stream.
// basically just the most minimal sort of Transform stream.
// Every written chunk gets output as-is.
'use strict';
module.exports = PassThrough;
var Transform = require('./_stream_transform');
/*<replacement>*/
var util = Object.create(require('core-util-is'));
util.inherits = require('inherits');
/*</replacement>*/
util.inherits(PassThrough, Transform);
function PassThrough(options) {
if (!(this instanceof PassThrough)) return new PassThrough(options);
Transform.call(this, options);
}
PassThrough.prototype._transform = function (chunk, encoding, cb) {
cb(null, chunk);
};
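PassThrough is mostly useful as an observation point in a pipe chain; a small illustrative sketch follows, assuming the readable-stream v2 export (the byte counter is made up for the example).

// Illustrative sketch only: count bytes flowing through a pipeline without
// modifying them, using the PassThrough class above.
var PassThrough = require('readable-stream').PassThrough;

var meter = new PassThrough();
var total = 0;
meter.on('data', function (chunk) {
  total += chunk.length;
});
meter.on('end', function () {
  console.error(total + ' bytes passed through');
});

process.stdin.pipe(meter).pipe(process.stdout);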

1019
node_modules/readable-stream/lib/_stream_readable.js generated vendored Normal file

File diff suppressed because it is too large

214
node_modules/readable-stream/lib/_stream_transform.js generated vendored Normal file

@@ -0,0 +1,214 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a transform stream is a readable/writable stream where you do
// something with the data. Sometimes it's called a "filter",
// but that's not a great name for it, since that implies a thing where
// some bits pass through, and others are simply ignored. (That would
// be a valid example of a transform, of course.)
//
// While the output is causally related to the input, it's not a
// necessarily symmetric or synchronous transformation. For example,
// a zlib stream might take multiple plain-text writes(), and then
// emit a single compressed chunk some time in the future.
//
// Here's how this works:
//
// The Transform stream has all the aspects of the readable and writable
// stream classes. When you write(chunk), that calls _write(chunk,cb)
// internally, and returns false if there's a lot of pending writes
// buffered up. When you call read(), that calls _read(n) until
// there's enough pending readable data buffered up.
//
// In a transform stream, the written data is placed in a buffer. When
// _read(n) is called, it transforms the queued up data, calling the
// buffered _write cb's as it consumes chunks. If consuming a single
// written chunk would result in multiple output chunks, then the first
// outputted bit calls the readcb, and subsequent chunks just go into
// the read buffer, and will cause it to emit 'readable' if necessary.
//
// This way, back-pressure is actually determined by the reading side,
// since _read has to be called to start processing a new chunk. However,
// a pathological inflate type of transform can cause excessive buffering
// here. For example, imagine a stream where every byte of input is
// interpreted as an integer from 0-255, and then results in that many
// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
// 1kb of data being output. In this case, you could write a very small
// amount of input, and end up with a very large amount of output. In
// such a pathological inflating mechanism, there'd be no way to tell
// the system to stop doing the transform. A single 4MB write could
// cause the system to run out of memory.
//
// However, even in such a pathological case, only a single written chunk
// would be consumed, and then the rest would wait (un-transformed) until
// the results of the previous transformed chunk were consumed.
'use strict';
module.exports = Transform;
var Duplex = require('./_stream_duplex');
/*<replacement>*/
var util = Object.create(require('core-util-is'));
util.inherits = require('inherits');
/*</replacement>*/
util.inherits(Transform, Duplex);
function afterTransform(er, data) {
var ts = this._transformState;
ts.transforming = false;
var cb = ts.writecb;
if (!cb) {
return this.emit('error', new Error('write callback called multiple times'));
}
ts.writechunk = null;
ts.writecb = null;
if (data != null) // single equals check for both `null` and `undefined`
this.push(data);
cb(er);
var rs = this._readableState;
rs.reading = false;
if (rs.needReadable || rs.length < rs.highWaterMark) {
this._read(rs.highWaterMark);
}
}
function Transform(options) {
if (!(this instanceof Transform)) return new Transform(options);
Duplex.call(this, options);
this._transformState = {
afterTransform: afterTransform.bind(this),
needTransform: false,
transforming: false,
writecb: null,
writechunk: null,
writeencoding: null
};
// start out asking for a readable event once data is transformed.
this._readableState.needReadable = true;
// we have implemented the _read method, and done the other things
// that Readable wants before the first _read call, so unset the
// sync guard flag.
this._readableState.sync = false;
if (options) {
if (typeof options.transform === 'function') this._transform = options.transform;
if (typeof options.flush === 'function') this._flush = options.flush;
}
// When the writable side finishes, then flush out anything remaining.
this.on('prefinish', prefinish);
}
function prefinish() {
var _this = this;
if (typeof this._flush === 'function') {
this._flush(function (er, data) {
done(_this, er, data);
});
} else {
done(this, null, null);
}
}
Transform.prototype.push = function (chunk, encoding) {
this._transformState.needTransform = false;
return Duplex.prototype.push.call(this, chunk, encoding);
};
// This is the part where you do stuff!
// override this function in implementation classes.
// 'chunk' is an input chunk.
//
// Call `push(newChunk)` to pass along transformed output
// to the readable side. You may call 'push' zero or more times.
//
// Call `cb(err)` when you are done with this chunk. If you pass
// an error, then that'll put the hurt on the whole operation. If you
// never call cb(), then you'll never get another chunk.
Transform.prototype._transform = function (chunk, encoding, cb) {
throw new Error('_transform() is not implemented');
};
Transform.prototype._write = function (chunk, encoding, cb) {
var ts = this._transformState;
ts.writecb = cb;
ts.writechunk = chunk;
ts.writeencoding = encoding;
if (!ts.transforming) {
var rs = this._readableState;
if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);
}
};
// Doesn't matter what the args are here.
// _transform does all the work.
// That we got here means that the readable side wants more data.
Transform.prototype._read = function (n) {
var ts = this._transformState;
if (ts.writechunk !== null && ts.writecb && !ts.transforming) {
ts.transforming = true;
this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
} else {
// mark that we need a transform, so that any data that comes in
// will get processed, now that we've asked for it.
ts.needTransform = true;
}
};
Transform.prototype._destroy = function (err, cb) {
var _this2 = this;
Duplex.prototype._destroy.call(this, err, function (err2) {
cb(err2);
_this2.emit('close');
});
};
function done(stream, er, data) {
if (er) return stream.emit('error', er);
if (data != null) // single equals check for both `null` and `undefined`
stream.push(data);
// if there's nothing in the write buffer, then that means
// that nothing more will ever be provided
if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0');
if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming');
return stream.push(null);
}
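To make the _transform/_flush contract described in the comments above concrete, an illustrative sketch of a subclass, assuming the readable-stream v2 export; the class name and behaviour are examples only.

// Illustrative sketch only: an uppercasing Transform. _transform receives each
// written chunk, pushes zero or more output chunks, and signals completion via
// cb; passing data to cb is shorthand for push(data) followed by cb().
var Transform = require('readable-stream').Transform;
var util = require('util');

function Upper(options) {
  if (!(this instanceof Upper)) return new Upper(options);
  Transform.call(this, options);
}
util.inherits(Upper, Transform);

Upper.prototype._transform = function (chunk, encoding, cb) {
  cb(null, Buffer.from(chunk.toString().toUpperCase()));
};

Upper.prototype._flush = function (cb) {
  this.push(Buffer.from('\n--- done ---\n')); // trailing data before 'end'
  cb();
};

process.stdin.pipe(new Upper()).pipe(process.stdout);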

687
node_modules/readable-stream/lib/_stream_writable.js generated vendored Normal file

File diff suppressed because it is too large


@@ -0,0 +1,79 @@
'use strict';
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
var Buffer = require('safe-buffer').Buffer;
var util = require('util');
function copyBuffer(src, target, offset) {
src.copy(target, offset);
}
module.exports = function () {
function BufferList() {
_classCallCheck(this, BufferList);
this.head = null;
this.tail = null;
this.length = 0;
}
BufferList.prototype.push = function push(v) {
var entry = { data: v, next: null };
if (this.length > 0) this.tail.next = entry;
else this.head = entry;
this.tail = entry;
++this.length;
};
BufferList.prototype.unshift = function unshift(v) {
var entry = { data: v, next: this.head };
if (this.length === 0) this.tail = entry;
this.head = entry;
++this.length;
};
BufferList.prototype.shift = function shift() {
if (this.length === 0) return;
var ret = this.head.data;
if (this.length === 1) this.head = this.tail = null;
else this.head = this.head.next;
--this.length;
return ret;
};
BufferList.prototype.clear = function clear() {
this.head = this.tail = null;
this.length = 0;
};
BufferList.prototype.join = function join(s) {
if (this.length === 0) return '';
var p = this.head;
var ret = '' + p.data;
while (p = p.next) {
ret += s + p.data;
}
return ret;
};
BufferList.prototype.concat = function concat(n) {
if (this.length === 0) return Buffer.alloc(0);
if (this.length === 1) return this.head.data;
var ret = Buffer.allocUnsafe(n >>> 0);
var p = this.head;
var i = 0;
while (p) {
copyBuffer(p.data, ret, i);
i += p.data.length;
p = p.next;
}
return ret;
};
return BufferList;
}();
if (util && util.inspect && util.inspect.custom) {
module.exports.prototype[util.inspect.custom] = function () {
var obj = util.inspect({ length: this.length });
return this.constructor.name + ' ' + obj;
};
}
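An illustrative sketch of how the buffer list above behaves; the require path is an assumption (readable-stream normally keeps this helper under lib/internal/streams/, and the file header is missing from this diff).

// Illustrative sketch only: queue two Buffers and flatten them. The require
// path below is assumed, matching readable-stream's usual layout.
var BufferList = require('readable-stream/lib/internal/streams/BufferList');

var list = new BufferList();
list.push(Buffer.from('hello '));
list.push(Buffer.from('world'));

console.log(list.length);                // 2 (entries, not bytes)
console.log(list.join(''));              // 'hello world'
console.log(list.concat(11).toString()); // one flat 11-byte Buffer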


@@ -0,0 +1,74 @@
'use strict';
/*<replacement>*/
var pna = require('process-nextick-args');
/*</replacement>*/
// undocumented cb() API, needed for core, not for public API
function destroy(err, cb) {
var _this = this;
var readableDestroyed = this._readableState && this._readableState.destroyed;
var writableDestroyed = this._writableState && this._writableState.destroyed;
if (readableDestroyed || writableDestroyed) {
if (cb) {
cb(err);
} else if (err && (!this._writableState || !this._writableState.errorEmitted)) {
pna.nextTick(emitErrorNT, this, err);
}
return this;
}
// we set destroyed to true before firing error callbacks in order
// to make it re-entrance safe in case destroy() is called within callbacks
if (this._readableState) {
this._readableState.destroyed = true;
}
// if this is a duplex stream mark the writable part as destroyed as well
if (this._writableState) {
this._writableState.destroyed = true;
}
this._destroy(err || null, function (err) {
if (!cb && err) {
pna.nextTick(emitErrorNT, _this, err);
if (_this._writableState) {
_this._writableState.errorEmitted = true;
}
} else if (cb) {
cb(err);
}
});
return this;
}
function undestroy() {
if (this._readableState) {
this._readableState.destroyed = false;
this._readableState.reading = false;
this._readableState.ended = false;
this._readableState.endEmitted = false;
}
if (this._writableState) {
this._writableState.destroyed = false;
this._writableState.ended = false;
this._writableState.ending = false;
this._writableState.finished = false;
this._writableState.errorEmitted = false;
}
}
function emitErrorNT(self, err) {
self.emit('error', err);
}
module.exports = {
destroy: destroy,
undestroy: undestroy
};
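These helpers back the public destroy() behaviour of Readable and Writable; an illustrative sketch of that observable behaviour, using the package's Readable class rather than requiring this internal file directly.

// Illustrative sketch only: destroy() sets the destroyed flags synchronously,
// runs _destroy(err, cb), and defers the 'error' emission to the next tick.
var Readable = require('readable-stream').Readable;

var r = new Readable({ read: function () {} });
r.on('error', function (err) {
  console.log('error emitted on next tick:', err.message);
});

r.destroy(new Error('boom'));
console.log('destroyed flag set synchronously:', r.destroyed); // true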


@@ -0,0 +1 @@
module.exports = require('events').EventEmitter;


@@ -0,0 +1 @@
module.exports = require('stream');