This commit is contained in:
lalBi94
2023-03-05 13:23:23 +01:00
commit 7bc56c09b5
14034 changed files with 1834369 additions and 0 deletions

44
node_modules/webpack/lib/util/ArrayHelpers.js generated vendored Normal file

@@ -0,0 +1,44 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/**
* Compare two arrays or strings by performing strict equality check for each value.
* @template T [T=any]
* @param {ArrayLike<T>} a Array of values to be compared
* @param {ArrayLike<T>} b Array of values to be compared
* @returns {boolean} returns true if all the elements of passed arrays are strictly equal.
*/
exports.equals = (a, b) => {
if (a.length !== b.length) return false;
for (let i = 0; i < a.length; i++) {
if (a[i] !== b[i]) return false;
}
return true;
};
/**
* Partition an array by calling a predicate function on each value.
* @template T [T=any]
* @param {Array<T>} arr Array of values to be partitioned
* @param {(value: T) => boolean} fn Partition function which partitions based on truthiness of result.
* @returns {[Array<T>, Array<T>]} returns the values of `arr` partitioned into two new arrays based on fn predicate.
*/
exports.groupBy = (arr = [], fn) => {
return arr.reduce(
/**
* @param {[Array<T>, Array<T>]} groups An accumulator storing already partitioned values returned from previous call.
* @param {T} value The value of the current element
* @returns {[Array<T>, Array<T>]} the accumulator with the current value pushed into the partition selected by the predicate
*/
(groups, value) => {
groups[fn(value) ? 0 : 1].push(value);
return groups;
},
[[], []]
);
};
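For orientation, a minimal usage sketch of the two helpers above (assuming a local require of this file; not part of the vendored source):

// Hypothetical usage sketch of ArrayHelpers.js
const { equals, groupBy } = require("./ArrayHelpers");
equals([1, 2, 3], [1, 2, 3]); // true
equals("abc", "abd"); // false; works for any ArrayLike, including strings
const [evens, odds] = groupBy([1, 2, 3, 4, 5], n => n % 2 === 0);
// evens: [2, 4], odds: [1, 3, 5]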

111
node_modules/webpack/lib/util/ArrayQueue.js generated vendored Normal file

@@ -0,0 +1,111 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/**
* @template T
*/
class ArrayQueue {
/**
* @param {Iterable<T>=} items The initial elements.
*/
constructor(items) {
/** @private @type {T[]} */
this._list = items ? Array.from(items) : [];
/** @private @type {T[]} */
this._listReversed = [];
}
/**
* Returns the number of elements in this queue.
* @returns {number} The number of elements in this queue.
*/
get length() {
return this._list.length + this._listReversed.length;
}
/**
* Empties the queue.
*/
clear() {
this._list.length = 0;
this._listReversed.length = 0;
}
/**
* Appends the specified element to this queue.
* @param {T} item The element to add.
* @returns {void}
*/
enqueue(item) {
this._list.push(item);
}
/**
* Retrieves and removes the head of this queue.
* @returns {T | undefined} The head of the queue, or `undefined` if this queue is empty.
*/
dequeue() {
if (this._listReversed.length === 0) {
if (this._list.length === 0) return undefined;
if (this._list.length === 1) return this._list.pop();
if (this._list.length < 16) return this._list.shift();
const temp = this._listReversed;
this._listReversed = this._list;
this._listReversed.reverse();
this._list = temp;
}
return this._listReversed.pop();
}
/**
* Finds and removes an item
* @param {T} item the item
* @returns {void}
*/
delete(item) {
const i = this._list.indexOf(item);
if (i >= 0) {
this._list.splice(i, 1);
} else {
const i = this._listReversed.indexOf(item);
if (i >= 0) this._listReversed.splice(i, 1);
}
}
[Symbol.iterator]() {
let i = -1;
let reversed = false;
return {
next: () => {
if (!reversed) {
i++;
if (i < this._list.length) {
return {
done: false,
value: this._list[i]
};
}
reversed = true;
i = this._listReversed.length;
}
i--;
if (i < 0) {
return {
done: true,
value: undefined
};
}
return {
done: false,
value: this._listReversed[i]
};
}
};
}
}
module.exports = ArrayQueue;
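The two internal arrays make dequeue amortized O(1): once the forward list grows past 16 items it is reversed once and then popped from the end. A small usage sketch (assuming a local require path; not part of the vendored file):

// Hypothetical usage sketch of ArrayQueue
const ArrayQueue = require("./ArrayQueue");
const queue = new ArrayQueue(["a", "b"]);
queue.enqueue("c");
queue.dequeue(); // "a"
queue.delete("b");
console.log(queue.length); // 1
console.log([...queue]); // ["c"]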

373
node_modules/webpack/lib/util/AsyncQueue.js generated vendored Normal file

@@ -0,0 +1,373 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const { SyncHook, AsyncSeriesHook } = require("tapable");
const { makeWebpackError } = require("../HookWebpackError");
const WebpackError = require("../WebpackError");
const ArrayQueue = require("./ArrayQueue");
const QUEUED_STATE = 0;
const PROCESSING_STATE = 1;
const DONE_STATE = 2;
let inHandleResult = 0;
/**
* @template T
* @callback Callback
* @param {(WebpackError | null)=} err
* @param {T=} result
*/
/**
* @template T
* @template K
* @template R
*/
class AsyncQueueEntry {
/**
* @param {T} item the item
* @param {Callback<R>} callback the callback
*/
constructor(item, callback) {
this.item = item;
/** @type {typeof QUEUED_STATE | typeof PROCESSING_STATE | typeof DONE_STATE} */
this.state = QUEUED_STATE;
this.callback = callback;
/** @type {Callback<R>[] | undefined} */
this.callbacks = undefined;
this.result = undefined;
/** @type {WebpackError | undefined} */
this.error = undefined;
}
}
/**
* @template T
* @template K
* @template R
*/
class AsyncQueue {
/**
* @param {Object} options options object
* @param {string=} options.name name of the queue
* @param {number=} options.parallelism how many items should be processed at once
* @param {AsyncQueue<any, any, any>=} options.parent parent queue, which will have priority over this queue and with shared parallelism
* @param {function(T): K=} options.getKey extract key from item
* @param {function(T, Callback<R>): void} options.processor async function to process items
*/
constructor({ name, parallelism, parent, processor, getKey }) {
this._name = name;
this._parallelism = parallelism || 1;
this._processor = processor;
this._getKey =
getKey || /** @type {(T) => K} */ (item => /** @type {any} */ (item));
/** @type {Map<K, AsyncQueueEntry<T, K, R>>} */
this._entries = new Map();
/** @type {ArrayQueue<AsyncQueueEntry<T, K, R>>} */
this._queued = new ArrayQueue();
/** @type {AsyncQueue<any, any, any>[]} */
this._children = undefined;
this._activeTasks = 0;
this._willEnsureProcessing = false;
this._needProcessing = false;
this._stopped = false;
this._root = parent ? parent._root : this;
if (parent) {
if (this._root._children === undefined) {
this._root._children = [this];
} else {
this._root._children.push(this);
}
}
this.hooks = {
/** @type {AsyncSeriesHook<[T]>} */
beforeAdd: new AsyncSeriesHook(["item"]),
/** @type {SyncHook<[T]>} */
added: new SyncHook(["item"]),
/** @type {AsyncSeriesHook<[T]>} */
beforeStart: new AsyncSeriesHook(["item"]),
/** @type {SyncHook<[T]>} */
started: new SyncHook(["item"]),
/** @type {SyncHook<[T, Error, R]>} */
result: new SyncHook(["item", "error", "result"])
};
this._ensureProcessing = this._ensureProcessing.bind(this);
}
/**
* @param {T} item an item
* @param {Callback<R>} callback callback function
* @returns {void}
*/
add(item, callback) {
if (this._stopped) return callback(new WebpackError("Queue was stopped"));
this.hooks.beforeAdd.callAsync(item, err => {
if (err) {
callback(
makeWebpackError(err, `AsyncQueue(${this._name}).hooks.beforeAdd`)
);
return;
}
const key = this._getKey(item);
const entry = this._entries.get(key);
if (entry !== undefined) {
if (entry.state === DONE_STATE) {
if (inHandleResult++ > 3) {
process.nextTick(() => callback(entry.error, entry.result));
} else {
callback(entry.error, entry.result);
}
inHandleResult--;
} else if (entry.callbacks === undefined) {
entry.callbacks = [callback];
} else {
entry.callbacks.push(callback);
}
return;
}
const newEntry = new AsyncQueueEntry(item, callback);
if (this._stopped) {
this.hooks.added.call(item);
this._root._activeTasks++;
process.nextTick(() =>
this._handleResult(newEntry, new WebpackError("Queue was stopped"))
);
} else {
this._entries.set(key, newEntry);
this._queued.enqueue(newEntry);
const root = this._root;
root._needProcessing = true;
if (root._willEnsureProcessing === false) {
root._willEnsureProcessing = true;
setImmediate(root._ensureProcessing);
}
this.hooks.added.call(item);
}
});
}
/**
* @param {T} item an item
* @returns {void}
*/
invalidate(item) {
const key = this._getKey(item);
const entry = this._entries.get(key);
this._entries.delete(key);
if (entry.state === QUEUED_STATE) {
this._queued.delete(entry);
}
}
/**
* Waits for an already started item
* @param {T} item an item
* @param {Callback<R>} callback callback function
* @returns {void}
*/
waitFor(item, callback) {
const key = this._getKey(item);
const entry = this._entries.get(key);
if (entry === undefined) {
return callback(
new WebpackError(
"waitFor can only be called for an already started item"
)
);
}
if (entry.state === DONE_STATE) {
process.nextTick(() => callback(entry.error, entry.result));
} else if (entry.callbacks === undefined) {
entry.callbacks = [callback];
} else {
entry.callbacks.push(callback);
}
}
/**
* @returns {void}
*/
stop() {
this._stopped = true;
const queue = this._queued;
this._queued = new ArrayQueue();
const root = this._root;
for (const entry of queue) {
this._entries.delete(this._getKey(entry.item));
root._activeTasks++;
this._handleResult(entry, new WebpackError("Queue was stopped"));
}
}
/**
* @returns {void}
*/
increaseParallelism() {
const root = this._root;
root._parallelism++;
/* istanbul ignore next */
if (root._willEnsureProcessing === false && root._needProcessing) {
root._willEnsureProcessing = true;
setImmediate(root._ensureProcessing);
}
}
/**
* @returns {void}
*/
decreaseParallelism() {
const root = this._root;
root._parallelism--;
}
/**
* @param {T} item an item
* @returns {boolean} true, if the item is currently being processed
*/
isProcessing(item) {
const key = this._getKey(item);
const entry = this._entries.get(key);
return entry !== undefined && entry.state === PROCESSING_STATE;
}
/**
* @param {T} item an item
* @returns {boolean} true, if the item is currently queued
*/
isQueued(item) {
const key = this._getKey(item);
const entry = this._entries.get(key);
return entry !== undefined && entry.state === QUEUED_STATE;
}
/**
* @param {T} item an item
* @returns {boolean} true, if the item is currently queued
*/
isDone(item) {
const key = this._getKey(item);
const entry = this._entries.get(key);
return entry !== undefined && entry.state === DONE_STATE;
}
/**
* @returns {void}
*/
_ensureProcessing() {
while (this._activeTasks < this._parallelism) {
const entry = this._queued.dequeue();
if (entry === undefined) break;
this._activeTasks++;
entry.state = PROCESSING_STATE;
this._startProcessing(entry);
}
this._willEnsureProcessing = false;
if (this._queued.length > 0) return;
if (this._children !== undefined) {
for (const child of this._children) {
while (this._activeTasks < this._parallelism) {
const entry = child._queued.dequeue();
if (entry === undefined) break;
this._activeTasks++;
entry.state = PROCESSING_STATE;
child._startProcessing(entry);
}
if (child._queued.length > 0) return;
}
}
if (!this._willEnsureProcessing) this._needProcessing = false;
}
/**
* @param {AsyncQueueEntry<T, K, R>} entry the entry
* @returns {void}
*/
_startProcessing(entry) {
this.hooks.beforeStart.callAsync(entry.item, err => {
if (err) {
this._handleResult(
entry,
makeWebpackError(err, `AsyncQueue(${this._name}).hooks.beforeStart`)
);
return;
}
let inCallback = false;
try {
this._processor(entry.item, (e, r) => {
inCallback = true;
this._handleResult(entry, e, r);
});
} catch (err) {
if (inCallback) throw err;
this._handleResult(entry, err, null);
}
this.hooks.started.call(entry.item);
});
}
/**
* @param {AsyncQueueEntry<T, K, R>} entry the entry
* @param {WebpackError=} err error, if any
* @param {R=} result result, if any
* @returns {void}
*/
_handleResult(entry, err, result) {
this.hooks.result.callAsync(entry.item, err, result, hookError => {
const error = hookError
? makeWebpackError(hookError, `AsyncQueue(${this._name}).hooks.result`)
: err;
const callback = entry.callback;
const callbacks = entry.callbacks;
entry.state = DONE_STATE;
entry.callback = undefined;
entry.callbacks = undefined;
entry.result = result;
entry.error = error;
const root = this._root;
root._activeTasks--;
if (root._willEnsureProcessing === false && root._needProcessing) {
root._willEnsureProcessing = true;
setImmediate(root._ensureProcessing);
}
if (inHandleResult++ > 3) {
process.nextTick(() => {
callback(error, result);
if (callbacks !== undefined) {
for (const callback of callbacks) {
callback(error, result);
}
}
});
} else {
callback(error, result);
if (callbacks !== undefined) {
for (const callback of callbacks) {
callback(error, result);
}
}
}
inHandleResult--;
});
}
clear() {
this._entries.clear();
this._queued.clear();
this._activeTasks = 0;
this._willEnsureProcessing = false;
this._needProcessing = false;
this._stopped = false;
}
}
module.exports = AsyncQueue;
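A minimal sketch of how such a queue is driven, assuming a toy processor (the item shape and delay are made up for illustration):

// Hypothetical usage sketch of AsyncQueue
const AsyncQueue = require("./AsyncQueue");
const queue = new AsyncQueue({
  name: "example",
  parallelism: 2, // at most two items are processed concurrently
  getKey: item => item.id,
  processor: (item, callback) => {
    setTimeout(() => callback(null, item.id * 2), 10);
  }
});
queue.add({ id: 21 }, (err, result) => {
  if (err) return console.error(err);
  console.log(result); // 42
});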

35
node_modules/webpack/lib/util/Hash.js generated vendored Normal file

@@ -0,0 +1,35 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
class Hash {
/* istanbul ignore next */
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @abstract
* @param {string|Buffer} data data
* @param {string=} inputEncoding data encoding
* @returns {this} updated hash
*/
update(data, inputEncoding) {
const AbstractMethodError = require("../AbstractMethodError");
throw new AbstractMethodError();
}
/* istanbul ignore next */
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @abstract
* @param {string=} encoding encoding of the return value
* @returns {string|Buffer} digest
*/
digest(encoding) {
const AbstractMethodError = require("../AbstractMethodError");
throw new AbstractMethodError();
}
}
module.exports = Hash;
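Hash is an abstract interface; a concrete implementation forwards to a real hash object. A sketch backed by Node's crypto module (the NodeHash name is made up for illustration):

// Hypothetical concrete Hash backed by Node's crypto module
const crypto = require("crypto");
const Hash = require("./Hash");
class NodeHash extends Hash {
  constructor(algorithm) {
    super();
    this._hash = crypto.createHash(algorithm);
  }
  update(data, inputEncoding) {
    this._hash.update(data, inputEncoding);
    return this;
  }
  digest(encoding) {
    return this._hash.digest(encoding);
  }
}
new NodeHash("sha256").update("hello").digest("hex"); // hex digest string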

46
node_modules/webpack/lib/util/IterableHelpers.js generated vendored Normal file

@@ -0,0 +1,46 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/**
* @template T
* @param {Iterable<T>} set a set
* @returns {T | undefined} last item
*/
const last = set => {
let last;
for (const item of set) last = item;
return last;
};
/**
* @template T
* @param {Iterable<T>} iterable iterable
* @param {function(T): boolean} filter predicate
* @returns {boolean} true, if some items match the filter predicate
*/
const someInIterable = (iterable, filter) => {
for (const item of iterable) {
if (filter(item)) return true;
}
return false;
};
/**
* @template T
* @param {Iterable<T>} iterable an iterable
* @returns {number} count of items
*/
const countIterable = iterable => {
let i = 0;
// eslint-disable-next-line no-unused-vars
for (const _ of iterable) i++;
return i;
};
exports.last = last;
exports.someInIterable = someInIterable;
exports.countIterable = countIterable;
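A short usage sketch of the three helpers (assumed require path; not part of the vendored file):

// Hypothetical usage sketch of IterableHelpers.js
const { last, someInIterable, countIterable } = require("./IterableHelpers");
const set = new Set(["a", "b", "c"]);
last(set); // "c"
someInIterable(set, item => item === "b"); // true
countIterable(set); // 3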

236
node_modules/webpack/lib/util/LazyBucketSortedSet.js generated vendored Normal file

@@ -0,0 +1,236 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const { first } = require("./SetHelpers");
const SortableSet = require("./SortableSet");
/**
* Multi layer bucket sorted set:
* Supports adding non-existing items (DO NOT ADD ITEM TWICE),
Supports removing existing items (DO NOT REMOVE ITEM NOT IN SET),
* Supports popping the first items according to defined order,
* Supports iterating all items without order,
* Supports updating an item in an efficient way,
* Supports size property, which is the number of items,
Items are lazily and partially sorted when needed
* @template T
* @template K
*/
class LazyBucketSortedSet {
/**
* @param {function(T): K} getKey function to get key from item
* @param {function(K, K): number} comparator comparator to sort keys
* @param {...((function(T): any) | (function(any, any): number))} args more pairs of getKey and comparator plus optional final comparator for the last layer
*/
constructor(getKey, comparator, ...args) {
this._getKey = getKey;
this._innerArgs = args;
this._leaf = args.length <= 1;
this._keys = new SortableSet(undefined, comparator);
/** @type {Map<K, LazyBucketSortedSet<T, any> | SortableSet<T>>} */
this._map = new Map();
this._unsortedItems = new Set();
this.size = 0;
}
/**
* @param {T} item an item
* @returns {void}
*/
add(item) {
this.size++;
this._unsortedItems.add(item);
}
/**
* @param {K} key key of item
* @param {T} item the item
* @returns {void}
*/
_addInternal(key, item) {
let entry = this._map.get(key);
if (entry === undefined) {
entry = this._leaf
? new SortableSet(undefined, this._innerArgs[0])
: new /** @type {any} */ (LazyBucketSortedSet)(...this._innerArgs);
this._keys.add(key);
this._map.set(key, entry);
}
entry.add(item);
}
/**
* @param {T} item an item
* @returns {void}
*/
delete(item) {
this.size--;
if (this._unsortedItems.has(item)) {
this._unsortedItems.delete(item);
return;
}
const key = this._getKey(item);
const entry = this._map.get(key);
entry.delete(item);
if (entry.size === 0) {
this._deleteKey(key);
}
}
/**
* @param {K} key key to be removed
* @returns {void}
*/
_deleteKey(key) {
this._keys.delete(key);
this._map.delete(key);
}
/**
* @returns {T | undefined} an item
*/
popFirst() {
if (this.size === 0) return undefined;
this.size--;
if (this._unsortedItems.size > 0) {
for (const item of this._unsortedItems) {
const key = this._getKey(item);
this._addInternal(key, item);
}
this._unsortedItems.clear();
}
this._keys.sort();
const key = first(this._keys);
const entry = this._map.get(key);
if (this._leaf) {
const leafEntry = /** @type {SortableSet<T>} */ (entry);
leafEntry.sort();
const item = first(leafEntry);
leafEntry.delete(item);
if (leafEntry.size === 0) {
this._deleteKey(key);
}
return item;
} else {
const nodeEntry = /** @type {LazyBucketSortedSet<T, any>} */ (entry);
const item = nodeEntry.popFirst();
if (nodeEntry.size === 0) {
this._deleteKey(key);
}
return item;
}
}
/**
* @param {T} item the item to be updated
* @returns {function(true=): void} finish update
*/
startUpdate(item) {
if (this._unsortedItems.has(item)) {
return remove => {
if (remove) {
this._unsortedItems.delete(item);
this.size--;
return;
}
};
}
const key = this._getKey(item);
if (this._leaf) {
const oldEntry = /** @type {SortableSet<T>} */ (this._map.get(key));
return remove => {
if (remove) {
this.size--;
oldEntry.delete(item);
if (oldEntry.size === 0) {
this._deleteKey(key);
}
return;
}
const newKey = this._getKey(item);
if (key === newKey) {
// This flags the sortable set as unordered
oldEntry.add(item);
} else {
oldEntry.delete(item);
if (oldEntry.size === 0) {
this._deleteKey(key);
}
this._addInternal(newKey, item);
}
};
} else {
const oldEntry = /** @type {LazyBucketSortedSet<T, any>} */ (
this._map.get(key)
);
const finishUpdate = oldEntry.startUpdate(item);
return remove => {
if (remove) {
this.size--;
finishUpdate(true);
if (oldEntry.size === 0) {
this._deleteKey(key);
}
return;
}
const newKey = this._getKey(item);
if (key === newKey) {
finishUpdate();
} else {
finishUpdate(true);
if (oldEntry.size === 0) {
this._deleteKey(key);
}
this._addInternal(newKey, item);
}
};
}
}
/**
* @param {Iterator<T>[]} iterators list of iterators to append to
* @returns {void}
*/
_appendIterators(iterators) {
if (this._unsortedItems.size > 0)
iterators.push(this._unsortedItems[Symbol.iterator]());
for (const key of this._keys) {
const entry = this._map.get(key);
if (this._leaf) {
const leafEntry = /** @type {SortableSet<T>} */ (entry);
const iterator = leafEntry[Symbol.iterator]();
iterators.push(iterator);
} else {
const nodeEntry = /** @type {LazyBucketSortedSet<T, any>} */ (entry);
nodeEntry._appendIterators(iterators);
}
}
}
/**
* @returns {Iterator<T>} the iterator
*/
[Symbol.iterator]() {
const iterators = [];
this._appendIterators(iterators);
iterators.reverse();
let currentIterator = iterators.pop();
return {
next: () => {
const res = currentIterator.next();
if (res.done) {
if (iterators.length === 0) return res;
currentIterator = iterators.pop();
return currentIterator.next();
}
return res;
}
};
}
}
module.exports = LazyBucketSortedSet;
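A sketch of a single-layer use: items are bucketed by a numeric priority and sorted by name inside each bucket (item shape and comparators are made up for illustration):

// Hypothetical usage sketch of LazyBucketSortedSet
const LazyBucketSortedSet = require("./LazyBucketSortedSet");
const set = new LazyBucketSortedSet(
  item => item.priority, // key for the bucket layer
  (a, b) => a - b, // sort buckets by priority, ascending
  (a, b) => (a.name < b.name ? -1 : a.name > b.name ? 1 : 0) // sort within a bucket
);
set.add({ name: "b", priority: 1 });
set.add({ name: "a", priority: 1 });
set.add({ name: "c", priority: 0 });
set.popFirst(); // { name: "c", priority: 0 }
set.popFirst(); // { name: "a", priority: 1 }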

208
node_modules/webpack/lib/util/LazySet.js generated vendored Normal file

@@ -0,0 +1,208 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const makeSerializable = require("./makeSerializable.js");
/**
* @template T
* @param {Set<T>} targetSet set where items should be added
* @param {Set<Iterable<T>>} toMerge iterables to be merged
* @returns {void}
*/
const merge = (targetSet, toMerge) => {
for (const set of toMerge) {
for (const item of set) {
targetSet.add(item);
}
}
};
/**
* @template T
* @param {Set<Iterable<T>>} targetSet set where iterables should be added
* @param {Array<LazySet<T>>} toDeepMerge lazy sets to be flattened
* @returns {void}
*/
const flatten = (targetSet, toDeepMerge) => {
for (const set of toDeepMerge) {
if (set._set.size > 0) targetSet.add(set._set);
if (set._needMerge) {
for (const mergedSet of set._toMerge) {
targetSet.add(mergedSet);
}
flatten(targetSet, set._toDeepMerge);
}
}
};
/**
* Like Set but with an addAll method to eventually add items from another iterable.
* Access methods make sure that all delayed operations are executed.
* Iteration methods deopt the set to normal Set behavior until clear is called again (because of the chance of modifications during iteration).
* @template T
*/
class LazySet {
/**
* @param {Iterable<T>=} iterable init iterable
*/
constructor(iterable) {
/** @type {Set<T>} */
this._set = new Set(iterable);
/** @type {Set<Iterable<T>>} */
this._toMerge = new Set();
/** @type {Array<LazySet<T>>} */
this._toDeepMerge = [];
this._needMerge = false;
this._deopt = false;
}
_flatten() {
flatten(this._toMerge, this._toDeepMerge);
this._toDeepMerge.length = 0;
}
_merge() {
this._flatten();
merge(this._set, this._toMerge);
this._toMerge.clear();
this._needMerge = false;
}
_isEmpty() {
return (
this._set.size === 0 &&
this._toMerge.size === 0 &&
this._toDeepMerge.length === 0
);
}
get size() {
if (this._needMerge) this._merge();
return this._set.size;
}
/**
* @param {T} item an item
* @returns {this} itself
*/
add(item) {
this._set.add(item);
return this;
}
/**
* @param {Iterable<T> | LazySet<T>} iterable an immutable iterable or another immutable LazySet which will eventually be merged into the Set
* @returns {this} itself
*/
addAll(iterable) {
if (this._deopt) {
const _set = this._set;
for (const item of iterable) {
_set.add(item);
}
} else {
if (iterable instanceof LazySet) {
if (iterable._isEmpty()) return this;
this._toDeepMerge.push(iterable);
this._needMerge = true;
if (this._toDeepMerge.length > 100000) {
this._flatten();
}
} else {
this._toMerge.add(iterable);
this._needMerge = true;
}
if (this._toMerge.size > 100000) this._merge();
}
return this;
}
clear() {
this._set.clear();
this._toMerge.clear();
this._toDeepMerge.length = 0;
this._needMerge = false;
this._deopt = false;
}
/**
* @param {T} value an item
* @returns {boolean} true, if the value was in the Set before
*/
delete(value) {
if (this._needMerge) this._merge();
return this._set.delete(value);
}
entries() {
this._deopt = true;
if (this._needMerge) this._merge();
return this._set.entries();
}
/**
* @param {function(T, T, Set<T>): void} callbackFn function called for each entry
* @param {any} thisArg this argument for the callbackFn
* @returns {void}
*/
forEach(callbackFn, thisArg) {
this._deopt = true;
if (this._needMerge) this._merge();
this._set.forEach(callbackFn, thisArg);
}
/**
* @param {T} item an item
* @returns {boolean} true, when the item is in the Set
*/
has(item) {
if (this._needMerge) this._merge();
return this._set.has(item);
}
keys() {
this._deopt = true;
if (this._needMerge) this._merge();
return this._set.keys();
}
values() {
this._deopt = true;
if (this._needMerge) this._merge();
return this._set.values();
}
[Symbol.iterator]() {
this._deopt = true;
if (this._needMerge) this._merge();
return this._set[Symbol.iterator]();
}
/* istanbul ignore next */
get [Symbol.toStringTag]() {
return "LazySet";
}
serialize({ write }) {
if (this._needMerge) this._merge();
write(this._set.size);
for (const item of this._set) write(item);
}
static deserialize({ read }) {
const count = read();
const items = [];
for (let i = 0; i < count; i++) {
items.push(read());
}
return new LazySet(items);
}
}
makeSerializable(LazySet, "webpack/lib/util/LazySet");
module.exports = LazySet;
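A sketch of the lazy behavior: addAll only records the iterable, and the merge happens on the first read access (size, has, iteration):

// Hypothetical usage sketch of LazySet
const LazySet = require("./LazySet");
const a = new LazySet(["x"]);
const b = new LazySet(["y", "z"]);
a.addAll(b); // no copying yet, b is only referenced
a.addAll(["w"]); // plain iterables are queued as well
console.log(a.size); // 4, the pending merges are executed here
console.log(a.has("z")); // true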

22
node_modules/webpack/lib/util/MapHelpers.js generated vendored Normal file

@@ -0,0 +1,22 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/**
* @template K
* @template V
* @param {Map<K, V>} map a map
* @param {K} key the key
* @param {function(): V} computer compute value
* @returns {V} value
*/
exports.provide = (map, key, computer) => {
const value = map.get(key);
if (value !== undefined) return value;
const newValue = computer();
map.set(key, newValue);
return newValue;
};
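provide is a compute-if-absent helper; a minimal sketch:

// Hypothetical usage sketch of MapHelpers.provide
const { provide } = require("./MapHelpers");
const cache = new Map();
provide(cache, "answer", () => 42); // computes and stores 42
provide(cache, "answer", () => 0); // returns the cached 42, the computer is not called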

59
node_modules/webpack/lib/util/ParallelismFactorCalculator.js generated vendored Normal file

@@ -0,0 +1,59 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const binarySearchBounds = require("../util/binarySearchBounds");
class ParallelismFactorCalculator {
constructor() {
this._rangePoints = [];
this._rangeCallbacks = [];
}
range(start, end, callback) {
if (start === end) return callback(1);
this._rangePoints.push(start);
this._rangePoints.push(end);
this._rangeCallbacks.push(callback);
}
calculate() {
const segments = Array.from(new Set(this._rangePoints)).sort((a, b) =>
a < b ? -1 : 1
);
const parallelism = segments.map(() => 0);
const rangeStartIndices = [];
for (let i = 0; i < this._rangePoints.length; i += 2) {
const start = this._rangePoints[i];
const end = this._rangePoints[i + 1];
let idx = binarySearchBounds.eq(segments, start);
rangeStartIndices.push(idx);
do {
parallelism[idx]++;
idx++;
} while (segments[idx] < end);
}
for (let i = 0; i < this._rangeCallbacks.length; i++) {
const start = this._rangePoints[i * 2];
const end = this._rangePoints[i * 2 + 1];
let idx = rangeStartIndices[i];
let sum = 0;
let totalDuration = 0;
let current = start;
do {
const p = parallelism[idx];
idx++;
const duration = segments[idx] - current;
totalDuration += duration;
current = segments[idx];
sum += p * duration;
} while (current < end);
this._rangeCallbacks[i](sum / totalDuration);
}
}
}
module.exports = ParallelismFactorCalculator;
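The calculator reports, per registered range, the time-weighted average number of overlapping ranges. A sketch with two overlapping ranges (numbers chosen for illustration, file name assumed):

// Hypothetical usage sketch of ParallelismFactorCalculator
const ParallelismFactorCalculator = require("./ParallelismFactorCalculator");
const calc = new ParallelismFactorCalculator();
calc.range(0, 10, factor => console.log("A", factor)); // A 1.5
calc.range(5, 15, factor => console.log("B", factor)); // B 1.5
calc.calculate();
// each range spends half its duration alone (factor 1) and half overlapped (factor 2)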

51
node_modules/webpack/lib/util/Queue.js generated vendored Normal file

@@ -0,0 +1,51 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/**
* @template T
*/
class Queue {
/**
* @param {Iterable<T>=} items The initial elements.
*/
constructor(items) {
/** @private @type {Set<T>} */
this._set = new Set(items);
/** @private @type {Iterator<T>} */
this._iterator = this._set[Symbol.iterator]();
}
/**
* Returns the number of elements in this queue.
* @returns {number} The number of elements in this queue.
*/
get length() {
return this._set.size;
}
/**
* Appends the specified element to this queue.
* @param {T} item The element to add.
* @returns {void}
*/
enqueue(item) {
this._set.add(item);
}
/**
* Retrieves and removes the head of this queue.
* @returns {T | undefined} The head of the queue, or `undefined` if this queue is empty.
*/
dequeue() {
const result = this._iterator.next();
if (result.done) return undefined;
this._set.delete(result.value);
return result.value;
}
}
module.exports = Queue;
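Queue is a FIFO built on a Set, so it also de-duplicates. A sketch:

// Hypothetical usage sketch of Queue
const Queue = require("./Queue");
const queue = new Queue(["a", "b"]);
queue.enqueue("a"); // already present, the underlying Set keeps one copy
queue.enqueue("c");
queue.dequeue(); // "a"
queue.dequeue(); // "b"
console.log(queue.length); // 1, only "c" is left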

54
node_modules/webpack/lib/util/Semaphore.js generated vendored Normal file

@@ -0,0 +1,54 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
class Semaphore {
/**
* Creates an instance of Semaphore.
*
* @param {number} available the number of "tasks" that may run
* concurrently in the Semaphore
*/
constructor(available) {
this.available = available;
/** @type {(function(): void)[]} */
this.waiters = [];
/** @private */
this._continue = this._continue.bind(this);
}
/**
* @param {function(): void} callback function block to capture and run
* @returns {void}
*/
acquire(callback) {
if (this.available > 0) {
this.available--;
callback();
} else {
this.waiters.push(callback);
}
}
release() {
this.available++;
if (this.waiters.length > 0) {
process.nextTick(this._continue);
}
}
_continue() {
if (this.available > 0) {
if (this.waiters.length > 0) {
this.available--;
const callback = this.waiters.pop();
callback();
}
}
}
}
module.exports = Semaphore;
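A sketch that caps concurrency at two tasks (the task ids and timeout are made up for illustration):

// Hypothetical usage sketch of Semaphore
const Semaphore = require("./Semaphore");
const semaphore = new Semaphore(2);
const runTask = id => {
  semaphore.acquire(() => {
    console.log("start", id);
    setTimeout(() => {
      console.log("done", id);
      semaphore.release(); // frees a slot, the next waiter runs on the next tick
    }, 100);
  });
};
[1, 2, 3].forEach(runTask); // task 3 starts only after 1 or 2 releases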

94
node_modules/webpack/lib/util/SetHelpers.js generated vendored Normal file

@@ -0,0 +1,94 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/**
* intersect creates Set containing the intersection of elements between all sets
* @template T
* @param {Set<T>[]} sets an array of sets being checked for shared elements
* @returns {Set<T>} returns a new Set containing the intersecting items
*/
const intersect = sets => {
if (sets.length === 0) return new Set();
if (sets.length === 1) return new Set(sets[0]);
let minSize = Infinity;
let minIndex = -1;
for (let i = 0; i < sets.length; i++) {
const size = sets[i].size;
if (size < minSize) {
minIndex = i;
minSize = size;
}
}
const current = new Set(sets[minIndex]);
for (let i = 0; i < sets.length; i++) {
if (i === minIndex) continue;
const set = sets[i];
for (const item of current) {
if (!set.has(item)) {
current.delete(item);
}
}
}
return current;
};
/**
* Checks if a set is the subset of another set
* @template T
* @param {Set<T>} bigSet a Set which contains the original elements to compare against
* @param {Set<T>} smallSet the set whose elements might be contained inside of bigSet
* @returns {boolean} returns true if bigSet contains every element of smallSet
*/
const isSubset = (bigSet, smallSet) => {
if (bigSet.size < smallSet.size) return false;
for (const item of smallSet) {
if (!bigSet.has(item)) return false;
}
return true;
};
/**
* @template T
* @param {Set<T>} set a set
* @param {function(T): boolean} fn selector function
* @returns {T | undefined} found item
*/
const find = (set, fn) => {
for (const item of set) {
if (fn(item)) return item;
}
};
/**
* @template T
* @param {Set<T>} set a set
* @returns {T | undefined} first item
*/
const first = set => {
const entry = set.values().next();
return entry.done ? undefined : entry.value;
};
/**
* @template T
* @param {Set<T>} a first
* @param {Set<T>} b second
* @returns {Set<T>} combined set, may be identical to a or b
*/
const combine = (a, b) => {
if (b.size === 0) return a;
if (a.size === 0) return b;
const set = new Set(a);
for (const item of b) set.add(item);
return set;
};
exports.intersect = intersect;
exports.isSubset = isSubset;
exports.find = find;
exports.first = first;
exports.combine = combine;
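A short sketch of the exported helpers (not part of the vendored file):

// Hypothetical usage sketch of SetHelpers.js
const { intersect, isSubset, find, first, combine } = require("./SetHelpers");
const a = new Set([1, 2, 3]);
const b = new Set([2, 3, 4]);
intersect([a, b]); // Set { 2, 3 }
isSubset(a, new Set([1, 3])); // true, every element of the small set is in a
find(a, n => n > 2); // 3
first(b); // 2
combine(a, b); // Set { 1, 2, 3, 4 }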

160
node_modules/webpack/lib/util/SortableSet.js generated vendored Normal file

@@ -0,0 +1,160 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const NONE = Symbol("not sorted");
/**
* A subclass of Set that offers sorting functionality
* @template T item type in set
* @extends {Set<T>}
*/
class SortableSet extends Set {
/**
* Create a new sortable set
* @param {Iterable<T>=} initialIterable The initial iterable value
* @typedef {function(T, T): number} SortFunction
* @param {SortFunction=} defaultSort Default sorting function
*/
constructor(initialIterable, defaultSort) {
super(initialIterable);
/** @private @type {undefined | function(T, T): number} */
this._sortFn = defaultSort;
/** @private @type {typeof NONE | undefined | function(T, T): number} */
this._lastActiveSortFn = NONE;
/** @private @type {Map<Function, any> | undefined} */
this._cache = undefined;
/** @private @type {Map<Function, any> | undefined} */
this._cacheOrderIndependent = undefined;
}
/**
* @param {T} value value to add to set
* @returns {this} returns itself
*/
add(value) {
this._lastActiveSortFn = NONE;
this._invalidateCache();
this._invalidateOrderedCache();
super.add(value);
return this;
}
/**
* @param {T} value value to delete
* @returns {boolean} true if value existed in set, false otherwise
*/
delete(value) {
this._invalidateCache();
this._invalidateOrderedCache();
return super.delete(value);
}
/**
* @returns {void}
*/
clear() {
this._invalidateCache();
this._invalidateOrderedCache();
return super.clear();
}
/**
* Sort with a comparer function
* @param {SortFunction} sortFn Sorting comparer function
* @returns {void}
*/
sortWith(sortFn) {
if (this.size <= 1 || sortFn === this._lastActiveSortFn) {
// already sorted - nothing to do
return;
}
const sortedArray = Array.from(this).sort(sortFn);
super.clear();
for (let i = 0; i < sortedArray.length; i += 1) {
super.add(sortedArray[i]);
}
this._lastActiveSortFn = sortFn;
this._invalidateCache();
}
sort() {
this.sortWith(this._sortFn);
return this;
}
/**
* Get data from cache
* @template R
* @param {function(SortableSet<T>): R} fn function to calculate value
* @returns {R} returns result of fn(this), cached until set changes
*/
getFromCache(fn) {
if (this._cache === undefined) {
this._cache = new Map();
} else {
const result = this._cache.get(fn);
const data = /** @type {R} */ (result);
if (data !== undefined) {
return data;
}
}
const newData = fn(this);
this._cache.set(fn, newData);
return newData;
}
/**
* Get data from cache (ignoring sorting)
* @template R
* @param {function(SortableSet<T>): R} fn function to calculate value
* @returns {R} returns result of fn(this), cached until set changes
*/
getFromUnorderedCache(fn) {
if (this._cacheOrderIndependent === undefined) {
this._cacheOrderIndependent = new Map();
} else {
const result = this._cacheOrderIndependent.get(fn);
const data = /** @type {R} */ (result);
if (data !== undefined) {
return data;
}
}
const newData = fn(this);
this._cacheOrderIndependent.set(fn, newData);
return newData;
}
/**
* @private
* @returns {void}
*/
_invalidateCache() {
if (this._cache !== undefined) {
this._cache.clear();
}
}
/**
* @private
* @returns {void}
*/
_invalidateOrderedCache() {
if (this._cacheOrderIndependent !== undefined) {
this._cacheOrderIndependent.clear();
}
}
/**
* @returns {T[]} the raw array
*/
toJSON() {
return Array.from(this);
}
}
module.exports = SortableSet;
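A sketch showing the default sort and the function-keyed caches (the sum helper is made up for illustration):

// Hypothetical usage sketch of SortableSet
const SortableSet = require("./SortableSet");
const set = new SortableSet([3, 1, 2], (a, b) => a - b);
set.sort(); // uses the default comparator passed to the constructor
console.log([...set]); // [1, 2, 3]
const sum = items => {
  let s = 0;
  for (const n of items) s += n;
  return s;
};
set.getFromUnorderedCache(sum); // 6, computed once
set.getFromUnorderedCache(sum); // 6, served from the cache until the set changes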

110
node_modules/webpack/lib/util/StackedCacheMap.js generated vendored Normal file

@@ -0,0 +1,110 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/**
* @template K
* @template V
*/
class StackedCacheMap {
constructor() {
/** @type {Map<K, V>} */
this.map = new Map();
/** @type {ReadonlyMap<K, V>[]} */
this.stack = [];
}
/**
* @param {ReadonlyMap<K, V>} map map to add
* @param {boolean} immutable if 'map' is immutable and StackedCacheMap can keep referencing it
*/
addAll(map, immutable) {
if (immutable) {
this.stack.push(map);
// largest map should go first
for (let i = this.stack.length - 1; i > 0; i--) {
const beforeLast = this.stack[i - 1];
if (beforeLast.size >= map.size) break;
this.stack[i] = beforeLast;
this.stack[i - 1] = map;
}
} else {
for (const [key, value] of map) {
this.map.set(key, value);
}
}
}
/**
* @param {K} item the key of the element to add
* @param {V} value the value of the element to add
* @returns {void}
*/
set(item, value) {
this.map.set(item, value);
}
/**
* @param {K} item the item to delete
* @returns {void}
*/
delete(item) {
throw new Error("Items can't be deleted from a StackedCacheMap");
}
/**
* @param {K} item the item to test
* @returns {boolean} true if the item exists in this map
*/
has(item) {
throw new Error(
"Checking StackedCacheMap.has before reading is inefficient, use StackedCacheMap.get and check for undefined"
);
}
/**
* @param {K} item the key of the element to return
* @returns {V} the value of the element
*/
get(item) {
for (const map of this.stack) {
const value = map.get(item);
if (value !== undefined) return value;
}
return this.map.get(item);
}
clear() {
this.stack.length = 0;
this.map.clear();
}
get size() {
let size = this.map.size;
for (const map of this.stack) {
size += map.size;
}
return size;
}
[Symbol.iterator]() {
const iterators = this.stack.map(map => map[Symbol.iterator]());
let current = this.map[Symbol.iterator]();
return {
next() {
let result = current.next();
while (result.done && iterators.length > 0) {
current = iterators.pop();
result = current.next();
}
return result;
}
};
}
}
module.exports = StackedCacheMap;
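A sketch: immutable maps are layered by reference below one writable map, and lookups fall through the layers:

// Hypothetical usage sketch of StackedCacheMap
const StackedCacheMap = require("./StackedCacheMap");
const cache = new StackedCacheMap();
cache.addAll(new Map([["a", 1], ["b", 2]]), true); // kept by reference, not copied
cache.set("c", 3); // goes into the writable top map
cache.get("a"); // 1, found in the immutable layer
cache.get("c"); // 3
console.log(cache.size); // 3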

166
node_modules/webpack/lib/util/StackedMap.js generated vendored Normal file

@@ -0,0 +1,166 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const TOMBSTONE = Symbol("tombstone");
const UNDEFINED_MARKER = Symbol("undefined");
/**
* @template T
* @typedef {T | undefined} Cell<T>
*/
/**
* @template T
* @typedef {T | typeof TOMBSTONE | typeof UNDEFINED_MARKER} InternalCell<T>
*/
/**
* @template K
* @template V
* @param {[K, InternalCell<V>]} pair the internal cell
* @returns {[K, Cell<V>]} its “safe” representation
*/
const extractPair = pair => {
const key = pair[0];
const val = pair[1];
if (val === UNDEFINED_MARKER || val === TOMBSTONE) {
return [key, undefined];
} else {
return /** @type {[K, Cell<V>]} */ (pair);
}
};
/**
* @template K
* @template V
*/
class StackedMap {
/**
* @param {Map<K, InternalCell<V>>[]=} parentStack an optional parent
*/
constructor(parentStack) {
/** @type {Map<K, InternalCell<V>>} */
this.map = new Map();
/** @type {Map<K, InternalCell<V>>[]} */
this.stack = parentStack === undefined ? [] : parentStack.slice();
this.stack.push(this.map);
}
/**
* @param {K} item the key of the element to add
* @param {V} value the value of the element to add
* @returns {void}
*/
set(item, value) {
this.map.set(item, value === undefined ? UNDEFINED_MARKER : value);
}
/**
* @param {K} item the item to delete
* @returns {void}
*/
delete(item) {
if (this.stack.length > 1) {
this.map.set(item, TOMBSTONE);
} else {
this.map.delete(item);
}
}
/**
* @param {K} item the item to test
* @returns {boolean} true if the item exists in this map
*/
has(item) {
const topValue = this.map.get(item);
if (topValue !== undefined) {
return topValue !== TOMBSTONE;
}
if (this.stack.length > 1) {
for (let i = this.stack.length - 2; i >= 0; i--) {
const value = this.stack[i].get(item);
if (value !== undefined) {
this.map.set(item, value);
return value !== TOMBSTONE;
}
}
this.map.set(item, TOMBSTONE);
}
return false;
}
/**
* @param {K} item the key of the element to return
* @returns {Cell<V>} the value of the element
*/
get(item) {
const topValue = this.map.get(item);
if (topValue !== undefined) {
return topValue === TOMBSTONE || topValue === UNDEFINED_MARKER
? undefined
: topValue;
}
if (this.stack.length > 1) {
for (let i = this.stack.length - 2; i >= 0; i--) {
const value = this.stack[i].get(item);
if (value !== undefined) {
this.map.set(item, value);
return value === TOMBSTONE || value === UNDEFINED_MARKER
? undefined
: value;
}
}
this.map.set(item, TOMBSTONE);
}
return undefined;
}
_compress() {
if (this.stack.length === 1) return;
this.map = new Map();
for (const data of this.stack) {
for (const pair of data) {
if (pair[1] === TOMBSTONE) {
this.map.delete(pair[0]);
} else {
this.map.set(pair[0], pair[1]);
}
}
}
this.stack = [this.map];
}
asArray() {
this._compress();
return Array.from(this.map.keys());
}
asSet() {
this._compress();
return new Set(this.map.keys());
}
asPairArray() {
this._compress();
return Array.from(this.map.entries(), extractPair);
}
asMap() {
return new Map(this.asPairArray());
}
get size() {
this._compress();
return this.map.size;
}
createChild() {
return new StackedMap(this.stack);
}
}
module.exports = StackedMap;
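A sketch of the copy-on-write layering: a child sees parent entries until it overrides or tombstones them:

// Hypothetical usage sketch of StackedMap
const StackedMap = require("./StackedMap");
const parent = new StackedMap();
parent.set("mode", "development");
const child = parent.createChild();
child.set("mode", "production");
child.delete("other"); // writes a tombstone into the child layer only
parent.get("mode"); // "development", the parent is unaffected
child.get("mode"); // "production"
child.has("other"); // false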

55
node_modules/webpack/lib/util/StringXor.js generated vendored Normal file

@@ -0,0 +1,55 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
class StringXor {
constructor() {
this._value = undefined;
}
/**
* @param {string} str string
* @returns {void}
*/
add(str) {
const len = str.length;
const value = this._value;
if (value === undefined) {
const newValue = (this._value = Buffer.allocUnsafe(len));
for (let i = 0; i < len; i++) {
newValue[i] = str.charCodeAt(i);
}
return;
}
const valueLen = value.length;
if (valueLen < len) {
const newValue = (this._value = Buffer.allocUnsafe(len));
let i;
for (i = 0; i < valueLen; i++) {
newValue[i] = value[i] ^ str.charCodeAt(i);
}
for (; i < len; i++) {
newValue[i] = str.charCodeAt(i);
}
} else {
for (let i = 0; i < len; i++) {
value[i] = value[i] ^ str.charCodeAt(i);
}
}
}
toString() {
const value = this._value;
return value === undefined ? "" : value.toString("latin1");
}
updateHash(hash) {
const value = this._value;
if (value !== undefined) hash.update(value);
}
}
module.exports = StringXor;
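Because XOR is commutative and associative, the combined value is independent of insertion order; a sketch:

// Hypothetical usage sketch of StringXor
const StringXor = require("./StringXor");
const a = new StringXor();
a.add("foo");
a.add("bar");
const b = new StringXor();
b.add("bar");
b.add("foo");
console.log(a.toString() === b.toString()); // true, order does not matter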

61
node_modules/webpack/lib/util/TupleQueue.js generated vendored Normal file

@@ -0,0 +1,61 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const TupleSet = require("./TupleSet");
/**
* @template {any[]} T
*/
class TupleQueue {
/**
* @param {Iterable<T>=} items The initial elements.
*/
constructor(items) {
/** @private @type {TupleSet<T>} */
this._set = new TupleSet(items);
/** @private @type {Iterator<T>} */
this._iterator = this._set[Symbol.iterator]();
}
/**
* Returns the number of elements in this queue.
* @returns {number} The number of elements in this queue.
*/
get length() {
return this._set.size;
}
/**
* Appends the specified element to this queue.
* @param {T} item The element to add.
* @returns {void}
*/
enqueue(...item) {
this._set.add(...item);
}
/**
* Retrieves and removes the head of this queue.
* @returns {T | undefined} The head of the queue, or `undefined` if this queue is empty.
*/
dequeue() {
const result = this._iterator.next();
if (result.done) {
if (this._set.size > 0) {
this._iterator = this._set[Symbol.iterator]();
const value = this._iterator.next().value;
this._set.delete(...value);
return value;
}
return undefined;
}
this._set.delete(...result.value);
return result.value;
}
}
module.exports = TupleQueue;
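A sketch of the tuple-keyed FIFO (the module/runtime strings are made up for illustration):

// Hypothetical usage sketch of TupleQueue
const TupleQueue = require("./TupleQueue");
const queue = new TupleQueue();
queue.enqueue("moduleA", "runtime1");
queue.enqueue("moduleB", "runtime1");
queue.dequeue(); // ["moduleA", "runtime1"]
console.log(queue.length); // 1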

150
node_modules/webpack/lib/util/TupleSet.js generated vendored Normal file

@@ -0,0 +1,150 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/**
* @template {any[]} T
*/
class TupleSet {
constructor(init) {
this._map = new Map();
this.size = 0;
if (init) {
for (const tuple of init) {
this.add(...tuple);
}
}
}
/**
* @param {T} args tuple
* @returns {void}
*/
add(...args) {
let map = this._map;
for (let i = 0; i < args.length - 2; i++) {
const arg = args[i];
const innerMap = map.get(arg);
if (innerMap === undefined) {
map.set(arg, (map = new Map()));
} else {
map = innerMap;
}
}
const beforeLast = args[args.length - 2];
let set = map.get(beforeLast);
if (set === undefined) {
map.set(beforeLast, (set = new Set()));
}
const last = args[args.length - 1];
this.size -= set.size;
set.add(last);
this.size += set.size;
}
/**
* @param {T} args tuple
* @returns {boolean} true, if the tuple is in the Set
*/
has(...args) {
let map = this._map;
for (let i = 0; i < args.length - 2; i++) {
const arg = args[i];
map = map.get(arg);
if (map === undefined) {
return false;
}
}
const beforeLast = args[args.length - 2];
let set = map.get(beforeLast);
if (set === undefined) {
return false;
}
const last = args[args.length - 1];
return set.has(last);
}
/**
* @param {T} args tuple
* @returns {void}
*/
delete(...args) {
let map = this._map;
for (let i = 0; i < args.length - 2; i++) {
const arg = args[i];
map = map.get(arg);
if (map === undefined) {
return;
}
}
const beforeLast = args[args.length - 2];
let set = map.get(beforeLast);
if (set === undefined) {
return;
}
const last = args[args.length - 1];
this.size -= set.size;
set.delete(last);
this.size += set.size;
}
/**
* @returns {Iterator<T>} iterator
*/
[Symbol.iterator]() {
const iteratorStack = [];
const tuple = [];
let currentSetIterator = undefined;
const next = it => {
const result = it.next();
if (result.done) {
if (iteratorStack.length === 0) return false;
tuple.pop();
return next(iteratorStack.pop());
}
const [key, value] = result.value;
iteratorStack.push(it);
tuple.push(key);
if (value instanceof Set) {
currentSetIterator = value[Symbol.iterator]();
return true;
} else {
return next(value[Symbol.iterator]());
}
};
next(this._map[Symbol.iterator]());
return {
next() {
while (currentSetIterator) {
const result = currentSetIterator.next();
if (result.done) {
tuple.pop();
if (!next(iteratorStack.pop())) {
currentSetIterator = undefined;
}
} else {
return {
done: false,
value: /** @type {T} */ (tuple.concat(result.value))
};
}
}
return { done: true, value: undefined };
}
};
}
}
module.exports = TupleSet;
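A sketch of the tuple-keyed set (the example keys are made up for illustration):

// Hypothetical usage sketch of TupleSet
const TupleSet = require("./TupleSet");
const set = new TupleSet();
set.add("moduleA", "runtime1");
set.add("moduleA", "runtime2");
set.has("moduleA", "runtime1"); // true
set.delete("moduleA", "runtime1");
console.log(set.size); // 1
console.log([...set]); // [["moduleA", "runtime2"]]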

87
node_modules/webpack/lib/util/URLAbsoluteSpecifier.js generated vendored Normal file

@@ -0,0 +1,87 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Ivan Kopeykin @vankop
*/
"use strict";
/** @typedef {import("./fs").InputFileSystem} InputFileSystem */
/** @typedef {(error: Error|null, result?: Buffer) => void} ErrorFirstCallback */
const backSlashCharCode = "\\".charCodeAt(0);
const slashCharCode = "/".charCodeAt(0);
const aLowerCaseCharCode = "a".charCodeAt(0);
const zLowerCaseCharCode = "z".charCodeAt(0);
const aUpperCaseCharCode = "A".charCodeAt(0);
const zUpperCaseCharCode = "Z".charCodeAt(0);
const _0CharCode = "0".charCodeAt(0);
const _9CharCode = "9".charCodeAt(0);
const plusCharCode = "+".charCodeAt(0);
const hyphenCharCode = "-".charCodeAt(0);
const colonCharCode = ":".charCodeAt(0);
const hashCharCode = "#".charCodeAt(0);
const queryCharCode = "?".charCodeAt(0);
/**
* Get scheme if specifier is an absolute URL specifier
* e.g. Absolute specifiers like 'file:///user/webpack/index.js'
* https://tools.ietf.org/html/rfc3986#section-3.1
* @param {string} specifier specifier
* @returns {string|undefined} scheme if absolute URL specifier provided
*/
function getScheme(specifier) {
const start = specifier.charCodeAt(0);
// First char may only be a letter
if (
(start < aLowerCaseCharCode || start > zLowerCaseCharCode) &&
(start < aUpperCaseCharCode || start > zUpperCaseCharCode)
) {
return undefined;
}
let i = 1;
let ch = specifier.charCodeAt(i);
while (
(ch >= aLowerCaseCharCode && ch <= zLowerCaseCharCode) ||
(ch >= aUpperCaseCharCode && ch <= zUpperCaseCharCode) ||
(ch >= _0CharCode && ch <= _9CharCode) ||
ch === plusCharCode ||
ch === hyphenCharCode
) {
if (++i === specifier.length) return undefined;
ch = specifier.charCodeAt(i);
}
// Scheme must end with colon
if (ch !== colonCharCode) return undefined;
// Check for Windows absolute path
// https://url.spec.whatwg.org/#url-miscellaneous
if (i === 1) {
const nextChar = i + 1 < specifier.length ? specifier.charCodeAt(i + 1) : 0;
if (
nextChar === 0 ||
nextChar === backSlashCharCode ||
nextChar === slashCharCode ||
nextChar === hashCharCode ||
nextChar === queryCharCode
) {
return undefined;
}
}
return specifier.slice(0, i).toLowerCase();
}
/**
* @param {string} specifier specifier
* @returns {string|undefined} protocol if absolute URL specifier provided
*/
function getProtocol(specifier) {
const scheme = getScheme(specifier);
return scheme === undefined ? undefined : scheme + ":";
}
exports.getScheme = getScheme;
exports.getProtocol = getProtocol;
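A few example inputs and what the scheme detection returns (a sketch, not part of the vendored file):

// Hypothetical usage sketch of URLAbsoluteSpecifier.js
const { getScheme, getProtocol } = require("./URLAbsoluteSpecifier");
getScheme("file:///usr/webpack/index.js"); // "file"
getScheme("data:text/plain,hello"); // "data"
getScheme("C:\\Users\\webpack\\index.js"); // undefined, a Windows path, not a URL
getScheme("./relative/module"); // undefined
getProtocol("https://example.com"); // "https:"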

171
node_modules/webpack/lib/util/WeakTupleMap.js generated vendored Normal file

@@ -0,0 +1,171 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const isWeakKey = thing => typeof thing === "object" && thing !== null;
/**
* @template {any[]} T
* @template V
*/
class WeakTupleMap {
constructor() {
/** @private */
this.f = 0;
/** @private @type {any} */
this.v = undefined;
/** @private @type {Map<object, WeakTupleMap<T, V>> | undefined} */
this.m = undefined;
/** @private @type {WeakMap<object, WeakTupleMap<T, V>> | undefined} */
this.w = undefined;
}
/**
* @param {[...T, V]} args tuple
* @returns {void}
*/
set(...args) {
/** @type {WeakTupleMap<T, V>} */
let node = this;
for (let i = 0; i < args.length - 1; i++) {
node = node._get(args[i]);
}
node._setValue(args[args.length - 1]);
}
/**
* @param {T} args tuple
* @returns {boolean} true, if the tuple is in the Set
*/
has(...args) {
/** @type {WeakTupleMap<T, V>} */
let node = this;
for (let i = 0; i < args.length; i++) {
node = node._peek(args[i]);
if (node === undefined) return false;
}
return node._hasValue();
}
/**
* @param {T} args tuple
* @returns {V} the value
*/
get(...args) {
/** @type {WeakTupleMap<T, V>} */
let node = this;
for (let i = 0; i < args.length; i++) {
node = node._peek(args[i]);
if (node === undefined) return undefined;
}
return node._getValue();
}
/**
* @param {[...T, function(): V]} args tuple
* @returns {V} the value
*/
provide(...args) {
/** @type {WeakTupleMap<T, V>} */
let node = this;
for (let i = 0; i < args.length - 1; i++) {
node = node._get(args[i]);
}
if (node._hasValue()) return node._getValue();
const fn = args[args.length - 1];
const newValue = fn(...args.slice(0, -1));
node._setValue(newValue);
return newValue;
}
/**
* @param {T} args tuple
* @returns {void}
*/
delete(...args) {
/** @type {WeakTupleMap<T, V>} */
let node = this;
for (let i = 0; i < args.length; i++) {
node = node._peek(args[i]);
if (node === undefined) return;
}
node._deleteValue();
}
/**
* @returns {void}
*/
clear() {
this.f = 0;
this.v = undefined;
this.w = undefined;
this.m = undefined;
}
_getValue() {
return this.v;
}
_hasValue() {
return (this.f & 1) === 1;
}
_setValue(v) {
this.f |= 1;
this.v = v;
}
_deleteValue() {
this.f &= 6;
this.v = undefined;
}
_peek(thing) {
if (isWeakKey(thing)) {
if ((this.f & 4) !== 4) return undefined;
return this.w.get(thing);
} else {
if ((this.f & 2) !== 2) return undefined;
return this.m.get(thing);
}
}
_get(thing) {
if (isWeakKey(thing)) {
if ((this.f & 4) !== 4) {
const newMap = new WeakMap();
this.f |= 4;
const newNode = new WeakTupleMap();
(this.w = newMap).set(thing, newNode);
return newNode;
}
const entry = this.w.get(thing);
if (entry !== undefined) {
return entry;
}
const newNode = new WeakTupleMap();
this.w.set(thing, newNode);
return newNode;
} else {
if ((this.f & 2) !== 2) {
const newMap = new Map();
this.f |= 2;
const newNode = new WeakTupleMap();
(this.m = newMap).set(thing, newNode);
return newNode;
}
const entry = this.m.get(thing);
if (entry !== undefined) {
return entry;
}
const newNode = new WeakTupleMap();
this.m.set(thing, newNode);
return newNode;
}
}
}
module.exports = WeakTupleMap;
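A sketch: object keys are held weakly, primitive keys in normal maps, and provide acts as compute-if-absent (the example keys are made up for illustration):

// Hypothetical usage sketch of WeakTupleMap
const WeakTupleMap = require("./WeakTupleMap");
const cache = new WeakTupleMap();
const moduleA = {}; // object keys do not prevent garbage collection
cache.set(moduleA, "runtime1", 42);
cache.get(moduleA, "runtime1"); // 42
cache.has(moduleA, "runtime2"); // false
cache.provide(moduleA, "runtime2", () => 7); // computes and stores 7
cache.provide(moduleA, "runtime2", () => 0); // 7, already cached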

86
node_modules/webpack/lib/util/binarySearchBounds.js generated vendored Normal file

@@ -0,0 +1,86 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Mikola Lysenko @mikolalysenko
*/
"use strict";
/* cspell:disable-next-line */
// Refactor: Peter Somogyvari @petermetz
const compileSearch = (funcName, predicate, reversed, extraArgs, earlyOut) => {
const code = [
"function ",
funcName,
"(a,l,h,",
extraArgs.join(","),
"){",
earlyOut ? "" : "var i=",
reversed ? "l-1" : "h+1",
";while(l<=h){var m=(l+h)>>>1,x=a[m]"
];
if (earlyOut) {
if (predicate.indexOf("c") < 0) {
code.push(";if(x===y){return m}else if(x<=y){");
} else {
code.push(";var p=c(x,y);if(p===0){return m}else if(p<=0){");
}
} else {
code.push(";if(", predicate, "){i=m;");
}
if (reversed) {
code.push("l=m+1}else{h=m-1}");
} else {
code.push("h=m-1}else{l=m+1}");
}
code.push("}");
if (earlyOut) {
code.push("return -1};");
} else {
code.push("return i};");
}
return code.join("");
};
const compileBoundsSearch = (predicate, reversed, suffix, earlyOut) => {
const arg1 = compileSearch(
"A",
"x" + predicate + "y",
reversed,
["y"],
earlyOut
);
const arg2 = compileSearch(
"P",
"c(x,y)" + predicate + "0",
reversed,
["y", "c"],
earlyOut
);
const fnHeader = "function dispatchBinarySearch";
const fnBody =
"(a,y,c,l,h){\
if(typeof(c)==='function'){\
return P(a,(l===void 0)?0:l|0,(h===void 0)?a.length-1:h|0,y,c)\
}else{\
return A(a,(c===void 0)?0:c|0,(l===void 0)?a.length-1:l|0,y)\
}}\
return dispatchBinarySearch";
const fnArgList = [arg1, arg2, fnHeader, suffix, fnBody, suffix];
const fnSource = fnArgList.join("");
const result = new Function(fnSource);
return result();
};
module.exports = {
ge: compileBoundsSearch(">=", false, "GE"),
gt: compileBoundsSearch(">", false, "GT"),
lt: compileBoundsSearch("<", true, "LT"),
le: compileBoundsSearch("<=", true, "LE"),
eq: compileBoundsSearch("-", true, "EQ", true)
};
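The generated functions search a sorted array for a value (optionally with a comparator and explicit bounds); a sketch on a plain number array:

// Hypothetical usage sketch of binarySearchBounds
const binarySearchBounds = require("./binarySearchBounds");
const sorted = [1, 3, 3, 5, 9];
binarySearchBounds.ge(sorted, 3); // 1, first index with a value >= 3
binarySearchBounds.gt(sorted, 3); // 3, first index with a value > 3
binarySearchBounds.le(sorted, 3); // 2, last index with a value <= 3
binarySearchBounds.lt(sorted, 3); // 0, last index with a value < 3
binarySearchBounds.eq(sorted, 4); // -1, value not found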

568
node_modules/webpack/lib/util/cleverMerge.js generated vendored Normal file

File diff suppressed because it is too large

459
node_modules/webpack/lib/util/comparators.js generated vendored Normal file

@@ -0,0 +1,459 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const { compareRuntime } = require("./runtime");
/** @typedef {import("../Chunk")} Chunk */
/** @typedef {import("../ChunkGraph")} ChunkGraph */
/** @typedef {import("../ChunkGroup")} ChunkGroup */
/** @typedef {import("../Dependency").DependencyLocation} DependencyLocation */
/** @typedef {import("../Module")} Module */
/** @typedef {import("../ModuleGraph")} ModuleGraph */
/** @template T @typedef {function(T, T): -1|0|1} Comparator */
/** @template TArg @template T @typedef {function(TArg, T, T): -1|0|1} RawParameterizedComparator */
/** @template TArg @template T @typedef {function(TArg): Comparator<T>} ParameterizedComparator */
/**
* @template T
* @param {RawParameterizedComparator<any, T>} fn comparator with argument
* @returns {ParameterizedComparator<any, T>} comparator
*/
const createCachedParameterizedComparator = fn => {
/** @type {WeakMap<object, Comparator<T>>} */
const map = new WeakMap();
return arg => {
const cachedResult = map.get(arg);
if (cachedResult !== undefined) return cachedResult;
/**
* @param {T} a first item
* @param {T} b second item
* @returns {-1|0|1} compare result
*/
const result = fn.bind(null, arg);
map.set(arg, result);
return result;
};
};
/**
* @param {Chunk} a chunk
* @param {Chunk} b chunk
* @returns {-1|0|1} compare result
*/
exports.compareChunksById = (a, b) => {
return compareIds(a.id, b.id);
};
/**
* @param {Module} a module
* @param {Module} b module
* @returns {-1|0|1} compare result
*/
exports.compareModulesByIdentifier = (a, b) => {
return compareIds(a.identifier(), b.identifier());
};
/**
* @param {ChunkGraph} chunkGraph the chunk graph
* @param {Module} a module
* @param {Module} b module
* @returns {-1|0|1} compare result
*/
const compareModulesById = (chunkGraph, a, b) => {
return compareIds(chunkGraph.getModuleId(a), chunkGraph.getModuleId(b));
};
/** @type {ParameterizedComparator<ChunkGraph, Module>} */
exports.compareModulesById =
createCachedParameterizedComparator(compareModulesById);
/**
* @param {number} a number
* @param {number} b number
* @returns {-1|0|1} compare result
*/
const compareNumbers = (a, b) => {
if (typeof a !== typeof b) {
return typeof a < typeof b ? -1 : 1;
}
if (a < b) return -1;
if (a > b) return 1;
return 0;
};
exports.compareNumbers = compareNumbers;
/**
* @param {string} a string
* @param {string} b string
* @returns {-1|0|1} compare result
*/
const compareStringsNumeric = (a, b) => {
const partsA = a.split(/(\d+)/);
const partsB = b.split(/(\d+)/);
const len = Math.min(partsA.length, partsB.length);
for (let i = 0; i < len; i++) {
const pA = partsA[i];
const pB = partsB[i];
if (i % 2 === 0) {
if (pA.length > pB.length) {
if (pA.slice(0, pB.length) > pB) return 1;
return -1;
} else if (pB.length > pA.length) {
if (pB.slice(0, pA.length) > pA) return -1;
return 1;
} else {
if (pA < pB) return -1;
if (pA > pB) return 1;
}
} else {
const nA = +pA;
const nB = +pB;
if (nA < nB) return -1;
if (nA > nB) return 1;
}
}
if (partsB.length < partsA.length) return 1;
if (partsB.length > partsA.length) return -1;
return 0;
};
exports.compareStringsNumeric = compareStringsNumeric;
/**
* @param {ModuleGraph} moduleGraph the module graph
* @param {Module} a module
* @param {Module} b module
* @returns {-1|0|1} compare result
*/
const compareModulesByPostOrderIndexOrIdentifier = (moduleGraph, a, b) => {
const cmp = compareNumbers(
moduleGraph.getPostOrderIndex(a),
moduleGraph.getPostOrderIndex(b)
);
if (cmp !== 0) return cmp;
return compareIds(a.identifier(), b.identifier());
};
/** @type {ParameterizedComparator<ModuleGraph, Module>} */
exports.compareModulesByPostOrderIndexOrIdentifier =
createCachedParameterizedComparator(
compareModulesByPostOrderIndexOrIdentifier
);
/**
* @param {ModuleGraph} moduleGraph the module graph
* @param {Module} a module
* @param {Module} b module
* @returns {-1|0|1} compare result
*/
const compareModulesByPreOrderIndexOrIdentifier = (moduleGraph, a, b) => {
const cmp = compareNumbers(
moduleGraph.getPreOrderIndex(a),
moduleGraph.getPreOrderIndex(b)
);
if (cmp !== 0) return cmp;
return compareIds(a.identifier(), b.identifier());
};
/** @type {ParameterizedComparator<ModuleGraph, Module>} */
exports.compareModulesByPreOrderIndexOrIdentifier =
createCachedParameterizedComparator(
compareModulesByPreOrderIndexOrIdentifier
);
/**
* @param {ChunkGraph} chunkGraph the chunk graph
* @param {Module} a module
* @param {Module} b module
* @returns {-1|0|1} compare result
*/
const compareModulesByIdOrIdentifier = (chunkGraph, a, b) => {
const cmp = compareIds(chunkGraph.getModuleId(a), chunkGraph.getModuleId(b));
if (cmp !== 0) return cmp;
return compareIds(a.identifier(), b.identifier());
};
/** @type {ParameterizedComparator<ChunkGraph, Module>} */
exports.compareModulesByIdOrIdentifier = createCachedParameterizedComparator(
compareModulesByIdOrIdentifier
);
/**
* @param {ChunkGraph} chunkGraph the chunk graph
* @param {Chunk} a chunk
* @param {Chunk} b chunk
* @returns {-1|0|1} compare result
*/
const compareChunks = (chunkGraph, a, b) => {
return chunkGraph.compareChunks(a, b);
};
/** @type {ParameterizedComparator<ChunkGraph, Chunk>} */
exports.compareChunks = createCachedParameterizedComparator(compareChunks);
/**
* @param {string|number} a first id
* @param {string|number} b second id
* @returns {-1|0|1} compare result
*/
const compareIds = (a, b) => {
if (typeof a !== typeof b) {
return typeof a < typeof b ? -1 : 1;
}
if (a < b) return -1;
if (a > b) return 1;
return 0;
};
exports.compareIds = compareIds;
/**
* @param {string} a first string
* @param {string} b second string
* @returns {-1|0|1} compare result
*/
const compareStrings = (a, b) => {
if (a < b) return -1;
if (a > b) return 1;
return 0;
};
exports.compareStrings = compareStrings;
/**
* @param {ChunkGroup} a first chunk group
* @param {ChunkGroup} b second chunk group
* @returns {-1|0|1} compare result
*/
const compareChunkGroupsByIndex = (a, b) => {
return a.index < b.index ? -1 : 1;
};
exports.compareChunkGroupsByIndex = compareChunkGroupsByIndex;
/**
* @template K1 {Object}
* @template K2
* @template T
*/
class TwoKeyWeakMap {
constructor() {
/** @private @type {WeakMap<any, WeakMap<any, T>>} */
this._map = new WeakMap();
}
/**
* @param {K1} key1 first key
* @param {K2} key2 second key
* @returns {T | undefined} value
*/
get(key1, key2) {
const childMap = this._map.get(key1);
if (childMap === undefined) {
return undefined;
}
return childMap.get(key2);
}
/**
* @param {K1} key1 first key
* @param {K2} key2 second key
* @param {T | undefined} value new value
* @returns {void}
*/
set(key1, key2, value) {
let childMap = this._map.get(key1);
if (childMap === undefined) {
childMap = new WeakMap();
this._map.set(key1, childMap);
}
childMap.set(key2, value);
}
}
/** @type {TwoKeyWeakMap<Comparator<any>, Comparator<any>, Comparator<any>>} */
const concatComparatorsCache = new TwoKeyWeakMap();
/**
* @template T
* @param {Comparator<T>} c1 comparator
* @param {Comparator<T>} c2 comparator
* @param {Comparator<T>[]} cRest comparators
* @returns {Comparator<T>} comparator
*/
const concatComparators = (c1, c2, ...cRest) => {
if (cRest.length > 0) {
const [c3, ...cRest2] = cRest;
return concatComparators(c1, concatComparators(c2, c3, ...cRest2));
}
const cacheEntry = /** @type {Comparator<T>} */ (
concatComparatorsCache.get(c1, c2)
);
if (cacheEntry !== undefined) return cacheEntry;
/**
* @param {T} a first value
* @param {T} b second value
* @returns {-1|0|1} compare result
*/
const result = (a, b) => {
const res = c1(a, b);
if (res !== 0) return res;
return c2(a, b);
};
concatComparatorsCache.set(c1, c2, result);
return result;
};
exports.concatComparators = concatComparators;
/** @template A, B @typedef {(input: A) => B} Selector */
/** @type {TwoKeyWeakMap<Selector<any, any>, Comparator<any>, Comparator<any>>} */
const compareSelectCache = new TwoKeyWeakMap();
/**
* @template T
* @template R
* @param {Selector<T, R>} getter getter for value
* @param {Comparator<R>} comparator comparator
* @returns {Comparator<T>} comparator
*/
const compareSelect = (getter, comparator) => {
const cacheEntry = compareSelectCache.get(getter, comparator);
if (cacheEntry !== undefined) return cacheEntry;
/**
* @param {T} a first value
* @param {T} b second value
* @returns {-1|0|1} compare result
*/
const result = (a, b) => {
const aValue = getter(a);
const bValue = getter(b);
if (aValue !== undefined && aValue !== null) {
if (bValue !== undefined && bValue !== null) {
return comparator(aValue, bValue);
}
return -1;
} else {
if (bValue !== undefined && bValue !== null) {
return 1;
}
return 0;
}
};
compareSelectCache.set(getter, comparator, result);
return result;
};
exports.compareSelect = compareSelect;
/** @type {WeakMap<Comparator<any>, Comparator<Iterable<any>>>} */
const compareIteratorsCache = new WeakMap();
/**
* @template T
* @param {Comparator<T>} elementComparator comparator for elements
* @returns {Comparator<Iterable<T>>} comparator for iterables of elements
*/
const compareIterables = elementComparator => {
const cacheEntry = compareIteratorsCache.get(elementComparator);
if (cacheEntry !== undefined) return cacheEntry;
/**
* @param {Iterable<T>} a first value
* @param {Iterable<T>} b second value
* @returns {-1|0|1} compare result
*/
const result = (a, b) => {
const aI = a[Symbol.iterator]();
const bI = b[Symbol.iterator]();
// eslint-disable-next-line no-constant-condition
while (true) {
const aItem = aI.next();
const bItem = bI.next();
if (aItem.done) {
return bItem.done ? 0 : -1;
} else if (bItem.done) {
return 1;
}
const res = elementComparator(aItem.value, bItem.value);
if (res !== 0) return res;
}
};
compareIteratorsCache.set(elementComparator, result);
return result;
};
exports.compareIterables = compareIterables;
// TODO this is no longer needed when minimum node.js version is >= 12
// since these versions ship with a stable sort function
/**
* @template T
* @param {Iterable<T>} iterable original ordered list
* @returns {Comparator<T>} comparator
*/
exports.keepOriginalOrder = iterable => {
/** @type {Map<T, number>} */
const map = new Map();
let i = 0;
for (const item of iterable) {
map.set(item, i++);
}
return (a, b) => compareNumbers(map.get(a), map.get(b));
};
/**
* @param {ChunkGraph} chunkGraph the chunk graph
* @returns {Comparator<Chunk>} comparator
*/
exports.compareChunksNatural = chunkGraph => {
const cmpFn = exports.compareModulesById(chunkGraph);
const cmpIterableFn = compareIterables(cmpFn);
return concatComparators(
compareSelect(chunk => chunk.name, compareIds),
compareSelect(chunk => chunk.runtime, compareRuntime),
compareSelect(
/**
* @param {Chunk} chunk a chunk
* @returns {Iterable<Module>} modules
*/
chunk => chunkGraph.getOrderedChunkModulesIterable(chunk, cmpFn),
cmpIterableFn
)
);
};
/**
* Compare two locations
* @param {DependencyLocation} a A location node
* @param {DependencyLocation} b A location node
* @returns {-1|0|1} sorting comparator value
*/
exports.compareLocations = (a, b) => {
let isObjectA = typeof a === "object" && a !== null;
let isObjectB = typeof b === "object" && b !== null;
if (!isObjectA || !isObjectB) {
if (isObjectA) return 1;
if (isObjectB) return -1;
return 0;
}
if ("start" in a) {
if ("start" in b) {
const ap = a.start;
const bp = b.start;
if (ap.line < bp.line) return -1;
if (ap.line > bp.line) return 1;
if (ap.column < bp.column) return -1;
if (ap.column > bp.column) return 1;
} else return -1;
} else if ("start" in b) return 1;
if ("name" in a) {
if ("name" in b) {
if (a.name < b.name) return -1;
if (a.name > b.name) return 1;
} else return -1;
} else if ("name" in b) return 1;
if ("index" in a) {
if ("index" in b) {
if (a.index < b.index) return -1;
if (a.index > b.index) return 1;
} else return -1;
} else if ("index" in b) return 1;
return 0;
};
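A minimal usage sketch (not part of webpack's source): the plain objects, their field names and the "./comparators" require path are assumed for illustration; only the comparators exported above are used.

const {
	compareSelect,
	concatComparators,
	compareIds,
	keepOriginalOrder
} = require("./comparators"); // assumed path inside lib/util

// Plain objects stand in for modules/chunks in this example.
const items = [
	{ name: "b", id: 2 },
	{ name: "a", id: 10 },
	{ name: "a", id: 2 }
];

// Sort by `name` first, then by `id`; any remaining ties keep their original order.
const byName = compareSelect(item => item.name, compareIds);
const byId = compareSelect(item => item.id, compareIds);
const cmp = concatComparators(byName, byId, keepOriginalOrder(items));
items.sort(cmp); // -> a/2, a/10, b/2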

206
node_modules/webpack/lib/util/compileBooleanMatcher.js generated vendored Normal file
View File

@@ -0,0 +1,206 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const quoteMeta = str => {
return str.replace(/[-[\]\\/{}()*+?.^$|]/g, "\\$&");
};
const toSimpleString = str => {
if (`${+str}` === str) {
return str;
}
return JSON.stringify(str);
};
/**
* @param {Record<string|number, boolean>} map value map
* @returns {boolean|(function(string): string)} true/false, when unconditionally true/false, or a template function to determine the value at runtime
*/
const compileBooleanMatcher = map => {
const positiveItems = Object.keys(map).filter(i => map[i]);
const negativeItems = Object.keys(map).filter(i => !map[i]);
if (positiveItems.length === 0) return false;
if (negativeItems.length === 0) return true;
return compileBooleanMatcherFromLists(positiveItems, negativeItems);
};
/**
* @param {string[]} positiveItems positive items
* @param {string[]} negativeItems negative items
* @returns {function(string): string} a template function to determine the value at runtime
*/
const compileBooleanMatcherFromLists = (positiveItems, negativeItems) => {
if (positiveItems.length === 0) return () => "false";
if (negativeItems.length === 0) return () => "true";
if (positiveItems.length === 1)
return value => `${toSimpleString(positiveItems[0])} == ${value}`;
if (negativeItems.length === 1)
return value => `${toSimpleString(negativeItems[0])} != ${value}`;
const positiveRegexp = itemsToRegexp(positiveItems);
const negativeRegexp = itemsToRegexp(negativeItems);
if (positiveRegexp.length <= negativeRegexp.length) {
return value => `/^${positiveRegexp}$/.test(${value})`;
} else {
return value => `!/^${negativeRegexp}$/.test(${value})`;
}
};
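/**
 * Pops groups of items that share a common key (as computed by `getKey`) out of
 * `itemsSet` whenever the grouped list satisfies `condition`.
 * @param {Set<string>} itemsSet set of items, mutated in place
 * @param {function(string): string | false} getKey returns the grouping key of an item
 * @param {function(string[]): boolean} condition decides whether a group is extracted
 * @returns {string[][]} the extracted groups
 */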
const popCommonItems = (itemsSet, getKey, condition) => {
const map = new Map();
for (const item of itemsSet) {
const key = getKey(item);
if (key) {
let list = map.get(key);
if (list === undefined) {
list = [];
map.set(key, list);
}
list.push(item);
}
}
const result = [];
for (const list of map.values()) {
if (condition(list)) {
for (const item of list) {
itemsSet.delete(item);
}
result.push(list);
}
}
return result;
};
const getCommonPrefix = items => {
let prefix = items[0];
for (let i = 1; i < items.length; i++) {
const item = items[i];
for (let p = 0; p < prefix.length; p++) {
if (item[p] !== prefix[p]) {
prefix = prefix.slice(0, p);
break;
}
}
}
return prefix;
};
const getCommonSuffix = items => {
let suffix = items[0];
for (let i = 1; i < items.length; i++) {
const item = items[i];
for (let p = item.length - 1, s = suffix.length - 1; s >= 0; p--, s--) {
if (item[p] !== suffix[s]) {
suffix = suffix.slice(s + 1);
break;
}
}
}
return suffix;
};
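/**
 * Builds a regular expression source that matches exactly the given items,
 * merging single characters, common prefixes and common suffixes where possible.
 * @param {string[]} itemsArr array of items
 * @returns {string} regexp source (without anchors or flags)
 */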
const itemsToRegexp = itemsArr => {
if (itemsArr.length === 1) {
return quoteMeta(itemsArr[0]);
}
const finishedItems = [];
// merge single char items: (a|b|c|d|ef) => ([abcd]|ef)
let countOfSingleCharItems = 0;
for (const item of itemsArr) {
if (item.length === 1) {
countOfSingleCharItems++;
}
}
// special case for only single char items
if (countOfSingleCharItems === itemsArr.length) {
return `[${quoteMeta(itemsArr.sort().join(""))}]`;
}
const items = new Set(itemsArr.sort());
if (countOfSingleCharItems > 2) {
let singleCharItems = "";
for (const item of items) {
if (item.length === 1) {
singleCharItems += item;
items.delete(item);
}
}
finishedItems.push(`[${quoteMeta(singleCharItems)}]`);
}
// special case for 2 items with common prefix/suffix
if (finishedItems.length === 0 && items.size === 2) {
const prefix = getCommonPrefix(itemsArr);
const suffix = getCommonSuffix(
itemsArr.map(item => item.slice(prefix.length))
);
if (prefix.length > 0 || suffix.length > 0) {
return `${quoteMeta(prefix)}${itemsToRegexp(
itemsArr.map(i => i.slice(prefix.length, -suffix.length || undefined))
)}${quoteMeta(suffix)}`;
}
}
// special case for 2 items with common suffix
if (finishedItems.length === 0 && items.size === 2) {
const it = items[Symbol.iterator]();
const a = it.next().value;
const b = it.next().value;
if (a.length > 0 && b.length > 0 && a.slice(-1) === b.slice(-1)) {
return `${itemsToRegexp([a.slice(0, -1), b.slice(0, -1)])}${quoteMeta(
a.slice(-1)
)}`;
}
}
// find common prefix: (a1|a2|a3|a4|b5) => (a(1|2|3|4)|b5)
const prefixed = popCommonItems(
items,
item => (item.length >= 1 ? item[0] : false),
list => {
if (list.length >= 3) return true;
if (list.length <= 1) return false;
return list[0][1] === list[1][1];
}
);
for (const prefixedItems of prefixed) {
const prefix = getCommonPrefix(prefixedItems);
finishedItems.push(
`${quoteMeta(prefix)}${itemsToRegexp(
prefixedItems.map(i => i.slice(prefix.length))
)}`
);
}
// find common suffix: (a1|b1|c1|d1|e2) => ((a|b|c|d)1|e2)
const suffixed = popCommonItems(
items,
item => (item.length >= 1 ? item.slice(-1) : false),
list => {
if (list.length >= 3) return true;
if (list.length <= 1) return false;
return list[0].slice(-2) === list[1].slice(-2);
}
);
for (const suffixedItems of suffixed) {
const suffix = getCommonSuffix(suffixedItems);
finishedItems.push(
`${itemsToRegexp(
suffixedItems.map(i => i.slice(0, -suffix.length))
)}${quoteMeta(suffix)}`
);
}
	// TODO further optimize regexp, e.g.
	// use ranges: (1|2|3|4|a) => [1-4a]
const conditional = finishedItems.concat(Array.from(items, quoteMeta));
if (conditional.length === 1) return conditional[0];
return `(${conditional.join("|")})`;
};
compileBooleanMatcher.fromLists = compileBooleanMatcherFromLists;
compileBooleanMatcher.itemsToRegexp = itemsToRegexp;
module.exports = compileBooleanMatcher;

28
node_modules/webpack/lib/util/createSchemaValidation.js generated vendored Normal file
View File

@@ -0,0 +1,28 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const memoize = require("./memoize");
const getValidate = memoize(() => require("schema-utils").validate);
const createSchemaValidation = (check, getSchema, options) => {
getSchema = memoize(getSchema);
return value => {
if (check && !check(value)) {
getValidate()(getSchema(), value, options);
if (check) {
require("util").deprecate(
() => {},
"webpack bug: Pre-compiled schema reports error while real schema is happy. This has performance drawbacks.",
"DEP_WEBPACK_PRE_COMPILED_SCHEMA_INVALID"
)();
}
}
};
};
module.exports = createSchemaValidation;
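A minimal sketch of how this helper is typically consumed; the fast check, the schema object and the option names are assumptions for illustration, not taken from webpack's own schemas.

const createSchemaValidation = require("./createSchemaValidation"); // assumed path

const validateOptions = createSchemaValidation(
	// pre-compiled fast check; the full schema validation only runs when it fails
	value => typeof value === "object" && value !== null,
	// lazily constructed JSON schema handed to schema-utils
	() => ({ type: "object" }),
	{ name: "Example Plugin", baseDataPath: "options" }
);

validateOptions({ answer: 42 }); // passes the fast check, schema never loaded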

171
node_modules/webpack/lib/util/createHash.js generated vendored Normal file
View File

@@ -0,0 +1,171 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const Hash = require("./Hash");
const BULK_SIZE = 2000;
// We are using an object instead of a Map as this will stay static during the runtime
// so access to it can be optimized by v8
const digestCaches = {};
class BulkUpdateDecorator extends Hash {
/**
* @param {Hash | function(): Hash} hashOrFactory function to create a hash
* @param {string=} hashKey key for caching
*/
constructor(hashOrFactory, hashKey) {
super();
this.hashKey = hashKey;
if (typeof hashOrFactory === "function") {
this.hashFactory = hashOrFactory;
this.hash = undefined;
} else {
this.hashFactory = undefined;
this.hash = hashOrFactory;
}
this.buffer = "";
}
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @param {string|Buffer} data data
* @param {string=} inputEncoding data encoding
* @returns {this} updated hash
*/
update(data, inputEncoding) {
if (
inputEncoding !== undefined ||
typeof data !== "string" ||
data.length > BULK_SIZE
) {
if (this.hash === undefined) this.hash = this.hashFactory();
if (this.buffer.length > 0) {
this.hash.update(this.buffer);
this.buffer = "";
}
this.hash.update(data, inputEncoding);
} else {
this.buffer += data;
if (this.buffer.length > BULK_SIZE) {
if (this.hash === undefined) this.hash = this.hashFactory();
this.hash.update(this.buffer);
this.buffer = "";
}
}
return this;
}
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @param {string=} encoding encoding of the return value
* @returns {string|Buffer} digest
*/
digest(encoding) {
let digestCache;
const buffer = this.buffer;
if (this.hash === undefined) {
// short data for hash, we can use caching
const cacheKey = `${this.hashKey}-${encoding}`;
digestCache = digestCaches[cacheKey];
if (digestCache === undefined) {
digestCache = digestCaches[cacheKey] = new Map();
}
const cacheEntry = digestCache.get(buffer);
if (cacheEntry !== undefined) return cacheEntry;
this.hash = this.hashFactory();
}
if (buffer.length > 0) {
this.hash.update(buffer);
}
const digestResult = this.hash.digest(encoding);
const result =
typeof digestResult === "string" ? digestResult : digestResult.toString();
if (digestCache !== undefined) {
digestCache.set(buffer, result);
}
return result;
}
}
/* istanbul ignore next */
class DebugHash extends Hash {
constructor() {
super();
this.string = "";
}
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @param {string|Buffer} data data
* @param {string=} inputEncoding data encoding
* @returns {this} updated hash
*/
update(data, inputEncoding) {
if (typeof data !== "string") data = data.toString("utf-8");
if (data.startsWith("debug-digest-")) {
data = Buffer.from(data.slice("debug-digest-".length), "hex").toString();
}
this.string += `[${data}](${new Error().stack.split("\n", 3)[2]})\n`;
return this;
}
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @param {string=} encoding encoding of the return value
* @returns {string|Buffer} digest
*/
digest(encoding) {
return "debug-digest-" + Buffer.from(this.string).toString("hex");
}
}
let crypto = undefined;
let createXXHash64 = undefined;
let createMd4 = undefined;
let BatchedHash = undefined;
/**
* Creates a hash by name or function
* @param {string | typeof Hash} algorithm the algorithm name or a constructor creating a hash
* @returns {Hash} the hash
*/
module.exports = algorithm => {
if (typeof algorithm === "function") {
return new BulkUpdateDecorator(() => new algorithm());
}
switch (algorithm) {
// TODO add non-cryptographic algorithm here
case "debug":
return new DebugHash();
case "xxhash64":
if (createXXHash64 === undefined) {
createXXHash64 = require("./hash/xxhash64");
if (BatchedHash === undefined) {
BatchedHash = require("./hash/BatchedHash");
}
}
return new BatchedHash(createXXHash64());
case "md4":
if (createMd4 === undefined) {
createMd4 = require("./hash/md4");
if (BatchedHash === undefined) {
BatchedHash = require("./hash/BatchedHash");
}
}
return new BatchedHash(createMd4());
case "native-md4":
if (crypto === undefined) crypto = require("crypto");
return new BulkUpdateDecorator(() => crypto.createHash("md4"), "md4");
default:
if (crypto === undefined) crypto = require("crypto");
return new BulkUpdateDecorator(
() => crypto.createHash(algorithm),
algorithm
);
}
};
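A short usage sketch of the factory above; the algorithm names are those handled in the switch statement, the input strings are made up.

const createHash = require("./createHash"); // assumed path

const hash = createHash("xxhash64"); // also accepts "md4", "native-md4", "debug",
// any crypto algorithm name, or a Hash class
hash.update("some module source");
hash.update("more data", "utf-8");
console.log(hash.digest("hex")); // 16 hex characters for the 64-bit xxhash64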

264
node_modules/webpack/lib/util/deprecation.js generated vendored Normal file
View File

@@ -0,0 +1,264 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const util = require("util");
/** @type {Map<string, Function>} */
const deprecationCache = new Map();
/**
* @typedef {Object} FakeHookMarker
* @property {true} _fakeHook it's a fake hook
*/
/** @template T @typedef {T & FakeHookMarker} FakeHook<T> */
/**
* @param {string} message deprecation message
* @param {string} code deprecation code
* @returns {Function} function to trigger deprecation
*/
const createDeprecation = (message, code) => {
const cached = deprecationCache.get(message);
if (cached !== undefined) return cached;
const fn = util.deprecate(
() => {},
message,
"DEP_WEBPACK_DEPRECATION_" + code
);
deprecationCache.set(message, fn);
return fn;
};
const COPY_METHODS = [
"concat",
"entry",
"filter",
"find",
"findIndex",
"includes",
"indexOf",
"join",
"lastIndexOf",
"map",
"reduce",
"reduceRight",
"slice",
"some"
];
const DISABLED_METHODS = [
"copyWithin",
"entries",
"fill",
"keys",
"pop",
"reverse",
"shift",
"splice",
"sort",
"unshift"
];
/**
* @param {any} set new set
* @param {string} name property name
* @returns {void}
*/
exports.arrayToSetDeprecation = (set, name) => {
for (const method of COPY_METHODS) {
if (set[method]) continue;
const d = createDeprecation(
`${name} was changed from Array to Set (using Array method '${method}' is deprecated)`,
"ARRAY_TO_SET"
);
/**
* @deprecated
* @this {Set}
		 * @returns {any} result of the Array method called on a copy of the Set
*/
set[method] = function () {
d();
const array = Array.from(this);
return Array.prototype[method].apply(array, arguments);
};
}
const dPush = createDeprecation(
`${name} was changed from Array to Set (using Array method 'push' is deprecated)`,
"ARRAY_TO_SET_PUSH"
);
const dLength = createDeprecation(
`${name} was changed from Array to Set (using Array property 'length' is deprecated)`,
"ARRAY_TO_SET_LENGTH"
);
const dIndexer = createDeprecation(
`${name} was changed from Array to Set (indexing Array is deprecated)`,
"ARRAY_TO_SET_INDEXER"
);
/**
* @deprecated
* @this {Set}
* @returns {number} count
*/
set.push = function () {
dPush();
for (const item of Array.from(arguments)) {
this.add(item);
}
return this.size;
};
for (const method of DISABLED_METHODS) {
if (set[method]) continue;
set[method] = () => {
throw new Error(
`${name} was changed from Array to Set (using Array method '${method}' is not possible)`
);
};
}
const createIndexGetter = index => {
/**
* @this {Set} a Set
* @returns {any} the value at this location
*/
const fn = function () {
dIndexer();
let i = 0;
for (const item of this) {
if (i++ === index) return item;
}
return undefined;
};
return fn;
};
const defineIndexGetter = index => {
Object.defineProperty(set, index, {
get: createIndexGetter(index),
set(value) {
throw new Error(
`${name} was changed from Array to Set (indexing Array with write is not possible)`
);
}
});
};
defineIndexGetter(0);
let indexerDefined = 1;
Object.defineProperty(set, "length", {
get() {
dLength();
const length = this.size;
for (indexerDefined; indexerDefined < length + 1; indexerDefined++) {
defineIndexGetter(indexerDefined);
}
return length;
},
set(value) {
throw new Error(
`${name} was changed from Array to Set (writing to Array property 'length' is not possible)`
);
}
});
set[Symbol.isConcatSpreadable] = true;
};
exports.createArrayToSetDeprecationSet = name => {
let initialized = false;
class SetDeprecatedArray extends Set {
constructor(items) {
super(items);
if (!initialized) {
initialized = true;
exports.arrayToSetDeprecation(SetDeprecatedArray.prototype, name);
}
}
}
return SetDeprecatedArray;
};
exports.soonFrozenObjectDeprecation = (obj, name, code, note = "") => {
const message = `${name} will be frozen in future, all modifications are deprecated.${
note && `\n${note}`
}`;
return new Proxy(obj, {
set: util.deprecate(
(target, property, value, receiver) =>
Reflect.set(target, property, value, receiver),
message,
code
),
defineProperty: util.deprecate(
(target, property, descriptor) =>
Reflect.defineProperty(target, property, descriptor),
message,
code
),
deleteProperty: util.deprecate(
(target, property) => Reflect.deleteProperty(target, property),
message,
code
),
setPrototypeOf: util.deprecate(
(target, proto) => Reflect.setPrototypeOf(target, proto),
message,
code
)
});
};
/**
* @template T
* @param {T} obj object
* @param {string} message deprecation message
* @param {string} code deprecation code
* @returns {T} object with property access deprecated
*/
const deprecateAllProperties = (obj, message, code) => {
const newObj = {};
const descriptors = Object.getOwnPropertyDescriptors(obj);
for (const name of Object.keys(descriptors)) {
const descriptor = descriptors[name];
if (typeof descriptor.value === "function") {
Object.defineProperty(newObj, name, {
...descriptor,
value: util.deprecate(descriptor.value, message, code)
});
} else if (descriptor.get || descriptor.set) {
Object.defineProperty(newObj, name, {
...descriptor,
get: descriptor.get && util.deprecate(descriptor.get, message, code),
set: descriptor.set && util.deprecate(descriptor.set, message, code)
});
} else {
let value = descriptor.value;
Object.defineProperty(newObj, name, {
configurable: descriptor.configurable,
enumerable: descriptor.enumerable,
get: util.deprecate(() => value, message, code),
set: descriptor.writable
? util.deprecate(v => (value = v), message, code)
: undefined
});
}
}
return /** @type {T} */ (newObj);
};
exports.deprecateAllProperties = deprecateAllProperties;
/**
* @template T
* @param {T} fakeHook fake hook implementation
* @param {string=} message deprecation message (not deprecated when unset)
* @param {string=} code deprecation code (not deprecated when unset)
* @returns {FakeHook<T>} fake hook which redirects
*/
exports.createFakeHook = (fakeHook, message, code) => {
if (message && code) {
fakeHook = deprecateAllProperties(fakeHook, message, code);
}
return Object.freeze(
Object.assign(fakeHook, { _fakeHook: /** @type {true} */ (true) })
);
};
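An illustrative sketch of arrayToSetDeprecation: the Set contents and the label are invented. Array-style reads keep working but emit DEP_WEBPACK_DEPRECATION_ARRAY_TO_SET* warnings, while the disabled mutating Array methods throw.

const { arrayToSetDeprecation } = require("./deprecation"); // assumed path

const chunks = new Set(["main", "vendors"]);
arrayToSetDeprecation(chunks, "Compilation.chunks"); // name only affects the message

console.log(chunks.length); // 2 (deprecated length getter)
console.log(chunks.indexOf("vendors")); // 1 (deprecated copy method)
chunks.push("runtime"); // deprecated, forwards to Set#add
// chunks.sort(); // would throw: changed from Array to Set, 'sort' is not possible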

508
node_modules/webpack/lib/util/deterministicGrouping.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

18
node_modules/webpack/lib/util/extractUrlAndGlobal.js generated vendored Normal file
View File

@@ -0,0 +1,18 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Sam Chen @chenxsan
*/
"use strict";
/**
* @param {string} urlAndGlobal the script request
* @returns {string[]} script url and its global variable
*/
module.exports = function extractUrlAndGlobal(urlAndGlobal) {
const index = urlAndGlobal.indexOf("@");
if (index <= 0 || index === urlAndGlobal.length - 1) {
throw new Error(`Invalid request "${urlAndGlobal}"`);
}
return [urlAndGlobal.substring(index + 1), urlAndGlobal.substring(0, index)];
};
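A quick sketch with a made-up script request:

const extractUrlAndGlobal = require("./extractUrlAndGlobal"); // assumed path

const [url, global] = extractUrlAndGlobal(
	"myLibrary@https://cdn.example.com/my-library.js"
);
// url    === "https://cdn.example.com/my-library.js"
// global === "myLibrary"

// Requests without a global part throw:
// extractUrlAndGlobal("https://cdn.example.com/x.js"); // Error: Invalid request ...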

229
node_modules/webpack/lib/util/findGraphRoots.js generated vendored Normal file
View File

@@ -0,0 +1,229 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const NO_MARKER = 0;
const IN_PROGRESS_MARKER = 1;
const DONE_MARKER = 2;
const DONE_MAYBE_ROOT_CYCLE_MARKER = 3;
const DONE_AND_ROOT_MARKER = 4;
/**
* @template T
*/
class Node {
/**
* @param {T} item the value of the node
*/
constructor(item) {
this.item = item;
/** @type {Set<Node<T>>} */
this.dependencies = new Set();
this.marker = NO_MARKER;
/** @type {Cycle<T> | undefined} */
this.cycle = undefined;
this.incoming = 0;
}
}
/**
* @template T
*/
class Cycle {
constructor() {
/** @type {Set<Node<T>>} */
this.nodes = new Set();
}
}
/**
* @template T
* @typedef {Object} StackEntry
* @property {Node<T>} node
* @property {Node<T>[]} openEdges
*/
/**
* @template T
* @param {Iterable<T>} items list of items
* @param {function(T): Iterable<T>} getDependencies function to get dependencies of an item (items that are not in list are ignored)
* @returns {Iterable<T>} graph roots of the items
*/
module.exports = (items, getDependencies) => {
/** @type {Map<T, Node<T>>} */
const itemToNode = new Map();
for (const item of items) {
const node = new Node(item);
itemToNode.set(item, node);
}
// early exit when there is only a single item
if (itemToNode.size <= 1) return items;
// grab all the dependencies
for (const node of itemToNode.values()) {
for (const dep of getDependencies(node.item)) {
const depNode = itemToNode.get(dep);
if (depNode !== undefined) {
node.dependencies.add(depNode);
}
}
}
// Set of current root modules
	// items will be removed if a new reference to them has been found
/** @type {Set<Node<T>>} */
const roots = new Set();
	// Set of current cycles without references to them
	// cycles will be removed if a new reference to them has been found
// that is not part of the cycle
/** @type {Set<Cycle<T>>} */
const rootCycles = new Set();
// For all non-marked nodes
for (const selectedNode of itemToNode.values()) {
if (selectedNode.marker === NO_MARKER) {
// deep-walk all referenced modules
// in a non-recursive way
// start by entering the selected node
selectedNode.marker = IN_PROGRESS_MARKER;
// keep a stack to avoid recursive walk
/** @type {StackEntry<T>[]} */
const stack = [
{
node: selectedNode,
openEdges: Array.from(selectedNode.dependencies)
}
];
// process the top item until stack is empty
while (stack.length > 0) {
const topOfStack = stack[stack.length - 1];
// Are there still edges unprocessed in the current node?
if (topOfStack.openEdges.length > 0) {
// Process one dependency
const dependency = topOfStack.openEdges.pop();
switch (dependency.marker) {
case NO_MARKER:
							// dependency has not been visited yet
// mark it as in-progress and recurse
stack.push({
node: dependency,
openEdges: Array.from(dependency.dependencies)
});
dependency.marker = IN_PROGRESS_MARKER;
break;
case IN_PROGRESS_MARKER: {
							// It's an in-progress cycle
let cycle = dependency.cycle;
if (!cycle) {
cycle = new Cycle();
cycle.nodes.add(dependency);
dependency.cycle = cycle;
}
// set cycle property for each node in the cycle
// if nodes are already part of a cycle
// we merge the cycles to a shared cycle
for (
let i = stack.length - 1;
stack[i].node !== dependency;
i--
) {
const node = stack[i].node;
if (node.cycle) {
if (node.cycle !== cycle) {
// merge cycles
for (const cycleNode of node.cycle.nodes) {
cycleNode.cycle = cycle;
cycle.nodes.add(cycleNode);
}
}
} else {
node.cycle = cycle;
cycle.nodes.add(node);
}
}
// don't recurse into dependencies
// these are already on the stack
break;
}
case DONE_AND_ROOT_MARKER:
							// This node has already been visited and is currently a root node
// But as this is a new reference to the node
// it's not really a root
// so we have to convert it to a normal node
dependency.marker = DONE_MARKER;
roots.delete(dependency);
break;
case DONE_MAYBE_ROOT_CYCLE_MARKER:
							// This node has already been visited and
							// may currently be part of a completed root cycle
// we found a new reference to the cycle
// so it's not really a root cycle
// remove the cycle from the root cycles
// and convert it to a normal node
rootCycles.delete(dependency.cycle);
dependency.marker = DONE_MARKER;
break;
// DONE_MARKER: nothing to do, don't recurse into dependencies
}
} else {
					// All dependencies of the current node have been visited
// we leave the node
stack.pop();
topOfStack.node.marker = DONE_MARKER;
}
}
const cycle = selectedNode.cycle;
if (cycle) {
for (const node of cycle.nodes) {
node.marker = DONE_MAYBE_ROOT_CYCLE_MARKER;
}
rootCycles.add(cycle);
} else {
selectedNode.marker = DONE_AND_ROOT_MARKER;
roots.add(selectedNode);
}
}
}
// Extract roots from root cycles
	// We take the nodes with the most incoming edges
	// inside the cycle
for (const cycle of rootCycles) {
let max = 0;
/** @type {Set<Node<T>>} */
const cycleRoots = new Set();
const nodes = cycle.nodes;
for (const node of nodes) {
for (const dep of node.dependencies) {
if (nodes.has(dep)) {
dep.incoming++;
if (dep.incoming < max) continue;
if (dep.incoming > max) {
cycleRoots.clear();
max = dep.incoming;
}
cycleRoots.add(dep);
}
}
}
for (const cycleRoot of cycleRoots) {
roots.add(cycleRoot);
}
}
// When roots were found, return them
if (roots.size > 0) {
return Array.from(roots, r => r.item);
} else {
throw new Error("Implementation of findGraphRoots is broken");
}
};
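A small sketch of the root-finding algorithm on a made-up dependency map:

const findGraphRoots = require("./findGraphRoots"); // assumed path

const deps = new Map([
	["entry", ["shared"]],
	["shared", ["util"]],
	["util", []],
	["orphan", []]
]);

const roots = findGraphRoots(deps.keys(), item => deps.get(item) || []);
console.log(Array.from(roots)); // [ "entry", "orphan" ]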

337
node_modules/webpack/lib/util/fs.js generated vendored Normal file
View File

@@ -0,0 +1,337 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const path = require("path");
/** @typedef {import("../../declarations/WebpackOptions").WatchOptions} WatchOptions */
/** @typedef {import("../FileSystemInfo").FileSystemInfoEntry} FileSystemInfoEntry */
/**
* @typedef {Object} IStats
* @property {() => boolean} isFile
* @property {() => boolean} isDirectory
* @property {() => boolean} isBlockDevice
* @property {() => boolean} isCharacterDevice
* @property {() => boolean} isSymbolicLink
* @property {() => boolean} isFIFO
* @property {() => boolean} isSocket
* @property {number | bigint} dev
* @property {number | bigint} ino
* @property {number | bigint} mode
* @property {number | bigint} nlink
* @property {number | bigint} uid
* @property {number | bigint} gid
* @property {number | bigint} rdev
* @property {number | bigint} size
* @property {number | bigint} blksize
* @property {number | bigint} blocks
* @property {number | bigint} atimeMs
* @property {number | bigint} mtimeMs
* @property {number | bigint} ctimeMs
* @property {number | bigint} birthtimeMs
* @property {Date} atime
* @property {Date} mtime
* @property {Date} ctime
* @property {Date} birthtime
*/
/**
* @typedef {Object} IDirent
* @property {() => boolean} isFile
* @property {() => boolean} isDirectory
* @property {() => boolean} isBlockDevice
* @property {() => boolean} isCharacterDevice
* @property {() => boolean} isSymbolicLink
* @property {() => boolean} isFIFO
* @property {() => boolean} isSocket
* @property {string | Buffer} name
*/
/** @typedef {function((NodeJS.ErrnoException | null)=): void} Callback */
/** @typedef {function((NodeJS.ErrnoException | null)=, Buffer=): void} BufferCallback */
/** @typedef {function((NodeJS.ErrnoException | null)=, Buffer|string=): void} BufferOrStringCallback */
/** @typedef {function((NodeJS.ErrnoException | null)=, (string | Buffer)[] | IDirent[]=): void} DirentArrayCallback */
/** @typedef {function((NodeJS.ErrnoException | null)=, string=): void} StringCallback */
/** @typedef {function((NodeJS.ErrnoException | null)=, number=): void} NumberCallback */
/** @typedef {function((NodeJS.ErrnoException | null)=, IStats=): void} StatsCallback */
/** @typedef {function((NodeJS.ErrnoException | Error | null)=, any=): void} ReadJsonCallback */
/** @typedef {function((NodeJS.ErrnoException | Error | null)=, IStats|string=): void} LstatReadlinkAbsoluteCallback */
/**
* @typedef {Object} WatcherInfo
 * @property {Set<string>} changes get current aggregated changes that have not yet been sent to the callback
 * @property {Set<string>} removals get current aggregated removals that have not yet been sent to the callback
* @property {Map<string, FileSystemInfoEntry | "ignore">} fileTimeInfoEntries get info about files
* @property {Map<string, FileSystemInfoEntry | "ignore">} contextTimeInfoEntries get info about directories
*/
// TODO webpack 6 deprecate missing getInfo
/**
* @typedef {Object} Watcher
* @property {function(): void} close closes the watcher and all underlying file watchers
* @property {function(): void} pause closes the watcher, but keeps underlying file watchers alive until the next watch call
 * @property {function(): Set<string>=} getAggregatedChanges get current aggregated changes that have not yet been sent to the callback
 * @property {function(): Set<string>=} getAggregatedRemovals get current aggregated removals that have not yet been sent to the callback
* @property {function(): Map<string, FileSystemInfoEntry | "ignore">} getFileTimeInfoEntries get info about files
* @property {function(): Map<string, FileSystemInfoEntry | "ignore">} getContextTimeInfoEntries get info about directories
* @property {function(): WatcherInfo=} getInfo get info about timestamps and changes
*/
/**
* @callback WatchMethod
* @param {Iterable<string>} files watched files
* @param {Iterable<string>} directories watched directories
 * @param {Iterable<string>} missing watched existence entries
* @param {number} startTime timestamp of start time
* @param {WatchOptions} options options object
* @param {function(Error=, Map<string, FileSystemInfoEntry | "ignore">, Map<string, FileSystemInfoEntry | "ignore">, Set<string>, Set<string>): void} callback aggregated callback
* @param {function(string, number): void} callbackUndelayed callback when the first change was detected
* @returns {Watcher} a watcher
*/
// TODO webpack 6 make optional methods required
/**
* @typedef {Object} OutputFileSystem
* @property {function(string, Buffer|string, Callback): void} writeFile
* @property {function(string, Callback): void} mkdir
* @property {function(string, DirentArrayCallback): void=} readdir
* @property {function(string, Callback): void=} rmdir
* @property {function(string, Callback): void=} unlink
* @property {function(string, StatsCallback): void} stat
* @property {function(string, StatsCallback): void=} lstat
* @property {function(string, BufferOrStringCallback): void} readFile
* @property {(function(string, string): string)=} join
* @property {(function(string, string): string)=} relative
* @property {(function(string): string)=} dirname
*/
/**
* @typedef {Object} InputFileSystem
* @property {function(string, BufferOrStringCallback): void} readFile
* @property {(function(string, ReadJsonCallback): void)=} readJson
* @property {function(string, BufferOrStringCallback): void} readlink
* @property {function(string, DirentArrayCallback): void} readdir
* @property {function(string, StatsCallback): void} stat
* @property {function(string, StatsCallback): void=} lstat
* @property {(function(string, BufferOrStringCallback): void)=} realpath
* @property {(function(string=): void)=} purge
* @property {(function(string, string): string)=} join
* @property {(function(string, string): string)=} relative
* @property {(function(string): string)=} dirname
*/
/**
* @typedef {Object} WatchFileSystem
* @property {WatchMethod} watch
*/
/**
* @typedef {Object} IntermediateFileSystemExtras
* @property {function(string): void} mkdirSync
* @property {function(string): NodeJS.WritableStream} createWriteStream
* @property {function(string, string, NumberCallback): void} open
* @property {function(number, Buffer, number, number, number, NumberCallback): void} read
* @property {function(number, Callback): void} close
* @property {function(string, string, Callback): void} rename
*/
/** @typedef {InputFileSystem & OutputFileSystem & IntermediateFileSystemExtras} IntermediateFileSystem */
/**
*
* @param {InputFileSystem|OutputFileSystem|undefined} fs a file system
* @param {string} rootPath the root path
* @param {string} targetPath the target path
* @returns {string} location of targetPath relative to rootPath
*/
const relative = (fs, rootPath, targetPath) => {
if (fs && fs.relative) {
return fs.relative(rootPath, targetPath);
} else if (path.posix.isAbsolute(rootPath)) {
return path.posix.relative(rootPath, targetPath);
} else if (path.win32.isAbsolute(rootPath)) {
return path.win32.relative(rootPath, targetPath);
} else {
throw new Error(
`${rootPath} is neither a posix nor a windows path, and there is no 'relative' method defined in the file system`
);
}
};
exports.relative = relative;
/**
* @param {InputFileSystem|OutputFileSystem|undefined} fs a file system
* @param {string} rootPath a path
* @param {string} filename a filename
* @returns {string} the joined path
*/
const join = (fs, rootPath, filename) => {
if (fs && fs.join) {
return fs.join(rootPath, filename);
} else if (path.posix.isAbsolute(rootPath)) {
return path.posix.join(rootPath, filename);
} else if (path.win32.isAbsolute(rootPath)) {
return path.win32.join(rootPath, filename);
} else {
throw new Error(
`${rootPath} is neither a posix nor a windows path, and there is no 'join' method defined in the file system`
);
}
};
exports.join = join;
/**
* @param {InputFileSystem|OutputFileSystem|undefined} fs a file system
* @param {string} absPath an absolute path
* @returns {string} the parent directory of the absolute path
*/
const dirname = (fs, absPath) => {
if (fs && fs.dirname) {
return fs.dirname(absPath);
} else if (path.posix.isAbsolute(absPath)) {
return path.posix.dirname(absPath);
} else if (path.win32.isAbsolute(absPath)) {
return path.win32.dirname(absPath);
} else {
throw new Error(
`${absPath} is neither a posix nor a windows path, and there is no 'dirname' method defined in the file system`
);
}
};
exports.dirname = dirname;
/**
* @param {OutputFileSystem} fs a file system
* @param {string} p an absolute path
* @param {function(Error=): void} callback callback function for the error
* @returns {void}
*/
const mkdirp = (fs, p, callback) => {
fs.mkdir(p, err => {
if (err) {
if (err.code === "ENOENT") {
const dir = dirname(fs, p);
if (dir === p) {
callback(err);
return;
}
mkdirp(fs, dir, err => {
if (err) {
callback(err);
return;
}
fs.mkdir(p, err => {
if (err) {
if (err.code === "EEXIST") {
callback();
return;
}
callback(err);
return;
}
callback();
});
});
return;
} else if (err.code === "EEXIST") {
callback();
return;
}
callback(err);
return;
}
callback();
});
};
exports.mkdirp = mkdirp;
/**
* @param {IntermediateFileSystem} fs a file system
* @param {string} p an absolute path
* @returns {void}
*/
const mkdirpSync = (fs, p) => {
try {
fs.mkdirSync(p);
} catch (err) {
if (err) {
if (err.code === "ENOENT") {
const dir = dirname(fs, p);
if (dir === p) {
throw err;
}
mkdirpSync(fs, dir);
fs.mkdirSync(p);
return;
} else if (err.code === "EEXIST") {
return;
}
throw err;
}
}
};
exports.mkdirpSync = mkdirpSync;
/**
* @param {InputFileSystem} fs a file system
* @param {string} p an absolute path
* @param {ReadJsonCallback} callback callback
* @returns {void}
*/
const readJson = (fs, p, callback) => {
if ("readJson" in fs) return fs.readJson(p, callback);
fs.readFile(p, (err, buf) => {
if (err) return callback(err);
let data;
try {
data = JSON.parse(buf.toString("utf-8"));
} catch (e) {
return callback(e);
}
return callback(null, data);
});
};
exports.readJson = readJson;
/**
* @param {InputFileSystem} fs a file system
* @param {string} p an absolute path
* @param {ReadJsonCallback} callback callback
* @returns {void}
*/
const lstatReadlinkAbsolute = (fs, p, callback) => {
let i = 3;
const doReadLink = () => {
fs.readlink(p, (err, target) => {
if (err && --i > 0) {
				// It might have just been changed from a symlink to a file
// we retry 2 times to catch this case before throwing the error
return doStat();
}
if (err || !target) return doStat();
const value = target.toString();
callback(null, join(fs, dirname(fs, p), value));
});
};
const doStat = () => {
if ("lstat" in fs) {
return fs.lstat(p, (err, stats) => {
if (err) return callback(err);
if (stats.isSymbolicLink()) {
return doReadLink();
}
callback(null, stats);
});
} else {
return fs.stat(p, callback);
}
};
if ("lstat" in fs) return doStat();
doReadLink();
};
exports.lstatReadlinkAbsolute = lstatReadlinkAbsolute;
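A brief sketch of the path helpers and mkdirp above; the paths are invented and Node's built-in fs stands in for the OutputFileSystem:

const { join, dirname, relative, mkdirp } = require("./fs"); // assumed path
const nodeFs = require("fs");

// Without a file-system specific implementation, the helpers fall back to
// path.posix / path.win32 depending on the shape of the root path.
console.log(join(undefined, "/root/project", "dist/main.js")); // "/root/project/dist/main.js"
console.log(dirname(undefined, "C:\\project\\dist\\main.js")); // "C:\\project\\dist"
console.log(relative(undefined, "/root/project", "/root/project/src")); // "src"

mkdirp(nodeFs, "/tmp/example/nested/dir", err => {
	if (err) console.error(err);
});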

68
node_modules/webpack/lib/util/hash/BatchedHash.js generated vendored Normal file
View File

@@ -0,0 +1,68 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const Hash = require("../Hash");
const MAX_SHORT_STRING = require("./wasm-hash").MAX_SHORT_STRING;
class BatchedHash extends Hash {
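	/**
	 * @param {Hash} hash the underlying hash that batched updates are forwarded to
	 */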
constructor(hash) {
super();
this.string = undefined;
this.encoding = undefined;
this.hash = hash;
}
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @param {string|Buffer} data data
* @param {string=} inputEncoding data encoding
* @returns {this} updated hash
*/
update(data, inputEncoding) {
if (this.string !== undefined) {
if (
typeof data === "string" &&
inputEncoding === this.encoding &&
this.string.length + data.length < MAX_SHORT_STRING
) {
this.string += data;
return this;
}
this.hash.update(this.string, this.encoding);
this.string = undefined;
}
if (typeof data === "string") {
if (
data.length < MAX_SHORT_STRING &&
// base64 encoding is not valid since it may contain padding chars
(!inputEncoding || !inputEncoding.startsWith("ba"))
) {
this.string = data;
this.encoding = inputEncoding;
} else {
this.hash.update(data, inputEncoding);
}
} else {
this.hash.update(data);
}
return this;
}
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @param {string=} encoding encoding of the return value
* @returns {string|Buffer} digest
*/
digest(encoding) {
if (this.string !== undefined) {
this.hash.update(this.string, this.encoding);
}
return this.hash.digest(encoding);
}
}
module.exports = BatchedHash;
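An illustrative sketch of the batching behavior; the wrapped hash below is a stand-in object that just records calls, not one of webpack's real hash implementations:

const BatchedHash = require("./BatchedHash"); // assumed path

const calls = [];
const recordingHash = {
	update(data, encoding) {
		calls.push([data, encoding]);
		return this;
	},
	digest() {
		return calls.map(([data]) => data).join("");
	}
};

const hash = new BatchedHash(recordingHash);
hash.update("a").update("b").update("c"); // short strings are buffered
console.log(hash.digest()); // "abc" (the wrapped hash saw a single update call)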

20
node_modules/webpack/lib/util/hash/md4.js generated vendored Normal file

File diff suppressed because one or more lines are too long

163
node_modules/webpack/lib/util/hash/wasm-hash.js generated vendored Normal file
View File

@@ -0,0 +1,163 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
// 65536 is the size of a wasm memory page
// 64 is the maximum chunk size for every possible wasm hash implementation
// 4 is the maximum number of bytes per char for string encoding (max is utf-8)
// ~3 makes sure that it's always a block of 4 chars, so it avoids partially encoded bytes for base64
const MAX_SHORT_STRING = Math.floor((65536 - 64) / 4) & ~3;
class WasmHash {
/**
* @param {WebAssembly.Instance} instance wasm instance
* @param {WebAssembly.Instance[]} instancesPool pool of instances
* @param {number} chunkSize size of data chunks passed to wasm
* @param {number} digestSize size of digest returned by wasm
*/
constructor(instance, instancesPool, chunkSize, digestSize) {
const exports = /** @type {any} */ (instance.exports);
exports.init();
this.exports = exports;
this.mem = Buffer.from(exports.memory.buffer, 0, 65536);
this.buffered = 0;
this.instancesPool = instancesPool;
this.chunkSize = chunkSize;
this.digestSize = digestSize;
}
reset() {
this.buffered = 0;
this.exports.init();
}
/**
* @param {Buffer | string} data data
* @param {BufferEncoding=} encoding encoding
* @returns {this} itself
*/
update(data, encoding) {
if (typeof data === "string") {
while (data.length > MAX_SHORT_STRING) {
this._updateWithShortString(data.slice(0, MAX_SHORT_STRING), encoding);
data = data.slice(MAX_SHORT_STRING);
}
this._updateWithShortString(data, encoding);
return this;
}
this._updateWithBuffer(data);
return this;
}
/**
* @param {string} data data
* @param {BufferEncoding=} encoding encoding
* @returns {void}
*/
_updateWithShortString(data, encoding) {
const { exports, buffered, mem, chunkSize } = this;
let endPos;
if (data.length < 70) {
if (!encoding || encoding === "utf-8" || encoding === "utf8") {
endPos = buffered;
for (let i = 0; i < data.length; i++) {
const cc = data.charCodeAt(i);
if (cc < 0x80) mem[endPos++] = cc;
else if (cc < 0x800) {
mem[endPos] = (cc >> 6) | 0xc0;
mem[endPos + 1] = (cc & 0x3f) | 0x80;
endPos += 2;
} else {
// bail-out for weird chars
endPos += mem.write(data.slice(i), endPos, encoding);
break;
}
}
} else if (encoding === "latin1") {
endPos = buffered;
for (let i = 0; i < data.length; i++) {
const cc = data.charCodeAt(i);
mem[endPos++] = cc;
}
} else {
endPos = buffered + mem.write(data, buffered, encoding);
}
} else {
endPos = buffered + mem.write(data, buffered, encoding);
}
if (endPos < chunkSize) {
this.buffered = endPos;
} else {
const l = endPos & ~(this.chunkSize - 1);
exports.update(l);
const newBuffered = endPos - l;
this.buffered = newBuffered;
if (newBuffered > 0) mem.copyWithin(0, l, endPos);
}
}
/**
* @param {Buffer} data data
* @returns {void}
*/
_updateWithBuffer(data) {
const { exports, buffered, mem } = this;
const length = data.length;
if (buffered + length < this.chunkSize) {
data.copy(mem, buffered, 0, length);
this.buffered += length;
} else {
const l = (buffered + length) & ~(this.chunkSize - 1);
if (l > 65536) {
let i = 65536 - buffered;
data.copy(mem, buffered, 0, i);
exports.update(65536);
const stop = l - buffered - 65536;
while (i < stop) {
data.copy(mem, 0, i, i + 65536);
exports.update(65536);
i += 65536;
}
data.copy(mem, 0, i, l - buffered);
exports.update(l - buffered - i);
} else {
data.copy(mem, buffered, 0, l - buffered);
exports.update(l);
}
const newBuffered = length + buffered - l;
this.buffered = newBuffered;
if (newBuffered > 0) data.copy(mem, 0, length - newBuffered, length);
}
}
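	/**
	 * @param {string=} type digest encoding ("hex", "binary" or any Buffer encoding)
	 * @returns {Buffer | string} the digest; the instance is returned to the pool afterwards
	 */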
digest(type) {
const { exports, buffered, mem, digestSize } = this;
exports.final(buffered);
this.instancesPool.push(this);
const hex = mem.toString("latin1", 0, digestSize);
if (type === "hex") return hex;
if (type === "binary" || !type) return Buffer.from(hex, "hex");
return Buffer.from(hex, "hex").toString(type);
}
}
const create = (wasmModule, instancesPool, chunkSize, digestSize) => {
if (instancesPool.length > 0) {
const old = instancesPool.pop();
old.reset();
return old;
} else {
return new WasmHash(
new WebAssembly.Instance(wasmModule),
instancesPool,
chunkSize,
digestSize
);
}
};
module.exports = create;
module.exports.MAX_SHORT_STRING = MAX_SHORT_STRING;
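As a usage note: this factory is not consumed directly; the md4 and xxhash64 wrappers in this directory (their diffs are suppressed in this listing) bind it to their compiled wasm modules, and createHash.js wraps the result in BatchedHash. A hedged sketch, assuming the sibling xxhash64 module keeps that shape:

const createXXHash64 = require("./xxhash64"); // assumed sibling wrapper

const h = createXXHash64(); // reuses a pooled instance or instantiates the wasm module
h.update("hello ");
h.update("world");
console.log(h.digest("hex")); // 16 hex characters for the 64-bit digest
// digest() also puts the instance back into the pool for later reuse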

20
node_modules/webpack/lib/util/hash/xxhash64.js generated vendored Normal file

File diff suppressed because one or more lines are too long

377
node_modules/webpack/lib/util/identifier.js generated vendored Normal file
View File

@@ -0,0 +1,377 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
*/
"use strict";
const path = require("path");
const WINDOWS_ABS_PATH_REGEXP = /^[a-zA-Z]:[\\/]/;
const SEGMENTS_SPLIT_REGEXP = /([|!])/;
const WINDOWS_PATH_SEPARATOR_REGEXP = /\\/g;
/**
* @typedef {Object} MakeRelativePathsCache
* @property {Map<string, Map<string, string>>=} relativePaths
*/
const relativePathToRequest = relativePath => {
if (relativePath === "") return "./.";
if (relativePath === "..") return "../.";
if (relativePath.startsWith("../")) return relativePath;
return `./${relativePath}`;
};
/**
* @param {string} context context for relative path
* @param {string} maybeAbsolutePath path to make relative
* @returns {string} relative path in request style
*/
const absoluteToRequest = (context, maybeAbsolutePath) => {
if (maybeAbsolutePath[0] === "/") {
if (
maybeAbsolutePath.length > 1 &&
maybeAbsolutePath[maybeAbsolutePath.length - 1] === "/"
) {
// this 'path' is actually a regexp generated by dynamic requires.
// Don't treat it as an absolute path.
return maybeAbsolutePath;
}
const querySplitPos = maybeAbsolutePath.indexOf("?");
let resource =
querySplitPos === -1
? maybeAbsolutePath
: maybeAbsolutePath.slice(0, querySplitPos);
resource = relativePathToRequest(path.posix.relative(context, resource));
return querySplitPos === -1
? resource
: resource + maybeAbsolutePath.slice(querySplitPos);
}
if (WINDOWS_ABS_PATH_REGEXP.test(maybeAbsolutePath)) {
const querySplitPos = maybeAbsolutePath.indexOf("?");
let resource =
querySplitPos === -1
? maybeAbsolutePath
: maybeAbsolutePath.slice(0, querySplitPos);
resource = path.win32.relative(context, resource);
if (!WINDOWS_ABS_PATH_REGEXP.test(resource)) {
resource = relativePathToRequest(
resource.replace(WINDOWS_PATH_SEPARATOR_REGEXP, "/")
);
}
return querySplitPos === -1
? resource
: resource + maybeAbsolutePath.slice(querySplitPos);
}
// not an absolute path
return maybeAbsolutePath;
};
/**
* @param {string} context context for relative path
* @param {string} relativePath path
* @returns {string} absolute path
*/
const requestToAbsolute = (context, relativePath) => {
if (relativePath.startsWith("./") || relativePath.startsWith("../"))
return path.join(context, relativePath);
return relativePath;
};
const makeCacheable = realFn => {
/** @type {WeakMap<object, Map<string, ParsedResource>>} */
const cache = new WeakMap();
const getCache = associatedObjectForCache => {
const entry = cache.get(associatedObjectForCache);
if (entry !== undefined) return entry;
/** @type {Map<string, ParsedResource>} */
const map = new Map();
cache.set(associatedObjectForCache, map);
return map;
};
/**
* @param {string} str the path with query and fragment
* @param {Object=} associatedObjectForCache an object to which the cache will be attached
* @returns {ParsedResource} parsed parts
*/
const fn = (str, associatedObjectForCache) => {
if (!associatedObjectForCache) return realFn(str);
const cache = getCache(associatedObjectForCache);
const entry = cache.get(str);
if (entry !== undefined) return entry;
const result = realFn(str);
cache.set(str, result);
return result;
};
fn.bindCache = associatedObjectForCache => {
const cache = getCache(associatedObjectForCache);
return str => {
const entry = cache.get(str);
if (entry !== undefined) return entry;
const result = realFn(str);
cache.set(str, result);
return result;
};
};
return fn;
};
const makeCacheableWithContext = fn => {
/** @type {WeakMap<object, Map<string, Map<string, string>>>} */
const cache = new WeakMap();
/**
* @param {string} context context used to create relative path
* @param {string} identifier identifier used to create relative path
* @param {Object=} associatedObjectForCache an object to which the cache will be attached
* @returns {string} the returned relative path
*/
const cachedFn = (context, identifier, associatedObjectForCache) => {
if (!associatedObjectForCache) return fn(context, identifier);
let innerCache = cache.get(associatedObjectForCache);
if (innerCache === undefined) {
innerCache = new Map();
cache.set(associatedObjectForCache, innerCache);
}
let cachedResult;
let innerSubCache = innerCache.get(context);
if (innerSubCache === undefined) {
innerCache.set(context, (innerSubCache = new Map()));
} else {
cachedResult = innerSubCache.get(identifier);
}
if (cachedResult !== undefined) {
return cachedResult;
} else {
const result = fn(context, identifier);
innerSubCache.set(identifier, result);
return result;
}
};
/**
* @param {Object=} associatedObjectForCache an object to which the cache will be attached
* @returns {function(string, string): string} cached function
*/
cachedFn.bindCache = associatedObjectForCache => {
let innerCache;
if (associatedObjectForCache) {
innerCache = cache.get(associatedObjectForCache);
if (innerCache === undefined) {
innerCache = new Map();
cache.set(associatedObjectForCache, innerCache);
}
} else {
innerCache = new Map();
}
/**
* @param {string} context context used to create relative path
* @param {string} identifier identifier used to create relative path
* @returns {string} the returned relative path
*/
const boundFn = (context, identifier) => {
let cachedResult;
let innerSubCache = innerCache.get(context);
if (innerSubCache === undefined) {
innerCache.set(context, (innerSubCache = new Map()));
} else {
cachedResult = innerSubCache.get(identifier);
}
if (cachedResult !== undefined) {
return cachedResult;
} else {
const result = fn(context, identifier);
innerSubCache.set(identifier, result);
return result;
}
};
return boundFn;
};
/**
* @param {string} context context used to create relative path
* @param {Object=} associatedObjectForCache an object to which the cache will be attached
* @returns {function(string): string} cached function
*/
cachedFn.bindContextCache = (context, associatedObjectForCache) => {
let innerSubCache;
if (associatedObjectForCache) {
let innerCache = cache.get(associatedObjectForCache);
if (innerCache === undefined) {
innerCache = new Map();
cache.set(associatedObjectForCache, innerCache);
}
innerSubCache = innerCache.get(context);
if (innerSubCache === undefined) {
innerCache.set(context, (innerSubCache = new Map()));
}
} else {
innerSubCache = new Map();
}
/**
* @param {string} identifier identifier used to create relative path
* @returns {string} the returned relative path
*/
const boundFn = identifier => {
const cachedResult = innerSubCache.get(identifier);
if (cachedResult !== undefined) {
return cachedResult;
} else {
const result = fn(context, identifier);
innerSubCache.set(identifier, result);
return result;
}
};
return boundFn;
};
return cachedFn;
};
/**
*
* @param {string} context context for relative path
* @param {string} identifier identifier for path
* @returns {string} a converted relative path
*/
const _makePathsRelative = (context, identifier) => {
return identifier
.split(SEGMENTS_SPLIT_REGEXP)
.map(str => absoluteToRequest(context, str))
.join("");
};
exports.makePathsRelative = makeCacheableWithContext(_makePathsRelative);
/**
*
* @param {string} context context for relative path
* @param {string} identifier identifier for path
* @returns {string} a converted relative path
*/
const _makePathsAbsolute = (context, identifier) => {
return identifier
.split(SEGMENTS_SPLIT_REGEXP)
.map(str => requestToAbsolute(context, str))
.join("");
};
exports.makePathsAbsolute = makeCacheableWithContext(_makePathsAbsolute);
/**
* @param {string} context absolute context path
* @param {string} request any request string may containing absolute paths, query string, etc.
* @returns {string} a new request string avoiding absolute paths when possible
*/
const _contextify = (context, request) => {
return request
.split("!")
.map(r => absoluteToRequest(context, r))
.join("!");
};
const contextify = makeCacheableWithContext(_contextify);
exports.contextify = contextify;
/**
* @param {string} context absolute context path
* @param {string} request any request string
* @returns {string} a new request string using absolute paths when possible
*/
const _absolutify = (context, request) => {
return request
.split("!")
.map(r => requestToAbsolute(context, r))
.join("!");
};
const absolutify = makeCacheableWithContext(_absolutify);
exports.absolutify = absolutify;
const PATH_QUERY_FRAGMENT_REGEXP =
/^((?:\0.|[^?#\0])*)(\?(?:\0.|[^#\0])*)?(#.*)?$/;
const PATH_QUERY_REGEXP = /^((?:\0.|[^?\0])*)(\?.*)?$/;
/** @typedef {{ resource: string, path: string, query: string, fragment: string }} ParsedResource */
/** @typedef {{ resource: string, path: string, query: string }} ParsedResourceWithoutFragment */
/**
* @param {string} str the path with query and fragment
* @returns {ParsedResource} parsed parts
*/
const _parseResource = str => {
const match = PATH_QUERY_FRAGMENT_REGEXP.exec(str);
return {
resource: str,
path: match[1].replace(/\0(.)/g, "$1"),
query: match[2] ? match[2].replace(/\0(.)/g, "$1") : "",
fragment: match[3] || ""
};
};
exports.parseResource = makeCacheable(_parseResource);
/**
* Parse resource, skips fragment part
* @param {string} str the path with query and fragment
* @returns {ParsedResourceWithoutFragment} parsed parts
*/
const _parseResourceWithoutFragment = str => {
const match = PATH_QUERY_REGEXP.exec(str);
return {
resource: str,
path: match[1].replace(/\0(.)/g, "$1"),
query: match[2] ? match[2].replace(/\0(.)/g, "$1") : ""
};
};
exports.parseResourceWithoutFragment = makeCacheable(
_parseResourceWithoutFragment
);
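A quick sketch of what the two parsers return; "\0" acts as an escape character in the input, so e.g. "\0?" keeps a literal "?" inside the path:

const { parseResource, parseResourceWithoutFragment } = require("webpack/lib/util/identifier");

parseResource("./style.css?inline#header");
// { resource: "./style.css?inline#header", path: "./style.css", query: "?inline", fragment: "#header" }

parseResourceWithoutFragment("./style.css?inline#header");
// { resource: "./style.css?inline#header", path: "./style.css", query: "?inline#header" }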
/**
* @param {string} filename the filename whose directory nesting should be undone
* @param {string} outputPath the output path that is restored (only relevant when filename contains "..")
* @param {boolean} enforceRelative when true, "./" is returned for empty paths
* @returns {string} the repeated "../" needed to leave the directory of the provided filename and get back to the output directory
*/
exports.getUndoPath = (filename, outputPath, enforceRelative) => {
let depth = -1;
let append = "";
outputPath = outputPath.replace(/[\\/]$/, "");
for (const part of filename.split(/[/\\]+/)) {
if (part === "..") {
if (depth > -1) {
depth--;
} else {
const i = outputPath.lastIndexOf("/");
const j = outputPath.lastIndexOf("\\");
const pos = i < 0 ? j : j < 0 ? i : Math.max(i, j);
if (pos < 0) return outputPath + "/";
append = outputPath.slice(pos + 1) + "/" + append;
outputPath = outputPath.slice(0, pos);
}
} else if (part !== ".") {
depth++;
}
}
return depth > 0
? `${"../".repeat(depth)}${append}`
: enforceRelative
? `./${append}`
: append;
};
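A small sketch of getUndoPath with hypothetical values; it only inspects the shape of the strings, no filesystem access is involved:

const { getUndoPath } = require("webpack/lib/util/identifier");

getUndoPath("main.js", "/dist", true);            // "./"  (file sits directly in the output dir)
getUndoPath("js/chunks/0.js", "/dist", true);     // "../../"
getUndoPath("../assets/img.png", "/project/dist", true); // "../dist/"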

213
node_modules/webpack/lib/util/internalSerializables.js generated vendored Normal file
View File

@@ -0,0 +1,213 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
// We need to include a list of requires here
// to allow webpack to be bundled with only static requires
// We could use a dynamic require(`../${request}`) but this
// would include too many modules and not every tool is able
// to process this
module.exports = {
AsyncDependenciesBlock: () => require("../AsyncDependenciesBlock"),
CommentCompilationWarning: () => require("../CommentCompilationWarning"),
ContextModule: () => require("../ContextModule"),
"cache/PackFileCacheStrategy": () =>
require("../cache/PackFileCacheStrategy"),
"cache/ResolverCachePlugin": () => require("../cache/ResolverCachePlugin"),
"container/ContainerEntryDependency": () =>
require("../container/ContainerEntryDependency"),
"container/ContainerEntryModule": () =>
require("../container/ContainerEntryModule"),
"container/ContainerExposedDependency": () =>
require("../container/ContainerExposedDependency"),
"container/FallbackDependency": () =>
require("../container/FallbackDependency"),
"container/FallbackItemDependency": () =>
require("../container/FallbackItemDependency"),
"container/FallbackModule": () => require("../container/FallbackModule"),
"container/RemoteModule": () => require("../container/RemoteModule"),
"container/RemoteToExternalDependency": () =>
require("../container/RemoteToExternalDependency"),
"dependencies/AMDDefineDependency": () =>
require("../dependencies/AMDDefineDependency"),
"dependencies/AMDRequireArrayDependency": () =>
require("../dependencies/AMDRequireArrayDependency"),
"dependencies/AMDRequireContextDependency": () =>
require("../dependencies/AMDRequireContextDependency"),
"dependencies/AMDRequireDependenciesBlock": () =>
require("../dependencies/AMDRequireDependenciesBlock"),
"dependencies/AMDRequireDependency": () =>
require("../dependencies/AMDRequireDependency"),
"dependencies/AMDRequireItemDependency": () =>
require("../dependencies/AMDRequireItemDependency"),
"dependencies/CachedConstDependency": () =>
require("../dependencies/CachedConstDependency"),
"dependencies/CreateScriptUrlDependency": () =>
require("../dependencies/CreateScriptUrlDependency"),
"dependencies/CommonJsRequireContextDependency": () =>
require("../dependencies/CommonJsRequireContextDependency"),
"dependencies/CommonJsExportRequireDependency": () =>
require("../dependencies/CommonJsExportRequireDependency"),
"dependencies/CommonJsExportsDependency": () =>
require("../dependencies/CommonJsExportsDependency"),
"dependencies/CommonJsFullRequireDependency": () =>
require("../dependencies/CommonJsFullRequireDependency"),
"dependencies/CommonJsRequireDependency": () =>
require("../dependencies/CommonJsRequireDependency"),
"dependencies/CommonJsSelfReferenceDependency": () =>
require("../dependencies/CommonJsSelfReferenceDependency"),
"dependencies/ConstDependency": () =>
require("../dependencies/ConstDependency"),
"dependencies/ContextDependency": () =>
require("../dependencies/ContextDependency"),
"dependencies/ContextElementDependency": () =>
require("../dependencies/ContextElementDependency"),
"dependencies/CriticalDependencyWarning": () =>
require("../dependencies/CriticalDependencyWarning"),
"dependencies/CssImportDependency": () =>
require("../dependencies/CssImportDependency"),
"dependencies/CssLocalIdentifierDependency": () =>
require("../dependencies/CssLocalIdentifierDependency"),
"dependencies/CssSelfLocalIdentifierDependency": () =>
require("../dependencies/CssSelfLocalIdentifierDependency"),
"dependencies/CssExportDependency": () =>
require("../dependencies/CssExportDependency"),
"dependencies/CssUrlDependency": () =>
require("../dependencies/CssUrlDependency"),
"dependencies/DelegatedSourceDependency": () =>
require("../dependencies/DelegatedSourceDependency"),
"dependencies/DllEntryDependency": () =>
require("../dependencies/DllEntryDependency"),
"dependencies/EntryDependency": () =>
require("../dependencies/EntryDependency"),
"dependencies/ExportsInfoDependency": () =>
require("../dependencies/ExportsInfoDependency"),
"dependencies/HarmonyAcceptDependency": () =>
require("../dependencies/HarmonyAcceptDependency"),
"dependencies/HarmonyAcceptImportDependency": () =>
require("../dependencies/HarmonyAcceptImportDependency"),
"dependencies/HarmonyCompatibilityDependency": () =>
require("../dependencies/HarmonyCompatibilityDependency"),
"dependencies/HarmonyExportExpressionDependency": () =>
require("../dependencies/HarmonyExportExpressionDependency"),
"dependencies/HarmonyExportHeaderDependency": () =>
require("../dependencies/HarmonyExportHeaderDependency"),
"dependencies/HarmonyExportImportedSpecifierDependency": () =>
require("../dependencies/HarmonyExportImportedSpecifierDependency"),
"dependencies/HarmonyExportSpecifierDependency": () =>
require("../dependencies/HarmonyExportSpecifierDependency"),
"dependencies/HarmonyImportSideEffectDependency": () =>
require("../dependencies/HarmonyImportSideEffectDependency"),
"dependencies/HarmonyImportSpecifierDependency": () =>
require("../dependencies/HarmonyImportSpecifierDependency"),
"dependencies/HarmonyEvaluatedImportSpecifierDependency": () =>
require("../dependencies/HarmonyEvaluatedImportSpecifierDependency"),
"dependencies/ImportContextDependency": () =>
require("../dependencies/ImportContextDependency"),
"dependencies/ImportDependency": () =>
require("../dependencies/ImportDependency"),
"dependencies/ImportEagerDependency": () =>
require("../dependencies/ImportEagerDependency"),
"dependencies/ImportWeakDependency": () =>
require("../dependencies/ImportWeakDependency"),
"dependencies/JsonExportsDependency": () =>
require("../dependencies/JsonExportsDependency"),
"dependencies/LocalModule": () => require("../dependencies/LocalModule"),
"dependencies/LocalModuleDependency": () =>
require("../dependencies/LocalModuleDependency"),
"dependencies/ModuleDecoratorDependency": () =>
require("../dependencies/ModuleDecoratorDependency"),
"dependencies/ModuleHotAcceptDependency": () =>
require("../dependencies/ModuleHotAcceptDependency"),
"dependencies/ModuleHotDeclineDependency": () =>
require("../dependencies/ModuleHotDeclineDependency"),
"dependencies/ImportMetaHotAcceptDependency": () =>
require("../dependencies/ImportMetaHotAcceptDependency"),
"dependencies/ImportMetaHotDeclineDependency": () =>
require("../dependencies/ImportMetaHotDeclineDependency"),
"dependencies/ImportMetaContextDependency": () =>
require("../dependencies/ImportMetaContextDependency"),
"dependencies/ProvidedDependency": () =>
require("../dependencies/ProvidedDependency"),
"dependencies/PureExpressionDependency": () =>
require("../dependencies/PureExpressionDependency"),
"dependencies/RequireContextDependency": () =>
require("../dependencies/RequireContextDependency"),
"dependencies/RequireEnsureDependenciesBlock": () =>
require("../dependencies/RequireEnsureDependenciesBlock"),
"dependencies/RequireEnsureDependency": () =>
require("../dependencies/RequireEnsureDependency"),
"dependencies/RequireEnsureItemDependency": () =>
require("../dependencies/RequireEnsureItemDependency"),
"dependencies/RequireHeaderDependency": () =>
require("../dependencies/RequireHeaderDependency"),
"dependencies/RequireIncludeDependency": () =>
require("../dependencies/RequireIncludeDependency"),
"dependencies/RequireIncludeDependencyParserPlugin": () =>
require("../dependencies/RequireIncludeDependencyParserPlugin"),
"dependencies/RequireResolveContextDependency": () =>
require("../dependencies/RequireResolveContextDependency"),
"dependencies/RequireResolveDependency": () =>
require("../dependencies/RequireResolveDependency"),
"dependencies/RequireResolveHeaderDependency": () =>
require("../dependencies/RequireResolveHeaderDependency"),
"dependencies/RuntimeRequirementsDependency": () =>
require("../dependencies/RuntimeRequirementsDependency"),
"dependencies/StaticExportsDependency": () =>
require("../dependencies/StaticExportsDependency"),
"dependencies/SystemPlugin": () => require("../dependencies/SystemPlugin"),
"dependencies/UnsupportedDependency": () =>
require("../dependencies/UnsupportedDependency"),
"dependencies/URLDependency": () => require("../dependencies/URLDependency"),
"dependencies/WebAssemblyExportImportedDependency": () =>
require("../dependencies/WebAssemblyExportImportedDependency"),
"dependencies/WebAssemblyImportDependency": () =>
require("../dependencies/WebAssemblyImportDependency"),
"dependencies/WebpackIsIncludedDependency": () =>
require("../dependencies/WebpackIsIncludedDependency"),
"dependencies/WorkerDependency": () =>
require("../dependencies/WorkerDependency"),
"json/JsonData": () => require("../json/JsonData"),
"optimize/ConcatenatedModule": () =>
require("../optimize/ConcatenatedModule"),
DelegatedModule: () => require("../DelegatedModule"),
DependenciesBlock: () => require("../DependenciesBlock"),
DllModule: () => require("../DllModule"),
ExternalModule: () => require("../ExternalModule"),
FileSystemInfo: () => require("../FileSystemInfo"),
InitFragment: () => require("../InitFragment"),
InvalidDependenciesModuleWarning: () =>
require("../InvalidDependenciesModuleWarning"),
Module: () => require("../Module"),
ModuleBuildError: () => require("../ModuleBuildError"),
ModuleDependencyWarning: () => require("../ModuleDependencyWarning"),
ModuleError: () => require("../ModuleError"),
ModuleGraph: () => require("../ModuleGraph"),
ModuleParseError: () => require("../ModuleParseError"),
ModuleWarning: () => require("../ModuleWarning"),
NormalModule: () => require("../NormalModule"),
RawDataUrlModule: () => require("../asset/RawDataUrlModule"),
RawModule: () => require("../RawModule"),
"sharing/ConsumeSharedModule": () =>
require("../sharing/ConsumeSharedModule"),
"sharing/ConsumeSharedFallbackDependency": () =>
require("../sharing/ConsumeSharedFallbackDependency"),
"sharing/ProvideSharedModule": () =>
require("../sharing/ProvideSharedModule"),
"sharing/ProvideSharedDependency": () =>
require("../sharing/ProvideSharedDependency"),
"sharing/ProvideForSharedDependency": () =>
require("../sharing/ProvideForSharedDependency"),
UnsupportedFeatureWarning: () => require("../UnsupportedFeatureWarning"),
"util/LazySet": () => require("../util/LazySet"),
UnhandledSchemeError: () => require("../UnhandledSchemeError"),
NodeStuffInWebError: () => require("../NodeStuffInWebError"),
WebpackError: () => require("../WebpackError"),
"util/registerExternalSerializer": () => {
// already registered
}
};
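This map is consumed by the loader registered in serialization.js further below: each value is a thunk, and calling it lazily requires the module, which registers its serializer as a side effect. A minimal sketch of that lookup (the request name is just an example):

const internalSerializables = require("webpack/lib/util/internalSerializables");

const request = "dependencies/ConstDependency";
const load = internalSerializables[request];
if (load) {
	load(); // performs the actual require and thereby registers the serializer
}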

30
node_modules/webpack/lib/util/makeSerializable.js generated vendored Normal file
View File

@@ -0,0 +1,30 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
*/
"use strict";
const { register } = require("./serialization");
class ClassSerializer {
constructor(Constructor) {
this.Constructor = Constructor;
}
serialize(obj, context) {
obj.serialize(context);
}
deserialize(context) {
if (typeof this.Constructor.deserialize === "function") {
return this.Constructor.deserialize(context);
}
const obj = new this.Constructor();
obj.deserialize(context);
return obj;
}
}
module.exports = (Constructor, request, name = null) => {
register(Constructor, request, name, new ClassSerializer(Constructor));
};
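A hypothetical class showing the contract ClassSerializer expects: an instance serialize(context) that writes its state, and either a static deserialize(context) or a no-argument constructor plus an instance deserialize(context). The request string "./Point" is illustrative only; in webpack it must be a path the serialization loader can resolve.

const makeSerializable = require("webpack/lib/util/makeSerializable");

class Point {
	constructor(x = 0, y = 0) {
		this.x = x;
		this.y = y;
	}
	serialize({ write }) {
		write(this.x);
		write(this.y);
	}
	deserialize({ read }) {
		this.x = read();
		this.y = read();
	}
}

makeSerializable(Point, "./Point");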

32
node_modules/webpack/lib/util/memoize.js generated vendored Normal file
View File

@@ -0,0 +1,32 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
*/
"use strict";
/** @template T @typedef {function(): T} FunctionReturning */
/**
* @template T
* @param {FunctionReturning<T>} fn memoized function
* @returns {FunctionReturning<T>} new function
*/
const memoize = fn => {
let cache = false;
/** @type {T} */
let result = undefined;
return () => {
if (cache) {
return result;
} else {
result = fn();
cache = true;
// Allow to clean up memory for fn
// and all dependent resources
fn = undefined;
return result;
}
};
};
module.exports = memoize;
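Usage sketch: the wrapped function runs once, the result is cached, and the reference to fn is dropped afterwards so it can be garbage collected.

const memoize = require("webpack/lib/util/memoize");

const getSchema = memoize(() => {
	// expensive work happens only on the first call
	return { expensive: true };
});

getSchema() === getSchema(); // true, the same cached object is returned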

22
node_modules/webpack/lib/util/nonNumericOnlyHash.js generated vendored Normal file
View File

@@ -0,0 +1,22 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Ivan Kopeykin @vankop
*/
"use strict";
const A_CODE = "a".charCodeAt(0);
/**
* @param {string} hash hash
* @param {number} hashLength hash length
* @returns {string} returns a hash that has at least one non-numeric char
*/
module.exports = (hash, hashLength) => {
if (hashLength < 1) return "";
const slice = hash.slice(0, hashLength);
if (slice.match(/[^\d]/)) return slice;
return `${String.fromCharCode(
A_CODE + (parseInt(hash[0], 10) % 6)
)}${slice.slice(1)}`;
};
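Two illustrative calls: if the truncated hash consists only of digits, the leading digit is replaced by one of the letters a-f derived from it, otherwise the slice is returned unchanged.

const nonNumericOnlyHash = require("webpack/lib/util/nonNumericOnlyHash");

nonNumericOnlyHash("1234567890", 4); // "b234" (1 % 6 = 1 maps to "b")
nonNumericOnlyHash("ab34567890", 4); // "ab34" (already contains a non-numeric char)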

45
node_modules/webpack/lib/util/numberHash.js generated vendored Normal file
View File

@@ -0,0 +1,45 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const SAFE_LIMIT = 0x80000000;
const SAFE_PART = SAFE_LIMIT - 1;
const COUNT = 4;
const arr = [0, 0, 0, 0, 0];
const primes = [3, 7, 17, 19];
module.exports = (str, range) => {
arr.fill(0);
for (let i = 0; i < str.length; i++) {
const c = str.charCodeAt(i);
for (let j = 0; j < COUNT; j++) {
const p = (j + COUNT - 1) % COUNT;
arr[j] = (arr[j] + c * primes[j] + arr[p]) & SAFE_PART;
}
for (let j = 0; j < COUNT; j++) {
const q = arr[j] % COUNT;
arr[j] = arr[j] ^ (arr[q] >> 1);
}
}
if (range <= SAFE_PART) {
let sum = 0;
for (let j = 0; j < COUNT; j++) {
sum = (sum + arr[j]) % range;
}
return sum;
} else {
let sum1 = 0;
let sum2 = 0;
const rangeExt = Math.floor(range / SAFE_LIMIT);
for (let j = 0; j < COUNT; j += 2) {
sum1 = (sum1 + arr[j]) & SAFE_PART;
}
for (let j = 1; j < COUNT; j += 2) {
sum2 = (sum2 + arr[j]) % rangeExt;
}
return (sum2 * SAFE_LIMIT + sum1) % range;
}
};
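numberHash deterministically maps a string to an integer in [0, range), which is useful wherever a stable, well-distributed small number is needed (e.g. deterministic ids). A sketch:

const numberHash = require("webpack/lib/util/numberHash");

const bucket = numberHash("src/index.js", 1000); // some integer in [0, 1000)
numberHash("src/index.js", 1000) === bucket;     // true, same input gives the same result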

15
node_modules/webpack/lib/util/objectToMap.js generated vendored Normal file
View File

@@ -0,0 +1,15 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
*/
"use strict";
/**
* Convert an object into an ES6 map
*
* @param {object} obj any object type that works with Object.entries()
* @returns {Map<string, any>} an ES6 Map of KV pairs
*/
module.exports = function objectToMap(obj) {
return new Map(Object.entries(obj));
};
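For completeness, a one-line usage example:

const objectToMap = require("webpack/lib/util/objectToMap");

objectToMap({ react: "^18.0.0", lodash: "^4.17.0" }); // Map(2) { "react" => "^18.0.0", "lodash" => "^4.17.0" }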

62
node_modules/webpack/lib/util/processAsyncTree.js generated vendored Normal file
View File

@@ -0,0 +1,62 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/**
* @template T
* @template {Error} E
* @param {Iterable<T>} items initial items
* @param {number} concurrency number of items running in parallel
* @param {function(T, function(T): void, function(E=): void): void} processor worker which pushes more items
* @param {function(E=): void} callback all items processed
* @returns {void}
*/
const processAsyncTree = (items, concurrency, processor, callback) => {
const queue = Array.from(items);
if (queue.length === 0) return callback();
let processing = 0;
let finished = false;
let processScheduled = true;
const push = item => {
queue.push(item);
if (!processScheduled && processing < concurrency) {
processScheduled = true;
process.nextTick(processQueue);
}
};
const processorCallback = err => {
processing--;
if (err && !finished) {
finished = true;
callback(err);
return;
}
if (!processScheduled) {
processScheduled = true;
process.nextTick(processQueue);
}
};
const processQueue = () => {
if (finished) return;
while (processing < concurrency && queue.length > 0) {
processing++;
const item = queue.pop();
processor(item, push, processorCallback);
}
processScheduled = false;
if (queue.length === 0 && processing === 0 && !finished) {
finished = true;
callback();
}
};
processQueue();
};
module.exports = processAsyncTree;
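A sketch of how the helper is typically driven: the processor may push newly discovered items back into the queue, and the final callback fires once the queue is drained or an error occurred. The directory walk below is a hypothetical example, not something webpack itself does in this file.

const fs = require("fs");
const path = require("path");
const processAsyncTree = require("webpack/lib/util/processAsyncTree");

processAsyncTree(
	["/tmp/some-root"], // initial items (hypothetical)
	10,                 // at most 10 items processed in parallel
	(dir, push, callback) => {
		fs.readdir(dir, { withFileTypes: true }, (err, entries) => {
			if (err) return callback(err);
			for (const entry of entries) {
				// push discovered subdirectories back into the queue
				if (entry.isDirectory()) push(path.join(dir, entry.name));
			}
			callback();
		});
	},
	err => {
		if (err) console.error(err);
		else console.log("walk finished");
	}
);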

78
node_modules/webpack/lib/util/propertyAccess.js generated vendored Normal file
View File

@@ -0,0 +1,78 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const SAFE_IDENTIFIER = /^[_a-zA-Z$][_a-zA-Z$0-9]*$/;
const RESERVED_IDENTIFIER = new Set([
"break",
"case",
"catch",
"class",
"const",
"continue",
"debugger",
"default",
"delete",
"do",
"else",
"export",
"extends",
"finally",
"for",
"function",
"if",
"import",
"in",
"instanceof",
"new",
"return",
"super",
"switch",
"this",
"throw",
"try",
"typeof",
"var",
"void",
"while",
"with",
"enum",
// strict mode
"implements",
"interface",
"let",
"package",
"private",
"protected",
"public",
"static",
"yield",
"yield",
// module code
"await",
// skip future reserved keywords defined under ES1 till ES3
// additional
"null",
"true",
"false"
]);
const propertyAccess = (properties, start = 0) => {
let str = "";
for (let i = start; i < properties.length; i++) {
const p = properties[i];
if (`${+p}` === p) {
str += `[${p}]`;
} else if (SAFE_IDENTIFIER.test(p) && !RESERVED_IDENTIFIER.has(p)) {
str += `.${p}`;
} else {
str += `[${JSON.stringify(p)}]`;
}
}
return str;
};
module.exports = propertyAccess;
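propertyAccess turns a property path into code that is safe to concatenate after an expression: dot access for safe identifiers, bracket access for numbers, reserved words and everything else. For example:

const propertyAccess = require("webpack/lib/util/propertyAccess");

propertyAccess(["lib", "utils", "0", "foo-bar", "class"]);
// '.lib.utils[0]["foo-bar"]["class"]'

// start lets you skip leading segments
propertyAccess(["exports", "default"], 1); // '["default"]'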

337
node_modules/webpack/lib/util/registerExternalSerializer.js generated vendored Normal file
View File

@@ -0,0 +1,337 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const { register } = require("./serialization");
const Position = /** @type {TODO} */ (require("acorn")).Position;
const SourceLocation = require("acorn").SourceLocation;
const ValidationError = require("schema-utils/dist/ValidationError").default;
const {
CachedSource,
ConcatSource,
OriginalSource,
PrefixSource,
RawSource,
ReplaceSource,
SourceMapSource
} = require("webpack-sources");
/** @typedef {import("acorn").Position} Position */
/** @typedef {import("../Dependency").RealDependencyLocation} RealDependencyLocation */
/** @typedef {import("../Dependency").SourcePosition} SourcePosition */
/** @typedef {import("./serialization").ObjectDeserializerContext} ObjectDeserializerContext */
/** @typedef {import("./serialization").ObjectSerializerContext} ObjectSerializerContext */
/** @typedef {ObjectSerializerContext & { writeLazy?: (any) => void }} WebpackObjectSerializerContext */
const CURRENT_MODULE = "webpack/lib/util/registerExternalSerializer";
register(
CachedSource,
CURRENT_MODULE,
"webpack-sources/CachedSource",
new (class CachedSourceSerializer {
/**
* @param {CachedSource} source the cached source to be serialized
* @param {WebpackObjectSerializerContext} context context
* @returns {void}
*/
serialize(source, { write, writeLazy }) {
if (writeLazy) {
writeLazy(source.originalLazy());
} else {
write(source.original());
}
write(source.getCachedData());
}
/**
* @param {ObjectDeserializerContext} context context
* @returns {CachedSource} cached source
*/
deserialize({ read }) {
const source = read();
const cachedData = read();
return new CachedSource(source, cachedData);
}
})()
);
register(
RawSource,
CURRENT_MODULE,
"webpack-sources/RawSource",
new (class RawSourceSerializer {
/**
* @param {RawSource} source the raw source to be serialized
* @param {WebpackObjectSerializerContext} context context
* @returns {void}
*/
serialize(source, { write }) {
write(source.buffer());
write(!source.isBuffer());
}
/**
* @param {ObjectDeserializerContext} context context
* @returns {RawSource} raw source
*/
deserialize({ read }) {
const source = read();
const convertToString = read();
return new RawSource(source, convertToString);
}
})()
);
register(
ConcatSource,
CURRENT_MODULE,
"webpack-sources/ConcatSource",
new (class ConcatSourceSerializer {
/**
* @param {ConcatSource} source the concat source to be serialized
* @param {WebpackObjectSerializerContext} context context
* @returns {void}
*/
serialize(source, { write }) {
write(source.getChildren());
}
/**
* @param {ObjectDeserializerContext} context context
* @returns {ConcatSource} concat source
*/
deserialize({ read }) {
const source = new ConcatSource();
source.addAllSkipOptimizing(read());
return source;
}
})()
);
register(
PrefixSource,
CURRENT_MODULE,
"webpack-sources/PrefixSource",
new (class PrefixSourceSerializer {
/**
* @param {PrefixSource} source the prefix source to be serialized
* @param {WebpackObjectSerializerContext} context context
* @returns {void}
*/
serialize(source, { write }) {
write(source.getPrefix());
write(source.original());
}
/**
* @param {ObjectDeserializerContext} context context
* @returns {PrefixSource} prefix source
*/
deserialize({ read }) {
return new PrefixSource(read(), read());
}
})()
);
register(
ReplaceSource,
CURRENT_MODULE,
"webpack-sources/ReplaceSource",
new (class ReplaceSourceSerializer {
/**
* @param {ReplaceSource} source the replace source to be serialized
* @param {WebpackObjectSerializerContext} context context
* @returns {void}
*/
serialize(source, { write }) {
write(source.original());
write(source.getName());
const replacements = source.getReplacements();
write(replacements.length);
for (const repl of replacements) {
write(repl.start);
write(repl.end);
}
for (const repl of replacements) {
write(repl.content);
write(repl.name);
}
}
/**
* @param {ObjectDeserializerContext} context context
* @returns {ReplaceSource} replace source
*/
deserialize({ read }) {
const source = new ReplaceSource(read(), read());
const len = read();
const startEndBuffer = [];
for (let i = 0; i < len; i++) {
startEndBuffer.push(read(), read());
}
let j = 0;
for (let i = 0; i < len; i++) {
source.replace(
startEndBuffer[j++],
startEndBuffer[j++],
read(),
read()
);
}
return source;
}
})()
);
register(
OriginalSource,
CURRENT_MODULE,
"webpack-sources/OriginalSource",
new (class OriginalSourceSerializer {
/**
* @param {OriginalSource} source the original source to be serialized
* @param {WebpackObjectSerializerContext} context context
* @returns {void}
*/
serialize(source, { write }) {
write(source.buffer());
write(source.getName());
}
/**
* @param {ObjectDeserializerContext} context context
* @returns {OriginalSource} original source
*/
deserialize({ read }) {
const buffer = read();
const name = read();
return new OriginalSource(buffer, name);
}
})()
);
register(
SourceLocation,
CURRENT_MODULE,
"acorn/SourceLocation",
new (class SourceLocationSerializer {
/**
* @param {SourceLocation} loc the location to be serialized
* @param {WebpackObjectSerializerContext} context context
* @returns {void}
*/
serialize(loc, { write }) {
write(loc.start.line);
write(loc.start.column);
write(loc.end.line);
write(loc.end.column);
}
/**
* @param {ObjectDeserializerContext} context context
* @returns {RealDependencyLocation} location
*/
deserialize({ read }) {
return {
start: {
line: read(),
column: read()
},
end: {
line: read(),
column: read()
}
};
}
})()
);
register(
Position,
CURRENT_MODULE,
"acorn/Position",
new (class PositionSerializer {
/**
* @param {Position} pos the position to be serialized
* @param {WebpackObjectSerializerContext} context context
* @returns {void}
*/
serialize(pos, { write }) {
write(pos.line);
write(pos.column);
}
/**
* @param {ObjectDeserializerContext} context context
* @returns {SourcePosition} position
*/
deserialize({ read }) {
return {
line: read(),
column: read()
};
}
})()
);
register(
SourceMapSource,
CURRENT_MODULE,
"webpack-sources/SourceMapSource",
new (class SourceMapSourceSerializer {
/**
* @param {SourceMapSource} source the source map source to be serialized
* @param {WebpackObjectSerializerContext} context context
* @returns {void}
*/
serialize(source, { write }) {
write(source.getArgsAsBuffers());
}
/**
* @param {ObjectDeserializerContext} context context
* @returns {SourceMapSource} source map source
*/
deserialize({ read }) {
// @ts-expect-error
return new SourceMapSource(...read());
}
})()
);
register(
ValidationError,
CURRENT_MODULE,
"schema-utils/ValidationError",
new (class ValidationErrorSerializer {
// TODO error should be ValidationError, but this fails the type checks
/**
* @param {TODO} error the validation error to be serialized
* @param {WebpackObjectSerializerContext} context context
* @returns {void}
*/
serialize(error, { write }) {
write(error.errors);
write(error.schema);
write({
name: error.headerName,
baseDataPath: error.baseDataPath,
postFormatter: error.postFormatter
});
}
/**
* @param {ObjectDeserializerContext} context context
* @returns {TODO} error
*/
deserialize({ read }) {
return new ValidationError(read(), read(), read());
}
})()
);

623
node_modules/webpack/lib/util/runtime.js generated vendored Normal file

File diff suppressed because it is too large

484
node_modules/webpack/lib/util/semver.js generated vendored Normal file
View File

@@ -0,0 +1,484 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/** @typedef {(string|number|undefined|[])[]} SemVerRange */
/**
* @param {string} str version string
* @returns {(string|number|undefined|[])[]} parsed version
*/
const parseVersion = str => {
var splitAndConvert = function (str) {
return str.split(".").map(function (item) {
// eslint-disable-next-line eqeqeq
return +item == item ? +item : item;
});
};
var match = /^([^-+]+)?(?:-([^+]+))?(?:\+(.+))?$/.exec(str);
/** @type {(string|number|undefined|[])[]} */
var ver = match[1] ? splitAndConvert(match[1]) : [];
if (match[2]) {
ver.length++;
ver.push.apply(ver, splitAndConvert(match[2]));
}
if (match[3]) {
ver.push([]);
ver.push.apply(ver, splitAndConvert(match[3]));
}
return ver;
};
exports.parseVersion = parseVersion;
/* eslint-disable eqeqeq */
/**
* @param {string} a version
* @param {string} b version
* @returns {boolean} true, iff a < b
*/
const versionLt = (a, b) => {
// @ts-expect-error
a = parseVersion(a);
// @ts-expect-error
b = parseVersion(b);
var i = 0;
for (;;) {
// a b EOA object undefined number string
// EOA a == b a < b b < a a < b a < b
// object b < a (0) b < a a < b a < b
// undefined a < b a < b (0) a < b a < b
// number b < a b < a b < a (1) a < b
// string b < a b < a b < a b < a (1)
// EOA end of array
// (0) continue on
// (1) compare them via "<"
// Handles first row in table
if (i >= a.length) return i < b.length && (typeof b[i])[0] != "u";
var aValue = a[i];
var aType = (typeof aValue)[0];
// Handles first column in table
if (i >= b.length) return aType == "u";
var bValue = b[i];
var bType = (typeof bValue)[0];
if (aType == bType) {
if (aType != "o" && aType != "u" && aValue != bValue) {
return aValue < bValue;
}
i++;
} else {
// Handles remaining cases
if (aType == "o" && bType == "n") return true;
return bType == "s" || aType == "u";
}
}
};
/* eslint-enable eqeqeq */
exports.versionLt = versionLt;
/**
* @param {string} str range string
* @returns {SemVerRange} parsed range
*/
exports.parseRange = str => {
const splitAndConvert = str => {
return str
.split(".")
.map(item => (item !== "NaN" && `${+item}` === item ? +item : item));
};
// see https://docs.npmjs.com/misc/semver#range-grammar for grammar
const parsePartial = str => {
const match = /^([^-+]+)?(?:-([^+]+))?(?:\+(.+))?$/.exec(str);
/** @type {(string|number|undefined|[])[]} */
const ver = match[1] ? [0, ...splitAndConvert(match[1])] : [0];
if (match[2]) {
ver.length++;
ver.push.apply(ver, splitAndConvert(match[2]));
}
// remove trailing any matchers
let last = ver[ver.length - 1];
while (
ver.length &&
(last === undefined || /^[*xX]$/.test(/** @type {string} */ (last)))
) {
ver.pop();
last = ver[ver.length - 1];
}
return ver;
};
const toFixed = range => {
if (range.length === 1) {
// Special case for "*" is "x.x.x" instead of "="
return [0];
} else if (range.length === 2) {
// Special case for "1" is "1.x.x" instead of "=1"
return [1, ...range.slice(1)];
} else if (range.length === 3) {
// Special case for "1.2" is "1.2.x" instead of "=1.2"
return [2, ...range.slice(1)];
} else {
return [range.length, ...range.slice(1)];
}
};
const negate = range => {
return [-range[0] - 1, ...range.slice(1)];
};
const parseSimple = str => {
// simple ::= primitive | partial | tilde | caret
// primitive ::= ( '<' | '>' | '>=' | '<=' | '=' | '!' ) ( ' ' ) * partial
// tilde ::= '~' ( ' ' ) * partial
// caret ::= '^' ( ' ' ) * partial
const match = /^(\^|~|<=|<|>=|>|=|v|!)/.exec(str);
const start = match ? match[0] : "";
const remainder = parsePartial(
start.length ? str.slice(start.length).trim() : str.trim()
);
switch (start) {
case "^":
if (remainder.length > 1 && remainder[1] === 0) {
if (remainder.length > 2 && remainder[2] === 0) {
return [3, ...remainder.slice(1)];
}
return [2, ...remainder.slice(1)];
}
return [1, ...remainder.slice(1)];
case "~":
return [2, ...remainder.slice(1)];
case ">=":
return remainder;
case "=":
case "v":
case "":
return toFixed(remainder);
case "<":
return negate(remainder);
case ">": {
// and( >=, not( = ) ) => >=, =, not, and
const fixed = toFixed(remainder);
// eslint-disable-next-line no-sparse-arrays
return [, fixed, 0, remainder, 2];
}
case "<=":
// or( <, = ) => <, =, or
// eslint-disable-next-line no-sparse-arrays
return [, toFixed(remainder), negate(remainder), 1];
case "!": {
// not =
const fixed = toFixed(remainder);
// eslint-disable-next-line no-sparse-arrays
return [, fixed, 0];
}
default:
throw new Error("Unexpected start value");
}
};
const combine = (items, fn) => {
if (items.length === 1) return items[0];
const arr = [];
for (const item of items.slice().reverse()) {
if (0 in item) {
arr.push(item);
} else {
arr.push(...item.slice(1));
}
}
// eslint-disable-next-line no-sparse-arrays
return [, ...arr, ...items.slice(1).map(() => fn)];
};
const parseRange = str => {
// range ::= hyphen | simple ( ' ' ( ' ' ) * simple ) * | ''
// hyphen ::= partial ( ' ' ) * ' - ' ( ' ' ) * partial
const items = str.split(/\s+-\s+/);
if (items.length === 1) {
const items = str
.trim()
.split(/(?<=[-0-9A-Za-z])\s+/g)
.map(parseSimple);
return combine(items, 2);
}
const a = parsePartial(items[0]);
const b = parsePartial(items[1]);
// >=a <=b => and( >=a, or( <b, =b ) ) => >=a, <b, =b, or, and
// eslint-disable-next-line no-sparse-arrays
return [, toFixed(b), negate(b), 1, a, 2];
};
const parseLogicalOr = str => {
// range-set ::= range ( logical-or range ) *
// logical-or ::= ( ' ' ) * '||' ( ' ' ) *
const items = str.split(/\s*\|\|\s*/).map(parseRange);
return combine(items, 1);
};
return parseLogicalOr(str);
};
/* eslint-disable eqeqeq */
const rangeToString = range => {
var fixCount = range[0];
var str = "";
if (range.length === 1) {
return "*";
} else if (fixCount + 0.5) {
str +=
fixCount == 0
? ">="
: fixCount == -1
? "<"
: fixCount == 1
? "^"
: fixCount == 2
? "~"
: fixCount > 0
? "="
: "!=";
var needDot = 1;
// eslint-disable-next-line no-redeclare
for (var i = 1; i < range.length; i++) {
var item = range[i];
var t = (typeof item)[0];
needDot--;
str +=
t == "u"
? // undefined: prerelease marker, add an "-"
"-"
: // number or string: add the item, set flag to add an "." between two of them
(needDot > 0 ? "." : "") + ((needDot = 2), item);
}
return str;
} else {
var stack = [];
// eslint-disable-next-line no-redeclare
for (var i = 1; i < range.length; i++) {
// eslint-disable-next-line no-redeclare
var item = range[i];
stack.push(
item === 0
? "not(" + pop() + ")"
: item === 1
? "(" + pop() + " || " + pop() + ")"
: item === 2
? stack.pop() + " " + stack.pop()
: rangeToString(item)
);
}
return pop();
}
function pop() {
return stack.pop().replace(/^\((.+)\)$/, "$1");
}
};
/* eslint-enable eqeqeq */
exports.rangeToString = rangeToString;
/* eslint-disable eqeqeq */
/**
* @param {SemVerRange} range version range
* @param {string} version the version
* @returns {boolean} if version satisfy the range
*/
const satisfy = (range, version) => {
if (0 in range) {
// @ts-expect-error
version = parseVersion(version);
var fixCount = range[0];
// when negated is set, it will check for < instead of >=
var negated = fixCount < 0;
if (negated) fixCount = -fixCount - 1;
for (var i = 0, j = 1, isEqual = true; ; j++, i++) {
// cspell:word nequal nequ
// when isEqual = true:
// range version: EOA/object undefined number string
// EOA equal block big-ver big-ver
// undefined bigger next big-ver big-ver
// number smaller block cmp big-cmp
// fixed number smaller block cmp-fix differ
// string smaller block differ cmp
// fixed string smaller block small-cmp cmp-fix
// when isEqual = false:
// range version: EOA/object undefined number string
// EOA nequal block next-ver next-ver
// undefined nequal block next-ver next-ver
// number nequal block next next
// fixed number nequal block next next (this never happens)
// string nequal block next next
// fixed string nequal block next next (this never happens)
// EOA end of array
// equal (version is equal range):
// when !negated: return true,
// when negated: return false
// bigger (version is bigger as range):
// when fixed: return false,
// when !negated: return true,
// when negated: return false,
// smaller (version is smaller as range):
// when !negated: return false,
// when negated: return true
// nequal (version is not equal range (> resp <)): return true
// block (version is in different prerelease area): return false
// differ (version is different from fixed range (string vs. number)): return false
// next: continues to the next items
// next-ver: when fixed: return false, continues to the next item only for the version, sets isEqual=false
// big-ver: when fixed || negated: return false, continues to the next item only for the version, sets isEqual=false
// next-nequ: continues to the next items, sets isEqual=false
// cmp (negated === false): version < range => return false, version > range => next-nequ, else => next
// cmp (negated === true): version > range => return false, version < range => next-nequ, else => next
// cmp-fix: version == range => next, else => return false
// big-cmp: when negated => return false, else => next-nequ
// small-cmp: when negated => next-nequ, else => return false
var rangeType = j < range.length ? (typeof range[j])[0] : "";
var versionValue;
var versionType;
// Handles first column in both tables (end of version or object)
if (
i >= version.length ||
((versionValue = version[i]),
(versionType = (typeof versionValue)[0]) == "o")
) {
// Handles nequal
if (!isEqual) return true;
// Handles bigger
if (rangeType == "u") return j > fixCount && !negated;
// Handles equal and smaller: (range === EOA) XOR negated
return (rangeType == "") != negated; // equal + smaller
}
// Handles second column in both tables (version = undefined)
if (versionType == "u") {
if (!isEqual || rangeType != "u") {
return false;
}
}
// switch between first and second table
else if (isEqual) {
// Handle diagonal
if (rangeType == versionType) {
if (j <= fixCount) {
// Handles "cmp-fix" cases
if (versionValue != range[j]) {
return false;
}
} else {
// Handles "cmp" cases
if (negated ? versionValue > range[j] : versionValue < range[j]) {
return false;
}
if (versionValue != range[j]) isEqual = false;
}
}
// Handle big-ver
else if (rangeType != "s" && rangeType != "n") {
if (negated || j <= fixCount) return false;
isEqual = false;
j--;
}
// Handle differ, big-cmp and small-cmp
else if (j <= fixCount || versionType < rangeType != negated) {
return false;
} else {
isEqual = false;
}
} else {
// Handles all "next-ver" cases in the second table
if (rangeType != "s" && rangeType != "n") {
isEqual = false;
j--;
}
// next is applied by default
}
}
}
/** @type {(boolean | number)[]} */
var stack = [];
var p = stack.pop.bind(stack);
// eslint-disable-next-line no-redeclare
for (var i = 1; i < range.length; i++) {
var item = /** @type {SemVerRange | 0 | 1 | 2} */ (range[i]);
stack.push(
item == 1
? p() | p()
: item == 2
? p() & p()
: item
? satisfy(item, version)
: !p()
);
}
return !!p();
};
/* eslint-enable eqeqeq */
exports.satisfy = satisfy;
exports.stringifyHoley = json => {
switch (typeof json) {
case "undefined":
return "";
case "object":
if (Array.isArray(json)) {
let str = "[";
for (let i = 0; i < json.length; i++) {
if (i !== 0) str += ",";
str += this.stringifyHoley(json[i]);
}
str += "]";
return str;
} else {
return JSON.stringify(json);
}
default:
return JSON.stringify(json);
}
};
//#region runtime code: parseVersion
exports.parseVersionRuntimeCode = runtimeTemplate =>
`var parseVersion = ${runtimeTemplate.basicFunction("str", [
"// see webpack/lib/util/semver.js for original code",
`var p=${
runtimeTemplate.supportsArrowFunction() ? "p=>" : "function(p)"
}{return p.split(".").map((${
runtimeTemplate.supportsArrowFunction() ? "p=>" : "function(p)"
}{return+p==p?+p:p}))},n=/^([^-+]+)?(?:-([^+]+))?(?:\\+(.+))?$/.exec(str),r=n[1]?p(n[1]):[];return n[2]&&(r.length++,r.push.apply(r,p(n[2]))),n[3]&&(r.push([]),r.push.apply(r,p(n[3]))),r;`
])}`;
//#endregion
//#region runtime code: versionLt
exports.versionLtRuntimeCode = runtimeTemplate =>
`var versionLt = ${runtimeTemplate.basicFunction("a, b", [
"// see webpack/lib/util/semver.js for original code",
'a=parseVersion(a),b=parseVersion(b);for(var r=0;;){if(r>=a.length)return r<b.length&&"u"!=(typeof b[r])[0];var e=a[r],n=(typeof e)[0];if(r>=b.length)return"u"==n;var t=b[r],f=(typeof t)[0];if(n!=f)return"o"==n&&"n"==f||("s"==f||"u"==n);if("o"!=n&&"u"!=n&&e!=t)return e<t;r++}'
])}`;
//#endregion
//#region runtime code: rangeToString
exports.rangeToStringRuntimeCode = runtimeTemplate =>
`var rangeToString = ${runtimeTemplate.basicFunction("range", [
"// see webpack/lib/util/semver.js for original code",
'var r=range[0],n="";if(1===range.length)return"*";if(r+.5){n+=0==r?">=":-1==r?"<":1==r?"^":2==r?"~":r>0?"=":"!=";for(var e=1,a=1;a<range.length;a++){e--,n+="u"==(typeof(t=range[a]))[0]?"-":(e>0?".":"")+(e=2,t)}return n}var g=[];for(a=1;a<range.length;a++){var t=range[a];g.push(0===t?"not("+o()+")":1===t?"("+o()+" || "+o()+")":2===t?g.pop()+" "+g.pop():rangeToString(t))}return o();function o(){return g.pop().replace(/^\\((.+)\\)$/,"$1")}'
])}`;
//#endregion
//#region runtime code: satisfy
exports.satisfyRuntimeCode = runtimeTemplate =>
`var satisfy = ${runtimeTemplate.basicFunction("range, version", [
"// see webpack/lib/util/semver.js for original code",
'if(0 in range){version=parseVersion(version);var e=range[0],r=e<0;r&&(e=-e-1);for(var n=0,i=1,a=!0;;i++,n++){var f,s,g=i<range.length?(typeof range[i])[0]:"";if(n>=version.length||"o"==(s=(typeof(f=version[n]))[0]))return!a||("u"==g?i>e&&!r:""==g!=r);if("u"==s){if(!a||"u"!=g)return!1}else if(a)if(g==s)if(i<=e){if(f!=range[i])return!1}else{if(r?f>range[i]:f<range[i])return!1;f!=range[i]&&(a=!1)}else if("s"!=g&&"n"!=g){if(r||i<=e)return!1;a=!1,i--}else{if(i<=e||s<g!=r)return!1;a=!1}else"s"!=g&&"n"!=g&&(a=!1,i--)}}var t=[],o=t.pop.bind(t);for(n=1;n<range.length;n++){var u=range[n];t.push(1==u?o()|o():2==u?o()&o():u?satisfy(u,version):!o())}return!!o();'
])}`;
//#endregion
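A short sketch of the exported helpers; ranges are parsed into the compact array encoding described above and evaluated without pulling in the full semver package:

const { parseVersion, versionLt, parseRange, rangeToString, satisfy } = require("webpack/lib/util/semver");

versionLt("1.2.3", "1.10.0"); // true (segments are compared numerically, not lexically)

const range = parseRange("^1.2.3");
satisfy(range, "1.4.0"); // true
satisfy(range, "2.0.0"); // false
rangeToString(range);    // "^1.2.3"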

128
node_modules/webpack/lib/util/serialization.js generated vendored Normal file
View File

@@ -0,0 +1,128 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
*/
"use strict";
const memoize = require("./memoize");
/** @typedef {import("../serialization/BinaryMiddleware").MEASURE_END_OPERATION_TYPE} MEASURE_END_OPERATION */
/** @typedef {import("../serialization/BinaryMiddleware").MEASURE_START_OPERATION_TYPE} MEASURE_START_OPERATION */
/** @typedef {import("../serialization/ObjectMiddleware").ObjectDeserializerContext} ObjectDeserializerContext */
/** @typedef {import("../serialization/ObjectMiddleware").ObjectSerializerContext} ObjectSerializerContext */
/** @typedef {import("../serialization/Serializer")} Serializer */
const getBinaryMiddleware = memoize(() =>
require("../serialization/BinaryMiddleware")
);
const getObjectMiddleware = memoize(() =>
require("../serialization/ObjectMiddleware")
);
const getSingleItemMiddleware = memoize(() =>
require("../serialization/SingleItemMiddleware")
);
const getSerializer = memoize(() => require("../serialization/Serializer"));
const getSerializerMiddleware = memoize(() =>
require("../serialization/SerializerMiddleware")
);
const getBinaryMiddlewareInstance = memoize(
() => new (getBinaryMiddleware())()
);
const registerSerializers = memoize(() => {
require("./registerExternalSerializer");
// Load internal paths with a relative require
// This allows bundling all internal serializers
const internalSerializables = require("./internalSerializables");
getObjectMiddleware().registerLoader(/^webpack\/lib\//, req => {
const loader = internalSerializables[req.slice("webpack/lib/".length)];
if (loader) {
loader();
} else {
console.warn(`${req} not found in internalSerializables`);
}
return true;
});
});
/** @type {Serializer} */
let buffersSerializer;
// Expose serialization API
module.exports = {
get register() {
return getObjectMiddleware().register;
},
get registerLoader() {
return getObjectMiddleware().registerLoader;
},
get registerNotSerializable() {
return getObjectMiddleware().registerNotSerializable;
},
get NOT_SERIALIZABLE() {
return getObjectMiddleware().NOT_SERIALIZABLE;
},
/** @type {MEASURE_START_OPERATION} */
get MEASURE_START_OPERATION() {
return getBinaryMiddleware().MEASURE_START_OPERATION;
},
/** @type {MEASURE_END_OPERATION} */
get MEASURE_END_OPERATION() {
return getBinaryMiddleware().MEASURE_END_OPERATION;
},
get buffersSerializer() {
if (buffersSerializer !== undefined) return buffersSerializer;
registerSerializers();
const Serializer = getSerializer();
const binaryMiddleware = getBinaryMiddlewareInstance();
const SerializerMiddleware = getSerializerMiddleware();
const SingleItemMiddleware = getSingleItemMiddleware();
return (buffersSerializer = new Serializer([
new SingleItemMiddleware(),
new (getObjectMiddleware())(context => {
if (context.write) {
context.writeLazy = value => {
context.write(
SerializerMiddleware.createLazy(value, binaryMiddleware)
);
};
}
}, "md4"),
binaryMiddleware
]));
},
createFileSerializer: (fs, hashFunction) => {
registerSerializers();
const Serializer = getSerializer();
const FileMiddleware = require("../serialization/FileMiddleware");
const fileMiddleware = new FileMiddleware(fs, hashFunction);
const binaryMiddleware = getBinaryMiddlewareInstance();
const SerializerMiddleware = getSerializerMiddleware();
const SingleItemMiddleware = getSingleItemMiddleware();
return new Serializer([
new SingleItemMiddleware(),
new (getObjectMiddleware())(context => {
if (context.write) {
context.writeLazy = value => {
context.write(
SerializerMiddleware.createLazy(value, binaryMiddleware)
);
};
context.writeSeparate = (value, options) => {
const lazy = SerializerMiddleware.createLazy(
value,
fileMiddleware,
options
);
context.write(lazy);
return lazy;
};
}
}, hashFunction),
binaryMiddleware,
fileMiddleware
]);
}
};

206
node_modules/webpack/lib/util/smartGrouping.js generated vendored Normal file
View File

@@ -0,0 +1,206 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/**
* @typedef {Object} GroupOptions
* @property {boolean=} groupChildren
* @property {boolean=} force
* @property {number=} targetGroupCount
*/
/**
* @template T
* @template R
* @typedef {Object} GroupConfig
* @property {function(T): string[]} getKeys
* @property {function(string, (R | T)[], T[]): R} createGroup
* @property {function(string, T[]): GroupOptions=} getOptions
*/
/**
* @template T
* @template R
* @typedef {Object} ItemWithGroups
* @property {T} item
* @property {Set<Group<T, R>>} groups
*/
/**
* @template T
* @template R
* @typedef {{ config: GroupConfig<T, R>, name: string, alreadyGrouped: boolean, items: Set<ItemWithGroups<T, R>> | undefined }} Group
*/
/**
* @template T
* @template R
* @param {T[]} items the list of items
* @param {GroupConfig<T, R>[]} groupConfigs configuration
* @returns {(R | T)[]} grouped items
*/
const smartGrouping = (items, groupConfigs) => {
/** @type {Set<ItemWithGroups<T, R>>} */
const itemsWithGroups = new Set();
/** @type {Map<string, Group<T, R>>} */
const allGroups = new Map();
for (const item of items) {
/** @type {Set<Group<T, R>>} */
const groups = new Set();
for (let i = 0; i < groupConfigs.length; i++) {
const groupConfig = groupConfigs[i];
const keys = groupConfig.getKeys(item);
if (keys) {
for (const name of keys) {
const key = `${i}:${name}`;
let group = allGroups.get(key);
if (group === undefined) {
allGroups.set(
key,
(group = {
config: groupConfig,
name,
alreadyGrouped: false,
items: undefined
})
);
}
groups.add(group);
}
}
}
itemsWithGroups.add({
item,
groups
});
}
/**
* @param {Set<ItemWithGroups<T, R>>} itemsWithGroups input items with groups
* @returns {(T | R)[]} groups items
*/
const runGrouping = itemsWithGroups => {
const totalSize = itemsWithGroups.size;
for (const entry of itemsWithGroups) {
for (const group of entry.groups) {
if (group.alreadyGrouped) continue;
const items = group.items;
if (items === undefined) {
group.items = new Set([entry]);
} else {
items.add(entry);
}
}
}
/** @type {Map<Group<T, R>, { items: Set<ItemWithGroups<T, R>>, options: GroupOptions | false | undefined, used: boolean }>} */
const groupMap = new Map();
for (const group of allGroups.values()) {
if (group.items) {
const items = group.items;
group.items = undefined;
groupMap.set(group, {
items,
options: undefined,
used: false
});
}
}
/** @type {(T | R)[]} */
const results = [];
for (;;) {
/** @type {Group<T, R>} */
let bestGroup = undefined;
let bestGroupSize = -1;
let bestGroupItems = undefined;
let bestGroupOptions = undefined;
for (const [group, state] of groupMap) {
const { items, used } = state;
let options = state.options;
if (options === undefined) {
const groupConfig = group.config;
state.options = options =
(groupConfig.getOptions &&
groupConfig.getOptions(
group.name,
Array.from(items, ({ item }) => item)
)) ||
false;
}
const force = options && options.force;
if (!force) {
if (bestGroupOptions && bestGroupOptions.force) continue;
if (used) continue;
if (items.size <= 1 || totalSize - items.size <= 1) {
continue;
}
}
const targetGroupCount = (options && options.targetGroupCount) || 4;
let sizeValue = force
? items.size
: Math.min(
items.size,
(totalSize * 2) / targetGroupCount +
itemsWithGroups.size -
items.size
);
if (
sizeValue > bestGroupSize ||
(force && (!bestGroupOptions || !bestGroupOptions.force))
) {
bestGroup = group;
bestGroupSize = sizeValue;
bestGroupItems = items;
bestGroupOptions = options;
}
}
if (bestGroup === undefined) {
break;
}
const items = new Set(bestGroupItems);
const options = bestGroupOptions;
const groupChildren = !options || options.groupChildren !== false;
for (const item of items) {
itemsWithGroups.delete(item);
// Remove all groups that items have from the map to not select them again
for (const group of item.groups) {
const state = groupMap.get(group);
if (state !== undefined) {
state.items.delete(item);
if (state.items.size === 0) {
groupMap.delete(group);
} else {
state.options = undefined;
if (groupChildren) {
state.used = true;
}
}
}
}
}
groupMap.delete(bestGroup);
const key = bestGroup.name;
const groupConfig = bestGroup.config;
const allItems = Array.from(items, ({ item }) => item);
bestGroup.alreadyGrouped = true;
const children = groupChildren ? runGrouping(items) : allItems;
bestGroup.alreadyGrouped = false;
results.push(groupConfig.createGroup(key, children, allItems));
}
for (const { item } of itemsWithGroups) {
results.push(item);
}
return results;
};
return runGrouping(itemsWithGroups);
};
module.exports = smartGrouping;
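A small, self-contained sketch: items are grouped by the keys returned from getKeys, and each selected group is replaced by whatever createGroup returns. The shape of the items and groups below is made up for illustration.

const smartGrouping = require("webpack/lib/util/smartGrouping");

const modules = [
	{ name: "a.js", type: "js" },
	{ name: "b.js", type: "js" },
	{ name: "c.js", type: "js" },
	{ name: "x.css", type: "css" },
	{ name: "y.css", type: "css" }
];

const grouped = smartGrouping(modules, [
	{
		getKeys: module => [module.type],
		createGroup: (key, children, items) => ({ group: key, children })
	}
]);
// grouped is roughly [{ group: "js", children: [...] }, { group: "css", children: [...] }]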

61
node_modules/webpack/lib/util/source.js generated vendored Normal file
View File

@@ -0,0 +1,61 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/** @typedef {import("webpack-sources").Source} Source */
/** @type {WeakMap<Source, WeakMap<Source, boolean>>} */
const equalityCache = new WeakMap();
/**
* @param {Source} a a source
* @param {Source} b another source
* @returns {boolean} true, when both sources are equal
*/
const _isSourceEqual = (a, b) => {
// prefer .buffer(), it's called anyway during emit
/** @type {Buffer|string} */
let aSource = typeof a.buffer === "function" ? a.buffer() : a.source();
/** @type {Buffer|string} */
let bSource = typeof b.buffer === "function" ? b.buffer() : b.source();
if (aSource === bSource) return true;
if (typeof aSource === "string" && typeof bSource === "string") return false;
if (!Buffer.isBuffer(aSource)) aSource = Buffer.from(aSource, "utf-8");
if (!Buffer.isBuffer(bSource)) bSource = Buffer.from(bSource, "utf-8");
return aSource.equals(bSource);
};
/**
* @param {Source} a a source
* @param {Source} b another source
* @returns {boolean} true, when both sources are equal
*/
const isSourceEqual = (a, b) => {
if (a === b) return true;
const cache1 = equalityCache.get(a);
if (cache1 !== undefined) {
const result = cache1.get(b);
if (result !== undefined) return result;
}
const result = _isSourceEqual(a, b);
if (cache1 !== undefined) {
cache1.set(b, result);
} else {
const map = new WeakMap();
map.set(b, result);
equalityCache.set(a, map);
}
const cache2 = equalityCache.get(b);
if (cache2 !== undefined) {
cache2.set(a, result);
} else {
const map = new WeakMap();
map.set(a, result);
equalityCache.set(b, map);
}
return result;
};
exports.isSourceEqual = isSourceEqual;
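Usage sketch with webpack-sources (a dependency already required elsewhere in this commit); equality results are cached per pair of Source objects via WeakMaps:

const { RawSource } = require("webpack-sources");
const { isSourceEqual } = require("webpack/lib/util/source");

const a = new RawSource("body { color: red }");
const b = new RawSource("body { color: red }");

isSourceEqual(a, b); // true (compared via .buffer(), result cached for this pair)
isSourceEqual(a, new RawSource("body { color: blue }")); // false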