This commit is contained in:
lalBi94
2023-03-05 13:23:23 +01:00
commit 7bc56c09b5
14034 changed files with 1834369 additions and 0 deletions


@@ -0,0 +1,92 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const { STAGE_ADVANCED } = require("../OptimizationStages");
/** @typedef {import("../Chunk")} Chunk */
/** @typedef {import("../Compiler")} Compiler */
class AggressiveMergingPlugin {
constructor(options) {
if (
(options !== undefined && typeof options !== "object") ||
Array.isArray(options)
) {
throw new Error(
"Argument should be an options object. To use defaults, pass in nothing.\nFor more info on options, see https://webpack.js.org/plugins/"
);
}
this.options = options || {};
}
/**
* Apply the plugin
* @param {Compiler} compiler the compiler instance
* @returns {void}
*/
apply(compiler) {
const options = this.options;
const minSizeReduce = options.minSizeReduce || 1.5;
compiler.hooks.thisCompilation.tap(
"AggressiveMergingPlugin",
compilation => {
compilation.hooks.optimizeChunks.tap(
{
name: "AggressiveMergingPlugin",
stage: STAGE_ADVANCED
},
chunks => {
const chunkGraph = compilation.chunkGraph;
/** @type {{a: Chunk, b: Chunk, improvement: number}[]} */
let combinations = [];
for (const a of chunks) {
if (a.canBeInitial()) continue;
for (const b of chunks) {
if (b.canBeInitial()) continue;
if (b === a) break;
if (!chunkGraph.canChunksBeIntegrated(a, b)) {
continue;
}
const aSize = chunkGraph.getChunkSize(a, {
chunkOverhead: 0
});
const bSize = chunkGraph.getChunkSize(b, {
chunkOverhead: 0
});
const abSize = chunkGraph.getIntegratedChunksSize(b, a, {
chunkOverhead: 0
});
const improvement = (aSize + bSize) / abSize;
combinations.push({
a,
b,
improvement
});
}
}
combinations.sort((a, b) => {
return b.improvement - a.improvement;
});
const pair = combinations[0];
if (!pair) return;
if (pair.improvement < minSizeReduce) return;
chunkGraph.integrateChunks(pair.b, pair.a);
compilation.chunks.delete(pair.a);
return true;
}
);
}
);
}
}
module.exports = AggressiveMergingPlugin;
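The improvement metric is (aSize + bSize) / abSize: the more modules two chunks share, the smaller their merged size abSize and the higher the ratio. A minimal usage sketch (webpack.optimize.AggressiveMergingPlugin is the public alias; minSizeReduce defaults to 1.5, i.e. the separate sizes must total at least 1.5x the merged size):

const webpack = require("webpack");

module.exports = {
  plugins: [
    // merges the single best pair of non-initial chunks per optimization pass
    new webpack.optimize.AggressiveMergingPlugin({ minSizeReduce: 1.5 })
  ]
};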


@@ -0,0 +1,329 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const { STAGE_ADVANCED } = require("../OptimizationStages");
const { intersect } = require("../util/SetHelpers");
const {
compareModulesByIdentifier,
compareChunks
} = require("../util/comparators");
const createSchemaValidation = require("../util/create-schema-validation");
const identifierUtils = require("../util/identifier");
/** @typedef {import("../../declarations/plugins/optimize/AggressiveSplittingPlugin").AggressiveSplittingPluginOptions} AggressiveSplittingPluginOptions */
/** @typedef {import("../Chunk")} Chunk */
/** @typedef {import("../ChunkGraph")} ChunkGraph */
/** @typedef {import("../Compiler")} Compiler */
/** @typedef {import("../Module")} Module */
const validate = createSchemaValidation(
require("../../schemas/plugins/optimize/AggressiveSplittingPlugin.check.js"),
() =>
require("../../schemas/plugins/optimize/AggressiveSplittingPlugin.json"),
{
name: "Aggressive Splitting Plugin",
baseDataPath: "options"
}
);
const moveModuleBetween = (chunkGraph, oldChunk, newChunk) => {
return module => {
chunkGraph.disconnectChunkAndModule(oldChunk, module);
chunkGraph.connectChunkAndModule(newChunk, module);
};
};
/**
* @param {ChunkGraph} chunkGraph the chunk graph
* @param {Chunk} chunk the chunk
* @returns {function(Module): boolean} filter that excludes entry modules
*/
const isNotAEntryModule = (chunkGraph, chunk) => {
return module => {
return !chunkGraph.isEntryModuleInChunk(module, chunk);
};
};
/** @type {WeakSet<Chunk>} */
const recordedChunks = new WeakSet();
class AggressiveSplittingPlugin {
/**
* @param {AggressiveSplittingPluginOptions=} options options object
*/
constructor(options = {}) {
validate(options);
this.options = options;
if (typeof this.options.minSize !== "number") {
this.options.minSize = 30 * 1024;
}
if (typeof this.options.maxSize !== "number") {
this.options.maxSize = 50 * 1024;
}
if (typeof this.options.chunkOverhead !== "number") {
this.options.chunkOverhead = 0;
}
if (typeof this.options.entryChunkMultiplicator !== "number") {
this.options.entryChunkMultiplicator = 1;
}
}
/**
* @param {Chunk} chunk the chunk to test
* @returns {boolean} true if the chunk was recorded
*/
static wasChunkRecorded(chunk) {
return recordedChunks.has(chunk);
}
/**
* Apply the plugin
* @param {Compiler} compiler the compiler instance
* @returns {void}
*/
apply(compiler) {
compiler.hooks.thisCompilation.tap(
"AggressiveSplittingPlugin",
compilation => {
let needAdditionalSeal = false;
let newSplits;
let fromAggressiveSplittingSet;
let chunkSplitDataMap;
compilation.hooks.optimize.tap("AggressiveSplittingPlugin", () => {
newSplits = [];
fromAggressiveSplittingSet = new Set();
chunkSplitDataMap = new Map();
});
compilation.hooks.optimizeChunks.tap(
{
name: "AggressiveSplittingPlugin",
stage: STAGE_ADVANCED
},
chunks => {
const chunkGraph = compilation.chunkGraph;
// Precompute stuff
const nameToModuleMap = new Map();
const moduleToNameMap = new Map();
const makePathsRelative =
identifierUtils.makePathsRelative.bindContextCache(
compiler.context,
compiler.root
);
for (const m of compilation.modules) {
const name = makePathsRelative(m.identifier());
nameToModuleMap.set(name, m);
moduleToNameMap.set(m, name);
}
// Check used chunk ids
const usedIds = new Set();
for (const chunk of chunks) {
usedIds.add(chunk.id);
}
const recordedSplits =
(compilation.records && compilation.records.aggressiveSplits) ||
[];
const usedSplits = newSplits
? recordedSplits.concat(newSplits)
: recordedSplits;
const minSize = this.options.minSize;
const maxSize = this.options.maxSize;
const applySplit = splitData => {
// Cannot split if id is already taken
if (splitData.id !== undefined && usedIds.has(splitData.id)) {
return false;
}
// Get module objects from names
const selectedModules = splitData.modules.map(name =>
nameToModuleMap.get(name)
);
// Do the modules exist at all?
if (!selectedModules.every(Boolean)) return false;
// Check if size matches (faster than waiting for hash)
let size = 0;
for (const m of selectedModules) size += m.size();
if (size !== splitData.size) return false;
// get chunks with all modules
const selectedChunks = intersect(
selectedModules.map(
m => new Set(chunkGraph.getModuleChunksIterable(m))
)
);
// No relevant chunks found
if (selectedChunks.size === 0) return false;
// The found chunk is already the split or similar
if (
selectedChunks.size === 1 &&
chunkGraph.getNumberOfChunkModules(
Array.from(selectedChunks)[0]
) === selectedModules.length
) {
const chunk = Array.from(selectedChunks)[0];
if (fromAggressiveSplittingSet.has(chunk)) return false;
fromAggressiveSplittingSet.add(chunk);
chunkSplitDataMap.set(chunk, splitData);
return true;
}
// split the chunk into two parts
const newChunk = compilation.addChunk();
newChunk.chunkReason = "aggressive splitted";
for (const chunk of selectedChunks) {
selectedModules.forEach(
moveModuleBetween(chunkGraph, chunk, newChunk)
);
chunk.split(newChunk);
chunk.name = null;
}
fromAggressiveSplittingSet.add(newChunk);
chunkSplitDataMap.set(newChunk, splitData);
if (splitData.id !== null && splitData.id !== undefined) {
newChunk.id = splitData.id;
newChunk.ids = [splitData.id];
}
return true;
};
// try to restore to recorded splitting
let changed = false;
for (let j = 0; j < usedSplits.length; j++) {
const splitData = usedSplits[j];
if (applySplit(splitData)) changed = true;
}
// for any chunk which isn't split yet, split it and create a new entry
// start with the biggest chunk
const cmpFn = compareChunks(chunkGraph);
const sortedChunks = Array.from(chunks).sort((a, b) => {
const diff1 =
chunkGraph.getChunkModulesSize(b) -
chunkGraph.getChunkModulesSize(a);
if (diff1) return diff1;
const diff2 =
chunkGraph.getNumberOfChunkModules(a) -
chunkGraph.getNumberOfChunkModules(b);
if (diff2) return diff2;
return cmpFn(a, b);
});
for (const chunk of sortedChunks) {
if (fromAggressiveSplittingSet.has(chunk)) continue;
const size = chunkGraph.getChunkModulesSize(chunk);
if (
size > maxSize &&
chunkGraph.getNumberOfChunkModules(chunk) > 1
) {
const modules = chunkGraph
.getOrderedChunkModules(chunk, compareModulesByIdentifier)
.filter(isNotAEntryModule(chunkGraph, chunk));
const selectedModules = [];
let selectedModulesSize = 0;
for (let k = 0; k < modules.length; k++) {
const module = modules[k];
const newSize = selectedModulesSize + module.size();
if (newSize > maxSize && selectedModulesSize >= minSize) {
break;
}
selectedModulesSize = newSize;
selectedModules.push(module);
}
if (selectedModules.length === 0) continue;
const splitData = {
modules: selectedModules
.map(m => moduleToNameMap.get(m))
.sort(),
size: selectedModulesSize
};
if (applySplit(splitData)) {
newSplits = (newSplits || []).concat(splitData);
changed = true;
}
}
}
if (changed) return true;
}
);
compilation.hooks.recordHash.tap(
"AggressiveSplittingPlugin",
records => {
// 4. save made splittings to records
const allSplits = new Set();
const invalidSplits = new Set();
// Check if some splittings are invalid
// We remove invalid splittings and try again
for (const chunk of compilation.chunks) {
const splitData = chunkSplitDataMap.get(chunk);
if (splitData !== undefined) {
if (splitData.hash && chunk.hash !== splitData.hash) {
// Split was successful, but the hash doesn't match
// We can throw away the split since it's useless now
invalidSplits.add(splitData);
}
}
}
if (invalidSplits.size > 0) {
records.aggressiveSplits = records.aggressiveSplits.filter(
splitData => !invalidSplits.has(splitData)
);
needAdditionalSeal = true;
} else {
// set hash and id values on all (new) splittings
for (const chunk of compilation.chunks) {
const splitData = chunkSplitDataMap.get(chunk);
if (splitData !== undefined) {
splitData.hash = chunk.hash;
splitData.id = chunk.id;
allSplits.add(splitData);
// set flag for stats
recordedChunks.add(chunk);
}
}
// Also add all unused historical splits (after the used ones)
// They can still be used in some future compilation
const recordedSplits =
compilation.records && compilation.records.aggressiveSplits;
if (recordedSplits) {
for (const splitData of recordedSplits) {
if (!invalidSplits.has(splitData)) allSplits.add(splitData);
}
}
// record all splits
records.aggressiveSplits = Array.from(allSplits);
needAdditionalSeal = false;
}
}
);
compilation.hooks.needAdditionalSeal.tap(
"AggressiveSplittingPlugin",
() => {
if (needAdditionalSeal) {
needAdditionalSeal = false;
return true;
}
}
);
}
);
}
}
module.exports = AggressiveSplittingPlugin;
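A configuration sketch: the plugin is only effective with records enabled, since applied splits are persisted (with hash and id) via compilation.records.aggressiveSplits and re-applied on later builds. recordsPath is the standard webpack option; the sizes restate the constructor defaults above.

const path = require("path");
const webpack = require("webpack");

module.exports = {
  recordsPath: path.join(__dirname, "records.json"),
  plugins: [
    new webpack.optimize.AggressiveSplittingPlugin({
      minSize: 30 * 1024, // don't create splits smaller than 30 KiB
      maxSize: 50 * 1024 // split chunks whose module size exceeds 50 KiB
    })
  ]
};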

node_modules/webpack/lib/optimize/ConcatenatedModule.js

File diff suppressed because it is too large


@@ -0,0 +1,85 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const { STAGE_BASIC } = require("../OptimizationStages");
/** @typedef {import("../Chunk")} Chunk */
/** @typedef {import("../ChunkGroup")} ChunkGroup */
/** @typedef {import("../Compiler")} Compiler */
class EnsureChunkConditionsPlugin {
/**
* Apply the plugin
* @param {Compiler} compiler the compiler instance
* @returns {void}
*/
apply(compiler) {
compiler.hooks.compilation.tap(
"EnsureChunkConditionsPlugin",
compilation => {
const handler = chunks => {
const chunkGraph = compilation.chunkGraph;
// These sets are hoisted here to save memory
// They are cleared at the end of every loop
/** @type {Set<Chunk>} */
const sourceChunks = new Set();
/** @type {Set<ChunkGroup>} */
const chunkGroups = new Set();
for (const module of compilation.modules) {
if (!module.hasChunkCondition()) continue;
for (const chunk of chunkGraph.getModuleChunksIterable(module)) {
if (!module.chunkCondition(chunk, compilation)) {
sourceChunks.add(chunk);
for (const group of chunk.groupsIterable) {
chunkGroups.add(group);
}
}
}
if (sourceChunks.size === 0) continue;
/** @type {Set<Chunk>} */
const targetChunks = new Set();
chunkGroupLoop: for (const chunkGroup of chunkGroups) {
// Can module be placed in a chunk of this group?
for (const chunk of chunkGroup.chunks) {
if (module.chunkCondition(chunk, compilation)) {
targetChunks.add(chunk);
continue chunkGroupLoop;
}
}
// We reached the entrypoint: fail
if (chunkGroup.isInitial()) {
throw new Error(
"Cannot fullfil chunk condition of " + module.identifier()
);
}
// Try placing in all parents
for (const group of chunkGroup.parentsIterable) {
chunkGroups.add(group);
}
}
for (const sourceChunk of sourceChunks) {
chunkGraph.disconnectChunkAndModule(sourceChunk, module);
}
for (const targetChunk of targetChunks) {
chunkGraph.connectChunkAndModule(targetChunk, module);
}
sourceChunks.clear();
chunkGroups.clear();
}
};
compilation.hooks.optimizeChunks.tap(
{
name: "EnsureChunkConditionsPlugin",
stage: STAGE_BASIC
},
handler
);
}
);
}
}
module.exports = EnsureChunkConditionsPlugin;


@@ -0,0 +1,118 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/** @typedef {import("../Chunk")} Chunk */
/** @typedef {import("../Compiler")} Compiler */
/** @typedef {import("../Module")} Module */
class FlagIncludedChunksPlugin {
/**
* Apply the plugin
* @param {Compiler} compiler the compiler instance
* @returns {void}
*/
apply(compiler) {
compiler.hooks.compilation.tap("FlagIncludedChunksPlugin", compilation => {
compilation.hooks.optimizeChunkIds.tap(
"FlagIncludedChunksPlugin",
chunks => {
const chunkGraph = compilation.chunkGraph;
// prepare a bit integer for each module
// 2^31 is the max number representable as an SMI in v8
// we want the bits distributed this way:
// the highest bit is pretty rare and only one module should get it
// so it has a probability of 1 / modulesCount
// the first bit (2^0) is the easiest and every module could get it
// if it doesn't get a better bit
// from bit 2^n to 2^(n+1) the probability decreases by a factor of p
// so 1 / modulesCount == p^31
// <=> p = sqrt31(1 / modulesCount)
// so we use a modulo of 1 / sqrt31(1 / modulesCount)
/** @type {WeakMap<Module, number>} */
const moduleBits = new WeakMap();
const modulesCount = compilation.modules.size;
// precalculate the modulo values for each bit
const modulo = 1 / Math.pow(1 / modulesCount, 1 / 31);
const modulos = Array.from(
{ length: 31 },
(x, i) => Math.pow(modulo, i) | 0
);
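// Worked example with hypothetical numbers: for modulesCount = 1024,
// modulo = 1024^(1/31) ≈ 1.2506, so modulos ≈ [1, 1, 1, 1, 2, 3, ..., 818].
// In the loop below, module i receives the highest bit whose modulo
// divides i: bit 30 is handed out roughly once per 818 modules, while
// bit 0 (modulos[0] === 1) matches every module as the fallback.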
// iterate all modules to generate bit values
let i = 0;
for (const module of compilation.modules) {
let bit = 30;
while (i % modulos[bit] !== 0) {
bit--;
}
moduleBits.set(module, 1 << bit);
i++;
}
// iterate all chunks to generate bitmaps
/** @type {WeakMap<Chunk, number>} */
const chunkModulesHash = new WeakMap();
for (const chunk of chunks) {
let hash = 0;
for (const module of chunkGraph.getChunkModulesIterable(chunk)) {
hash |= moduleBits.get(module);
}
chunkModulesHash.set(chunk, hash);
}
for (const chunkA of chunks) {
const chunkAHash = chunkModulesHash.get(chunkA);
const chunkAModulesCount =
chunkGraph.getNumberOfChunkModules(chunkA);
if (chunkAModulesCount === 0) continue;
let bestModule = undefined;
for (const module of chunkGraph.getChunkModulesIterable(chunkA)) {
if (
bestModule === undefined ||
chunkGraph.getNumberOfModuleChunks(bestModule) >
chunkGraph.getNumberOfModuleChunks(module)
)
bestModule = module;
}
loopB: for (const chunkB of chunkGraph.getModuleChunksIterable(
bestModule
)) {
// as we iterate the same iterables twice
// skip if we find ourselves
if (chunkA === chunkB) continue;
const chunkBModulesCount =
chunkGraph.getNumberOfChunkModules(chunkB);
// ids for empty chunks are not included
if (chunkBModulesCount === 0) continue;
// instead of swapping A and B just bail
// as we loop twice the current A will be B and B then A
if (chunkAModulesCount > chunkBModulesCount) continue;
// is chunkA in chunkB?
// we do a cheap check for the hash value
const chunkBHash = chunkModulesHash.get(chunkB);
if ((chunkBHash & chunkAHash) !== chunkAHash) continue;
// compare all modules
for (const m of chunkGraph.getChunkModulesIterable(chunkA)) {
if (!chunkGraph.isModuleInChunk(m, chunkB)) continue loopB;
}
chunkB.ids.push(chunkA.id);
}
}
}
);
});
}
}
module.exports = FlagIncludedChunksPlugin;

node_modules/webpack/lib/optimize/InnerGraph.js

@@ -0,0 +1,346 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Sergey Melyukov @smelukov
*/
"use strict";
const { UsageState } = require("../ExportsInfo");
/** @typedef {import("estree").Node} AnyNode */
/** @typedef {import("../Dependency")} Dependency */
/** @typedef {import("../ModuleGraph")} ModuleGraph */
/** @typedef {import("../ModuleGraphConnection")} ModuleGraphConnection */
/** @typedef {import("../ModuleGraphConnection").ConnectionState} ConnectionState */
/** @typedef {import("../Parser").ParserState} ParserState */
/** @typedef {import("../javascript/JavascriptParser")} JavascriptParser */
/** @typedef {import("../util/runtime").RuntimeSpec} RuntimeSpec */
/** @typedef {Map<TopLevelSymbol | null, Set<string | TopLevelSymbol> | true>} InnerGraph */
/** @typedef {function(boolean | Set<string> | undefined): void} UsageCallback */
/**
* @typedef {Object} StateObject
* @property {InnerGraph} innerGraph
* @property {TopLevelSymbol=} currentTopLevelSymbol
* @property {Map<TopLevelSymbol, Set<UsageCallback>>} usageCallbackMap
*/
/** @typedef {false|StateObject} State */
/** @type {WeakMap<ParserState, State>} */
const parserStateMap = new WeakMap();
const topLevelSymbolTag = Symbol("top level symbol");
/**
* @param {ParserState} parserState parser state
* @returns {State} state
*/
function getState(parserState) {
return parserStateMap.get(parserState);
}
/**
* @param {ParserState} parserState parser state
* @returns {void}
*/
exports.bailout = parserState => {
parserStateMap.set(parserState, false);
};
/**
* @param {ParserState} parserState parser state
* @returns {void}
*/
exports.enable = parserState => {
const state = parserStateMap.get(parserState);
if (state === false) {
return;
}
parserStateMap.set(parserState, {
innerGraph: new Map(),
currentTopLevelSymbol: undefined,
usageCallbackMap: new Map()
});
};
/**
* @param {ParserState} parserState parser state
* @returns {boolean} true, when enabled
*/
exports.isEnabled = parserState => {
const state = parserStateMap.get(parserState);
return !!state;
};
/**
* @param {ParserState} state parser state
* @param {TopLevelSymbol | null} symbol the symbol, or null for all symbols
* @param {string | TopLevelSymbol | true} usage usage data
* @returns {void}
*/
exports.addUsage = (state, symbol, usage) => {
const innerGraphState = getState(state);
if (innerGraphState) {
const { innerGraph } = innerGraphState;
const info = innerGraph.get(symbol);
if (usage === true) {
innerGraph.set(symbol, true);
} else if (info === undefined) {
innerGraph.set(symbol, new Set([usage]));
} else if (info !== true) {
info.add(usage);
}
}
};
/**
* @param {JavascriptParser} parser the parser
* @param {string} name name of variable
* @param {string | TopLevelSymbol | true} usage usage data
* @returns {void}
*/
exports.addVariableUsage = (parser, name, usage) => {
const symbol =
/** @type {TopLevelSymbol} */ (
parser.getTagData(name, topLevelSymbolTag)
) || exports.tagTopLevelSymbol(parser, name);
if (symbol) {
exports.addUsage(parser.state, symbol, usage);
}
};
/**
* @param {ParserState} state parser state
* @returns {void}
*/
exports.inferDependencyUsage = state => {
const innerGraphState = getState(state);
if (!innerGraphState) {
return;
}
const { innerGraph, usageCallbackMap } = innerGraphState;
const processed = new Map();
// flatten graph to terminal nodes (string, undefined or true)
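// Illustrative sketch with hypothetical symbols: starting from
// innerGraph = { A -> {B}, B -> {"x"}, null -> {"y"} },
// A is rewritten to the terminal set {"x"}; once the global null key
// is terminal, its usages are merged into every other key, ending at
// { A -> {"x","y"}, B -> {"x","y"}, null -> {"y"} }.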
const nonTerminal = new Set(innerGraph.keys());
while (nonTerminal.size > 0) {
for (const key of nonTerminal) {
/** @type {Set<string|TopLevelSymbol> | true} */
let newSet = new Set();
let isTerminal = true;
const value = innerGraph.get(key);
let alreadyProcessed = processed.get(key);
if (alreadyProcessed === undefined) {
alreadyProcessed = new Set();
processed.set(key, alreadyProcessed);
}
if (value !== true && value !== undefined) {
for (const item of value) {
alreadyProcessed.add(item);
}
for (const item of value) {
if (typeof item === "string") {
newSet.add(item);
} else {
const itemValue = innerGraph.get(item);
if (itemValue === true) {
newSet = true;
break;
}
if (itemValue !== undefined) {
for (const i of itemValue) {
if (i === key) continue;
if (alreadyProcessed.has(i)) continue;
newSet.add(i);
if (typeof i !== "string") {
isTerminal = false;
}
}
}
}
}
if (newSet === true) {
innerGraph.set(key, true);
} else if (newSet.size === 0) {
innerGraph.set(key, undefined);
} else {
innerGraph.set(key, newSet);
}
}
if (isTerminal) {
nonTerminal.delete(key);
// For the global key, merge with all other keys
if (key === null) {
const globalValue = innerGraph.get(null);
if (globalValue) {
for (const [key, value] of innerGraph) {
if (key !== null && value !== true) {
if (globalValue === true) {
innerGraph.set(key, true);
} else {
const newSet = new Set(value);
for (const item of globalValue) {
newSet.add(item);
}
innerGraph.set(key, newSet);
}
}
}
}
}
}
}
}
/** @type {Map<Dependency, true | Set<string>>} */
for (const [symbol, callbacks] of usageCallbackMap) {
const usage = /** @type {true | Set<string> | undefined} */ (
innerGraph.get(symbol)
);
for (const callback of callbacks) {
callback(usage === undefined ? false : usage);
}
}
};
/**
* @param {ParserState} state parser state
* @param {UsageCallback} onUsageCallback on usage callback
*/
exports.onUsage = (state, onUsageCallback) => {
const innerGraphState = getState(state);
if (innerGraphState) {
const { usageCallbackMap, currentTopLevelSymbol } = innerGraphState;
if (currentTopLevelSymbol) {
let callbacks = usageCallbackMap.get(currentTopLevelSymbol);
if (callbacks === undefined) {
callbacks = new Set();
usageCallbackMap.set(currentTopLevelSymbol, callbacks);
}
callbacks.add(onUsageCallback);
} else {
onUsageCallback(true);
}
} else {
onUsageCallback(undefined);
}
};
/**
* @param {ParserState} state parser state
* @param {TopLevelSymbol} symbol the symbol
*/
exports.setTopLevelSymbol = (state, symbol) => {
const innerGraphState = getState(state);
if (innerGraphState) {
innerGraphState.currentTopLevelSymbol = symbol;
}
};
/**
* @param {ParserState} state parser state
* @returns {TopLevelSymbol|void} the current top level symbol
*/
exports.getTopLevelSymbol = state => {
const innerGraphState = getState(state);
if (innerGraphState) {
return innerGraphState.currentTopLevelSymbol;
}
};
/**
* @param {JavascriptParser} parser parser
* @param {string} name name of variable
* @returns {TopLevelSymbol} symbol
*/
exports.tagTopLevelSymbol = (parser, name) => {
const innerGraphState = getState(parser.state);
if (!innerGraphState) return;
parser.defineVariable(name);
const existingTag = /** @type {TopLevelSymbol} */ (
parser.getTagData(name, topLevelSymbolTag)
);
if (existingTag) {
return existingTag;
}
const fn = new TopLevelSymbol(name);
parser.tagVariable(name, topLevelSymbolTag, fn);
return fn;
};
/**
* @param {Dependency} dependency the dependency
* @param {Set<string> | boolean} usedByExports usedByExports info
* @param {ModuleGraph} moduleGraph moduleGraph
* @param {RuntimeSpec} runtime runtime
* @returns {boolean} false, when unused. Otherwise true
*/
exports.isDependencyUsedByExports = (
dependency,
usedByExports,
moduleGraph,
runtime
) => {
if (usedByExports === false) return false;
if (usedByExports !== true && usedByExports !== undefined) {
const selfModule = moduleGraph.getParentModule(dependency);
const exportsInfo = moduleGraph.getExportsInfo(selfModule);
let used = false;
for (const exportName of usedByExports) {
if (exportsInfo.getUsed(exportName, runtime) !== UsageState.Unused)
used = true;
}
if (!used) return false;
}
return true;
};
/**
* @param {Dependency} dependency the dependency
* @param {Set<string> | boolean} usedByExports usedByExports info
* @param {ModuleGraph} moduleGraph moduleGraph
* @returns {null | false | function(ModuleGraphConnection, RuntimeSpec): ConnectionState} function to determine if the connection is active
*/
exports.getDependencyUsedByExportsCondition = (
dependency,
usedByExports,
moduleGraph
) => {
if (usedByExports === false) return false;
if (usedByExports !== true && usedByExports !== undefined) {
const selfModule = moduleGraph.getParentModule(dependency);
const exportsInfo = moduleGraph.getExportsInfo(selfModule);
return (connections, runtime) => {
for (const exportName of usedByExports) {
if (exportsInfo.getUsed(exportName, runtime) !== UsageState.Unused)
return true;
}
return false;
};
}
return null;
};
class TopLevelSymbol {
/**
* @param {string} name name of the variable
*/
constructor(name) {
this.name = name;
}
}
exports.TopLevelSymbol = TopLevelSymbol;
exports.topLevelSymbolTag = topLevelSymbolTag;
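In rough terms the API is driven like this (a sketch, not the real call sites; see InnerGraphPlugin below for those):

// while walking `const a = () => b();` in a parser hook:
const symA = InnerGraph.tagTopLevelSymbol(parser, "a");
InnerGraph.setTopLevelSymbol(parser.state, symA);
// a reference to `b` in the initializer records that `b` is used by `a`
InnerGraph.addVariableUsage(parser, "b", symA);
// after parsing, flatten the graph and fire the registered usage callbacks
InnerGraph.inferDependencyUsage(parser.state);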

node_modules/webpack/lib/optimize/InnerGraphPlugin.js

@@ -0,0 +1,368 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const PureExpressionDependency = require("../dependencies/PureExpressionDependency");
const InnerGraph = require("./InnerGraph");
/** @typedef {import("estree").ClassDeclaration} ClassDeclarationNode */
/** @typedef {import("estree").ClassExpression} ClassExpressionNode */
/** @typedef {import("estree").Node} Node */
/** @typedef {import("estree").VariableDeclarator} VariableDeclaratorNode */
/** @typedef {import("../Compiler")} Compiler */
/** @typedef {import("../Dependency")} Dependency */
/** @typedef {import("../dependencies/HarmonyImportSpecifierDependency")} HarmonyImportSpecifierDependency */
/** @typedef {import("../javascript/JavascriptParser")} JavascriptParser */
/** @typedef {import("./InnerGraph").InnerGraph} InnerGraph */
/** @typedef {import("./InnerGraph").TopLevelSymbol} TopLevelSymbol */
const { topLevelSymbolTag } = InnerGraph;
class InnerGraphPlugin {
/**
* Apply the plugin
* @param {Compiler} compiler the compiler instance
* @returns {void}
*/
apply(compiler) {
compiler.hooks.compilation.tap(
"InnerGraphPlugin",
(compilation, { normalModuleFactory }) => {
const logger = compilation.getLogger("webpack.InnerGraphPlugin");
compilation.dependencyTemplates.set(
PureExpressionDependency,
new PureExpressionDependency.Template()
);
/**
* @param {JavascriptParser} parser the parser
* @param {Object} parserOptions options
* @returns {void}
*/
const handler = (parser, parserOptions) => {
const onUsageSuper = sup => {
InnerGraph.onUsage(parser.state, usedByExports => {
switch (usedByExports) {
case undefined:
case true:
return;
default: {
const dep = new PureExpressionDependency(sup.range);
dep.loc = sup.loc;
dep.usedByExports = usedByExports;
parser.state.module.addDependency(dep);
break;
}
}
});
};
parser.hooks.program.tap("InnerGraphPlugin", () => {
InnerGraph.enable(parser.state);
});
parser.hooks.finish.tap("InnerGraphPlugin", () => {
if (!InnerGraph.isEnabled(parser.state)) return;
logger.time("infer dependency usage");
InnerGraph.inferDependencyUsage(parser.state);
logger.timeAggregate("infer dependency usage");
});
// During prewalking the following datastructures are filled with
// nodes that have a TopLevelSymbol assigned and
// variables are tagged with the assigned TopLevelSymbol
// We distinguish 3 types of nodes:
// 1. full statements (export default, function declaration)
// 2. classes (class declaration, class expression)
// 3. variable declarators (const x = ...)
/** @type {WeakMap<Node, TopLevelSymbol>} */
const statementWithTopLevelSymbol = new WeakMap();
/** @type {WeakMap<Node, Node>} */
const statementPurePart = new WeakMap();
/** @type {WeakMap<ClassExpressionNode | ClassDeclarationNode, TopLevelSymbol>} */
const classWithTopLevelSymbol = new WeakMap();
/** @type {WeakMap<VariableDeclaratorNode, TopLevelSymbol>} */
const declWithTopLevelSymbol = new WeakMap();
/** @type {WeakSet<VariableDeclaratorNode>} */
const pureDeclarators = new WeakSet();
// The following hooks are used during prewalking:
parser.hooks.preStatement.tap("InnerGraphPlugin", statement => {
if (!InnerGraph.isEnabled(parser.state)) return;
if (parser.scope.topLevelScope === true) {
if (statement.type === "FunctionDeclaration") {
const name = statement.id ? statement.id.name : "*default*";
const fn = InnerGraph.tagTopLevelSymbol(parser, name);
statementWithTopLevelSymbol.set(statement, fn);
return true;
}
}
});
parser.hooks.blockPreStatement.tap("InnerGraphPlugin", statement => {
if (!InnerGraph.isEnabled(parser.state)) return;
if (parser.scope.topLevelScope === true) {
if (statement.type === "ClassDeclaration") {
const name = statement.id ? statement.id.name : "*default*";
const fn = InnerGraph.tagTopLevelSymbol(parser, name);
classWithTopLevelSymbol.set(statement, fn);
return true;
}
if (statement.type === "ExportDefaultDeclaration") {
const name = "*default*";
const fn = InnerGraph.tagTopLevelSymbol(parser, name);
const decl = statement.declaration;
if (
decl.type === "ClassExpression" ||
decl.type === "ClassDeclaration"
) {
classWithTopLevelSymbol.set(decl, fn);
} else if (parser.isPure(decl, statement.range[0])) {
statementWithTopLevelSymbol.set(statement, fn);
if (
!decl.type.endsWith("FunctionExpression") &&
!decl.type.endsWith("Declaration") &&
decl.type !== "Literal"
) {
statementPurePart.set(statement, decl);
}
}
}
}
});
parser.hooks.preDeclarator.tap(
"InnerGraphPlugin",
(decl, statement) => {
if (!InnerGraph.isEnabled(parser.state)) return;
if (
parser.scope.topLevelScope === true &&
decl.init &&
decl.id.type === "Identifier"
) {
const name = decl.id.name;
if (decl.init.type === "ClassExpression") {
const fn = InnerGraph.tagTopLevelSymbol(parser, name);
classWithTopLevelSymbol.set(decl.init, fn);
} else if (parser.isPure(decl.init, decl.id.range[1])) {
const fn = InnerGraph.tagTopLevelSymbol(parser, name);
declWithTopLevelSymbol.set(decl, fn);
if (
!decl.init.type.endsWith("FunctionExpression") &&
decl.init.type !== "Literal"
) {
pureDeclarators.add(decl);
}
return true;
}
}
}
);
// During real walking we set the TopLevelSymbol state to the assigned
// TopLevelSymbol by using the filled datastructures.
// In addition to tracking TopLevelSymbols, we sometimes need to
// add a PureExpressionDependency. This is needed to skip execution
// of pure expressions, even when they are not dropped due to
// minimizing. Otherwise symbols used there might not exist anymore
// as they are removed as unused by this optimization
// When we find a reference to a TopLevelSymbol, we register a
// TopLevelSymbol dependency from TopLevelSymbol in state to the
// referenced TopLevelSymbol. This way we get a graph of all
// TopLevelSymbols.
// The following hooks are called during walking:
parser.hooks.statement.tap("InnerGraphPlugin", statement => {
if (!InnerGraph.isEnabled(parser.state)) return;
if (parser.scope.topLevelScope === true) {
InnerGraph.setTopLevelSymbol(parser.state, undefined);
const fn = statementWithTopLevelSymbol.get(statement);
if (fn) {
InnerGraph.setTopLevelSymbol(parser.state, fn);
const purePart = statementPurePart.get(statement);
if (purePart) {
InnerGraph.onUsage(parser.state, usedByExports => {
switch (usedByExports) {
case undefined:
case true:
return;
default: {
const dep = new PureExpressionDependency(
purePart.range
);
dep.loc = statement.loc;
dep.usedByExports = usedByExports;
parser.state.module.addDependency(dep);
break;
}
}
});
}
}
}
});
parser.hooks.classExtendsExpression.tap(
"InnerGraphPlugin",
(expr, statement) => {
if (!InnerGraph.isEnabled(parser.state)) return;
if (parser.scope.topLevelScope === true) {
const fn = classWithTopLevelSymbol.get(statement);
if (
fn &&
parser.isPure(
expr,
statement.id ? statement.id.range[1] : statement.range[0]
)
) {
InnerGraph.setTopLevelSymbol(parser.state, fn);
onUsageSuper(expr);
}
}
}
);
parser.hooks.classBodyElement.tap(
"InnerGraphPlugin",
(element, classDefinition) => {
if (!InnerGraph.isEnabled(parser.state)) return;
if (parser.scope.topLevelScope === true) {
const fn = classWithTopLevelSymbol.get(classDefinition);
if (fn) {
InnerGraph.setTopLevelSymbol(parser.state, undefined);
}
}
}
);
parser.hooks.classBodyValue.tap(
"InnerGraphPlugin",
(expression, element, classDefinition) => {
if (!InnerGraph.isEnabled(parser.state)) return;
if (parser.scope.topLevelScope === true) {
const fn = classWithTopLevelSymbol.get(classDefinition);
if (fn) {
if (
!element.static ||
parser.isPure(
expression,
element.key ? element.key.range[1] : element.range[0]
)
) {
InnerGraph.setTopLevelSymbol(parser.state, fn);
if (element.type !== "MethodDefinition" && element.static) {
InnerGraph.onUsage(parser.state, usedByExports => {
switch (usedByExports) {
case undefined:
case true:
return;
default: {
const dep = new PureExpressionDependency(
expression.range
);
dep.loc = expression.loc;
dep.usedByExports = usedByExports;
parser.state.module.addDependency(dep);
break;
}
}
});
}
} else {
InnerGraph.setTopLevelSymbol(parser.state, undefined);
}
}
}
}
);
parser.hooks.declarator.tap("InnerGraphPlugin", (decl, statement) => {
if (!InnerGraph.isEnabled(parser.state)) return;
const fn = declWithTopLevelSymbol.get(decl);
if (fn) {
InnerGraph.setTopLevelSymbol(parser.state, fn);
if (pureDeclarators.has(decl)) {
if (decl.init.type === "ClassExpression") {
if (decl.init.superClass) {
onUsageSuper(decl.init.superClass);
}
} else {
InnerGraph.onUsage(parser.state, usedByExports => {
switch (usedByExports) {
case undefined:
case true:
return;
default: {
const dep = new PureExpressionDependency(
decl.init.range
);
dep.loc = decl.loc;
dep.usedByExports = usedByExports;
parser.state.module.addDependency(dep);
break;
}
}
});
}
}
parser.walkExpression(decl.init);
InnerGraph.setTopLevelSymbol(parser.state, undefined);
return true;
}
});
parser.hooks.expression
.for(topLevelSymbolTag)
.tap("InnerGraphPlugin", () => {
const topLevelSymbol = /** @type {TopLevelSymbol} */ (
parser.currentTagData
);
const currentTopLevelSymbol = InnerGraph.getTopLevelSymbol(
parser.state
);
InnerGraph.addUsage(
parser.state,
topLevelSymbol,
currentTopLevelSymbol || true
);
});
parser.hooks.assign
.for(topLevelSymbolTag)
.tap("InnerGraphPlugin", expr => {
if (!InnerGraph.isEnabled(parser.state)) return;
if (expr.operator === "=") return true;
});
};
normalModuleFactory.hooks.parser
.for("javascript/auto")
.tap("InnerGraphPlugin", handler);
normalModuleFactory.hooks.parser
.for("javascript/esm")
.tap("InnerGraphPlugin", handler);
compilation.hooks.finishModules.tap("InnerGraphPlugin", () => {
logger.timeAggregateEnd("infer dependency usage");
});
}
);
}
}
module.exports = InnerGraphPlugin;
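The net effect is finer-grained tree shaking: a pure top-level statement survives only if an export that (transitively) references it is used. A hypothetical module, assuming production mode with usedExports enabled:

// util.js
const table = buildTable(); // pure call, referenced only by `unusedExport`
export const unusedExport = () => table;
export const usedExport = x => x + 1;
// If consumers import only `usedExport`, the inner graph marks `table`
// and `unusedExport` as unused, and the PureExpressionDependency lets
// the `buildTable()` call be dropped from the output.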


@@ -0,0 +1,256 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const { STAGE_ADVANCED } = require("../OptimizationStages");
const LazyBucketSortedSet = require("../util/LazyBucketSortedSet");
const { compareChunks } = require("../util/comparators");
const createSchemaValidation = require("../util/create-schema-validation");
/** @typedef {import("../../declarations/plugins/optimize/LimitChunkCountPlugin").LimitChunkCountPluginOptions} LimitChunkCountPluginOptions */
/** @typedef {import("../Chunk")} Chunk */
/** @typedef {import("../Compiler")} Compiler */
const validate = createSchemaValidation(
require("../../schemas/plugins/optimize/LimitChunkCountPlugin.check.js"),
() => require("../../schemas/plugins/optimize/LimitChunkCountPlugin.json"),
{
name: "Limit Chunk Count Plugin",
baseDataPath: "options"
}
);
/**
* @typedef {Object} ChunkCombination
* @property {boolean} deleted this is set to true when combination was removed
* @property {number} sizeDiff
* @property {number} integratedSize
* @property {Chunk} a
* @property {Chunk} b
* @property {number} aIdx
* @property {number} bIdx
* @property {number} aSize
* @property {number} bSize
*/
const addToSetMap = (map, key, value) => {
const set = map.get(key);
if (set === undefined) {
map.set(key, new Set([value]));
} else {
set.add(value);
}
};
class LimitChunkCountPlugin {
/**
* @param {LimitChunkCountPluginOptions=} options options object
*/
constructor(options) {
validate(options);
this.options = options;
}
/**
* @param {Compiler} compiler the webpack compiler
* @returns {void}
*/
apply(compiler) {
const options = this.options;
compiler.hooks.compilation.tap("LimitChunkCountPlugin", compilation => {
compilation.hooks.optimizeChunks.tap(
{
name: "LimitChunkCountPlugin",
stage: STAGE_ADVANCED
},
chunks => {
const chunkGraph = compilation.chunkGraph;
const maxChunks = options.maxChunks;
if (!maxChunks) return;
if (maxChunks < 1) return;
if (compilation.chunks.size <= maxChunks) return;
let remainingChunksToMerge = compilation.chunks.size - maxChunks;
// order chunks in a deterministic way
const compareChunksWithGraph = compareChunks(chunkGraph);
const orderedChunks = Array.from(chunks).sort(compareChunksWithGraph);
// create a lazy sorted data structure to keep all combinations
// this is large. Size = chunks * (chunks - 1) / 2
// It uses a multi layer bucket sort plus normal sort in the last layer
// It's also lazy so only accessed buckets are sorted
const combinations = new LazyBucketSortedSet(
// Layer 1: ordered by largest size benefit
c => c.sizeDiff,
(a, b) => b - a,
// Layer 2: ordered by smallest combined size
c => c.integratedSize,
(a, b) => a - b,
// Layer 3: ordered by position difference in orderedChunks (-> to be deterministic)
c => c.bIdx - c.aIdx,
(a, b) => a - b,
// Layer 4: ordered by position in orderedChunks (-> to be deterministic)
(a, b) => a.bIdx - b.bIdx
);
// we keep a mapping from chunk to all combinations
// but this mapping is not kept up-to-date with deletions
// so the `deleted` flag needs to be considered when iterating this
/** @type {Map<Chunk, Set<ChunkCombination>>} */
const combinationsByChunk = new Map();
orderedChunks.forEach((b, bIdx) => {
// create combination pairs with size and integrated size
for (let aIdx = 0; aIdx < bIdx; aIdx++) {
const a = orderedChunks[aIdx];
// filter pairs that can not be integrated!
if (!chunkGraph.canChunksBeIntegrated(a, b)) continue;
const integratedSize = chunkGraph.getIntegratedChunksSize(
a,
b,
options
);
const aSize = chunkGraph.getChunkSize(a, options);
const bSize = chunkGraph.getChunkSize(b, options);
const c = {
deleted: false,
sizeDiff: aSize + bSize - integratedSize,
integratedSize,
a,
b,
aIdx,
bIdx,
aSize,
bSize
};
combinations.add(c);
addToSetMap(combinationsByChunk, a, c);
addToSetMap(combinationsByChunk, b, c);
}
});
// list of modified chunks during this run
// combinations affected by this change are skipped to allow
// further optimizations
/** @type {Set<Chunk>} */
const modifiedChunks = new Set();
let changed = false;
// eslint-disable-next-line no-constant-condition
loop: while (true) {
const combination = combinations.popFirst();
if (combination === undefined) break;
combination.deleted = true;
const { a, b, integratedSize } = combination;
// skip over pair when
// one of the already merged chunks is a parent of one of the chunks
if (modifiedChunks.size > 0) {
const queue = new Set(a.groupsIterable);
for (const group of b.groupsIterable) {
queue.add(group);
}
for (const group of queue) {
for (const mChunk of modifiedChunks) {
if (mChunk !== a && mChunk !== b && mChunk.isInGroup(group)) {
// This is a potential pair which needs recalculation
// We can't do that now, but it may merge before the following pairs,
// so we leave space for it and consider the chunks as modified
// just for the worst case
remainingChunksToMerge--;
if (remainingChunksToMerge <= 0) break loop;
modifiedChunks.add(a);
modifiedChunks.add(b);
continue loop;
}
}
for (const parent of group.parentsIterable) {
queue.add(parent);
}
}
}
// merge the chunks
if (chunkGraph.canChunksBeIntegrated(a, b)) {
chunkGraph.integrateChunks(a, b);
compilation.chunks.delete(b);
// flag chunk a as modified as further optimizations are possible for all children here
modifiedChunks.add(a);
changed = true;
remainingChunksToMerge--;
if (remainingChunksToMerge <= 0) break;
// Update all affected combinations
// the kept chunk's old combinations have stale sizes, delete them
// the removed chunk's combinations are updated below and reused instead
for (const combination of combinationsByChunk.get(a)) {
if (combination.deleted) continue;
combination.deleted = true;
combinations.delete(combination);
}
// Update the removed chunk's combinations with new sizes and point them at the kept chunk
for (const combination of combinationsByChunk.get(b)) {
if (combination.deleted) continue;
if (combination.a === b) {
if (!chunkGraph.canChunksBeIntegrated(a, combination.b)) {
combination.deleted = true;
combinations.delete(combination);
continue;
}
// Update size
const newIntegratedSize = chunkGraph.getIntegratedChunksSize(
a,
combination.b,
options
);
const finishUpdate = combinations.startUpdate(combination);
combination.a = a;
combination.integratedSize = newIntegratedSize;
combination.aSize = integratedSize;
combination.sizeDiff =
combination.bSize + integratedSize - newIntegratedSize;
finishUpdate();
} else if (combination.b === b) {
if (!chunkGraph.canChunksBeIntegrated(combination.a, a)) {
combination.deleted = true;
combinations.delete(combination);
continue;
}
// Update size
const newIntegratedSize = chunkGraph.getIntegratedChunksSize(
combination.a,
a,
options
);
const finishUpdate = combinations.startUpdate(combination);
combination.b = a;
combination.integratedSize = newIntegratedSize;
combination.bSize = integratedSize;
combination.sizeDiff =
integratedSize + combination.aSize - newIntegratedSize;
finishUpdate();
}
}
combinationsByChunk.set(a, combinationsByChunk.get(b));
combinationsByChunk.delete(b);
}
}
if (changed) return true;
}
);
});
}
}
module.exports = LimitChunkCountPlugin;
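A usage sketch (maxChunks is the option documented by the schema above; the options object is also passed through to the getChunkSize calls):

const webpack = require("webpack");

module.exports = {
  plugins: [
    // repeatedly merge the cheapest pair until at most 5 chunks remain
    new webpack.optimize.LimitChunkCountPlugin({ maxChunks: 5 })
  ]
};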


@@ -0,0 +1,177 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const { UsageState } = require("../ExportsInfo");
const {
numberToIdentifier,
NUMBER_OF_IDENTIFIER_START_CHARS,
NUMBER_OF_IDENTIFIER_CONTINUATION_CHARS
} = require("../Template");
const { assignDeterministicIds } = require("../ids/IdHelpers");
const { compareSelect, compareStringsNumeric } = require("../util/comparators");
/** @typedef {import("../Compiler")} Compiler */
/** @typedef {import("../ExportsInfo")} ExportsInfo */
/** @typedef {import("../ExportsInfo").ExportInfo} ExportInfo */
/**
* @param {ExportsInfo} exportsInfo exports info
* @returns {boolean} mangle is possible
*/
const canMangle = exportsInfo => {
if (exportsInfo.otherExportsInfo.getUsed(undefined) !== UsageState.Unused)
return false;
let hasSomethingToMangle = false;
for (const exportInfo of exportsInfo.exports) {
if (exportInfo.canMangle === true) {
hasSomethingToMangle = true;
}
}
return hasSomethingToMangle;
};
// Sort by name
const comparator = compareSelect(e => e.name, compareStringsNumeric);
/**
* @param {boolean} deterministic use deterministic names
* @param {ExportsInfo} exportsInfo exports info
* @param {boolean} isNamespace is namespace object
* @returns {void}
*/
const mangleExportsInfo = (deterministic, exportsInfo, isNamespace) => {
if (!canMangle(exportsInfo)) return;
const usedNames = new Set();
/** @type {ExportInfo[]} */
const mangleableExports = [];
// Avoid renaming exports that are not provided when
// 1. it's not a namespace export: non-provided exports can be found in the prototype chain
// 2. there are other provided exports and deterministic mode is chosen:
// non-provided exports would break the determinism
let avoidMangleNonProvided = !isNamespace;
if (!avoidMangleNonProvided && deterministic) {
for (const exportInfo of exportsInfo.ownedExports) {
if (exportInfo.provided !== false) {
avoidMangleNonProvided = true;
break;
}
}
}
for (const exportInfo of exportsInfo.ownedExports) {
const name = exportInfo.name;
if (!exportInfo.hasUsedName()) {
if (
// Can the export be mangled?
exportInfo.canMangle !== true ||
// Never rename 1 char exports
(name.length === 1 && /^[a-zA-Z0-9_$]/.test(name)) ||
// Don't rename 2 char exports in deterministic mode
(deterministic &&
name.length === 2 &&
/^[a-zA-Z_$][a-zA-Z0-9_$]|^[1-9][0-9]/.test(name)) ||
// Don't rename exports that are not provided
(avoidMangleNonProvided && exportInfo.provided !== true)
) {
exportInfo.setUsedName(name);
usedNames.add(name);
} else {
mangleableExports.push(exportInfo);
}
}
if (exportInfo.exportsInfoOwned) {
const used = exportInfo.getUsed(undefined);
if (
used === UsageState.OnlyPropertiesUsed ||
used === UsageState.Unused
) {
mangleExportsInfo(deterministic, exportInfo.exportsInfo, false);
}
}
}
if (deterministic) {
assignDeterministicIds(
mangleableExports,
e => e.name,
comparator,
(e, id) => {
const name = numberToIdentifier(id);
const size = usedNames.size;
usedNames.add(name);
if (size === usedNames.size) return false;
e.setUsedName(name);
return true;
},
[
NUMBER_OF_IDENTIFIER_START_CHARS,
NUMBER_OF_IDENTIFIER_START_CHARS *
NUMBER_OF_IDENTIFIER_CONTINUATION_CHARS
],
NUMBER_OF_IDENTIFIER_CONTINUATION_CHARS,
usedNames.size
);
} else {
const usedExports = [];
const unusedExports = [];
for (const exportInfo of mangleableExports) {
if (exportInfo.getUsed(undefined) === UsageState.Unused) {
unusedExports.push(exportInfo);
} else {
usedExports.push(exportInfo);
}
}
usedExports.sort(comparator);
unusedExports.sort(comparator);
let i = 0;
for (const list of [usedExports, unusedExports]) {
for (const exportInfo of list) {
let name;
do {
name = numberToIdentifier(i++);
} while (usedNames.has(name));
exportInfo.setUsedName(name);
}
}
}
};
class MangleExportsPlugin {
/**
* @param {boolean} deterministic use deterministic names
*/
constructor(deterministic) {
this._deterministic = deterministic;
}
/**
* Apply the plugin
* @param {Compiler} compiler the compiler instance
* @returns {void}
*/
apply(compiler) {
const { _deterministic: deterministic } = this;
compiler.hooks.compilation.tap("MangleExportsPlugin", compilation => {
const moduleGraph = compilation.moduleGraph;
compilation.hooks.optimizeCodeGeneration.tap(
"MangleExportsPlugin",
modules => {
if (compilation.moduleMemCaches) {
throw new Error(
"optimization.mangleExports can't be used with cacheUnaffected as export mangling is a global effect"
);
}
for (const module of modules) {
const isNamespace =
module.buildMeta && module.buildMeta.exportsType === "namespace";
const exportsInfo = moduleGraph.getExportsInfo(module);
mangleExportsInfo(deterministic, exportsInfo, isNamespace);
}
}
);
});
}
}
module.exports = MangleExportsPlugin;
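For intuition, a hypothetical before/after with optimization.mangleExports set to "deterministic":

// source
export const computeTotal = items => items.length;
export const internalHelper = () => 0;
// after mangling, the runtime exports object is keyed by short names,
// e.g. { computeTotal, internalHelper } -> { aB, Zx } (illustrative
// names; the actual ids come from assignDeterministicIds above)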


@@ -0,0 +1,115 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const { STAGE_BASIC } = require("../OptimizationStages");
const { runtimeEqual } = require("../util/runtime");
/** @typedef {import("../Compiler")} Compiler */
class MergeDuplicateChunksPlugin {
/**
* @param {Compiler} compiler the compiler
* @returns {void}
*/
apply(compiler) {
compiler.hooks.compilation.tap(
"MergeDuplicateChunksPlugin",
compilation => {
compilation.hooks.optimizeChunks.tap(
{
name: "MergeDuplicateChunksPlugin",
stage: STAGE_BASIC
},
chunks => {
const { chunkGraph, moduleGraph } = compilation;
// remember already tested chunks for performance
const notDuplicates = new Set();
// for each chunk
for (const chunk of chunks) {
// track a Set of all chunks that could be duplicates
let possibleDuplicates;
for (const module of chunkGraph.getChunkModulesIterable(chunk)) {
if (possibleDuplicates === undefined) {
// when possibleDuplicates is not yet set,
// create a new Set from chunks of the current module
// including only chunks with the same number of modules
for (const dup of chunkGraph.getModuleChunksIterable(
module
)) {
if (
dup !== chunk &&
chunkGraph.getNumberOfChunkModules(chunk) ===
chunkGraph.getNumberOfChunkModules(dup) &&
!notDuplicates.has(dup)
) {
// delay allocating the new Set until here, to reduce memory pressure
if (possibleDuplicates === undefined) {
possibleDuplicates = new Set();
}
possibleDuplicates.add(dup);
}
}
// when no chunk is possible we can break here
if (possibleDuplicates === undefined) break;
} else {
// validate existing possible duplicates
for (const dup of possibleDuplicates) {
// remove possible duplicate when module is not contained
if (!chunkGraph.isModuleInChunk(module, dup)) {
possibleDuplicates.delete(dup);
}
}
// when all chunks have been removed we can break here
if (possibleDuplicates.size === 0) break;
}
}
// when we found duplicates
if (
possibleDuplicates !== undefined &&
possibleDuplicates.size > 0
) {
outer: for (const otherChunk of possibleDuplicates) {
if (otherChunk.hasRuntime() !== chunk.hasRuntime()) continue;
if (chunkGraph.getNumberOfEntryModules(chunk) > 0) continue;
if (chunkGraph.getNumberOfEntryModules(otherChunk) > 0)
continue;
if (!runtimeEqual(chunk.runtime, otherChunk.runtime)) {
for (const module of chunkGraph.getChunkModulesIterable(
chunk
)) {
const exportsInfo = moduleGraph.getExportsInfo(module);
if (
!exportsInfo.isEquallyUsed(
chunk.runtime,
otherChunk.runtime
)
) {
continue outer;
}
}
}
// merge them
if (chunkGraph.canChunksBeIntegrated(chunk, otherChunk)) {
chunkGraph.integrateChunks(chunk, otherChunk);
compilation.chunks.delete(otherChunk);
}
}
}
// don't check already processed chunks twice
notDuplicates.add(chunk);
}
}
);
}
);
}
}
module.exports = MergeDuplicateChunksPlugin;
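A minimal scenario this plugin collapses (sketch): two dynamic imports that end up containing exactly the same modules.

// a.js
import("./shared"); // produces a chunk containing only ./shared
// b.js
import("./shared"); // produces a second chunk with the identical module set
// The module sets match and the chunks can be integrated, so one chunk
// is merged into the other and a single file is emitted.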

node_modules/webpack/lib/optimize/MinChunkSizePlugin.js

@@ -0,0 +1,113 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const { STAGE_ADVANCED } = require("../OptimizationStages");
const createSchemaValidation = require("../util/create-schema-validation");
/** @typedef {import("../../declarations/plugins/optimize/MinChunkSizePlugin").MinChunkSizePluginOptions} MinChunkSizePluginOptions */
/** @typedef {import("../Chunk")} Chunk */
/** @typedef {import("../Compiler")} Compiler */
const validate = createSchemaValidation(
require("../../schemas/plugins/optimize/MinChunkSizePlugin.check.js"),
() => require("../../schemas/plugins/optimize/MinChunkSizePlugin.json"),
{
name: "Min Chunk Size Plugin",
baseDataPath: "options"
}
);
class MinChunkSizePlugin {
/**
* @param {MinChunkSizePluginOptions} options options object
*/
constructor(options) {
validate(options);
this.options = options;
}
/**
* Apply the plugin
* @param {Compiler} compiler the compiler instance
* @returns {void}
*/
apply(compiler) {
const options = this.options;
const minChunkSize = options.minChunkSize;
compiler.hooks.compilation.tap("MinChunkSizePlugin", compilation => {
compilation.hooks.optimizeChunks.tap(
{
name: "MinChunkSizePlugin",
stage: STAGE_ADVANCED
},
chunks => {
const chunkGraph = compilation.chunkGraph;
const equalOptions = {
chunkOverhead: 1,
entryChunkMultiplicator: 1
};
const chunkSizesMap = new Map();
/** @type {[Chunk, Chunk][]} */
const combinations = [];
/** @type {Chunk[]} */
const smallChunks = [];
const visitedChunks = [];
for (const a of chunks) {
// check if the chunk's size is smaller than minChunkSize
// and filter pairs that can NOT be integrated!
if (chunkGraph.getChunkSize(a, equalOptions) < minChunkSize) {
smallChunks.push(a);
for (const b of visitedChunks) {
if (chunkGraph.canChunksBeIntegrated(b, a))
combinations.push([b, a]);
}
} else {
for (const b of smallChunks) {
if (chunkGraph.canChunksBeIntegrated(b, a))
combinations.push([b, a]);
}
}
chunkSizesMap.set(a, chunkGraph.getChunkSize(a, options));
visitedChunks.push(a);
}
const sortedSizeFilteredExtendedPairCombinations = combinations
.map(pair => {
// extend combination pairs with size and integrated size
const a = chunkSizesMap.get(pair[0]);
const b = chunkSizesMap.get(pair[1]);
const ab = chunkGraph.getIntegratedChunksSize(
pair[0],
pair[1],
options
);
/** @type {[number, number, Chunk, Chunk]} */
const extendedPair = [a + b - ab, ab, pair[0], pair[1]];
return extendedPair;
})
.sort((a, b) => {
// sadly JavaScript does an in-place sort here
// sort by size
const diff = b[0] - a[0];
if (diff !== 0) return diff;
return a[1] - b[1];
});
if (sortedSizeFilteredExtendedPairCombinations.length === 0) return;
const pair = sortedSizeFilteredExtendedPairCombinations[0];
chunkGraph.integrateChunks(pair[2], pair[3]);
compilation.chunks.delete(pair[3]);
return true;
}
);
});
}
}
module.exports = MinChunkSizePlugin;
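A usage sketch (minChunkSize is the required schema option; each pass merges exactly one best pair and returns true so optimizeChunks runs again until no chunk is below the threshold):

const webpack = require("webpack");

module.exports = {
  plugins: [
    new webpack.optimize.MinChunkSizePlugin({ minChunkSize: 10000 })
  ]
};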

node_modules/webpack/lib/optimize/MinMaxSizeWarning.js

@@ -0,0 +1,30 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const SizeFormatHelpers = require("../SizeFormatHelpers");
const WebpackError = require("../WebpackError");
class MinMaxSizeWarning extends WebpackError {
constructor(keys, minSize, maxSize) {
let keysMessage = "Fallback cache group";
if (keys) {
keysMessage =
keys.length > 1
? `Cache groups ${keys.sort().join(", ")}`
: `Cache group ${keys[0]}`;
}
super(
`SplitChunksPlugin\n` +
`${keysMessage}\n` +
`Configured minSize (${SizeFormatHelpers.formatSize(minSize)}) is ` +
`bigger than maxSize (${SizeFormatHelpers.formatSize(maxSize)}).\n` +
"This seem to be a invalid optimization.splitChunks configuration."
);
}
}
module.exports = MinMaxSizeWarning;

File diff suppressed because it is too large


@@ -0,0 +1,408 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const { SyncBailHook } = require("tapable");
const { RawSource, CachedSource, CompatSource } = require("webpack-sources");
const Compilation = require("../Compilation");
const WebpackError = require("../WebpackError");
const { compareSelect, compareStrings } = require("../util/comparators");
const createHash = require("../util/createHash");
/** @typedef {import("webpack-sources").Source} Source */
/** @typedef {import("../Compilation").AssetInfo} AssetInfo */
/** @typedef {import("../Compiler")} Compiler */
const EMPTY_SET = new Set();
const addToList = (itemOrItems, list) => {
if (Array.isArray(itemOrItems)) {
for (const item of itemOrItems) {
list.add(item);
}
} else if (itemOrItems) {
list.add(itemOrItems);
}
};
/**
* @template T
* @param {T[]} input list
* @param {function(T): Buffer} fn map function
* @returns {Buffer[]} buffers without duplicates
*/
const mapAndDeduplicateBuffers = (input, fn) => {
// Buffer.equals compares size first so this should be efficient enough
// If it becomes a performance problem we can use a map and group by size
// instead of looping over all assets.
const result = [];
outer: for (const value of input) {
const buf = fn(value);
for (const other of result) {
if (buf.equals(other)) continue outer;
}
result.push(buf);
}
return result;
};
/**
* Escapes regular expression metacharacters
* @param {string} str String to quote
* @returns {string} Escaped string
*/
const quoteMeta = str => {
return str.replace(/[-[\]\\/{}()*+?.^$|]/g, "\\$&");
};
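// e.g. quoteMeta("a.b[1]") returns "a\.b\[1\]", so arbitrary hash strings
// can be OR-ed together safely into the combined RegExp built below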
const cachedSourceMap = new WeakMap();
const toCachedSource = source => {
if (source instanceof CachedSource) {
return source;
}
const entry = cachedSourceMap.get(source);
if (entry !== undefined) return entry;
const newSource = new CachedSource(CompatSource.from(source));
cachedSourceMap.set(source, newSource);
return newSource;
};
/**
* @typedef {Object} AssetInfoForRealContentHash
* @property {string} name
* @property {AssetInfo} info
* @property {Source} source
* @property {RawSource | undefined} newSource
* @property {RawSource | undefined} newSourceWithoutOwn
* @property {string} content
* @property {Set<string>} ownHashes
* @property {Promise} contentComputePromise
* @property {Promise} contentComputeWithoutOwnPromise
* @property {Set<string>} referencedHashes
* @property {Set<string>} hashes
*/
/**
* @typedef {Object} CompilationHooks
* @property {SyncBailHook<[Buffer[], string], string>} updateHash
*/
/** @type {WeakMap<Compilation, CompilationHooks>} */
const compilationHooksMap = new WeakMap();
class RealContentHashPlugin {
/**
* @param {Compilation} compilation the compilation
* @returns {CompilationHooks} the attached hooks
*/
static getCompilationHooks(compilation) {
if (!(compilation instanceof Compilation)) {
throw new TypeError(
"The 'compilation' argument must be an instance of Compilation"
);
}
let hooks = compilationHooksMap.get(compilation);
if (hooks === undefined) {
hooks = {
updateHash: new SyncBailHook(["content", "oldHash"])
};
compilationHooksMap.set(compilation, hooks);
}
return hooks;
}
constructor({ hashFunction, hashDigest }) {
this._hashFunction = hashFunction;
this._hashDigest = hashDigest;
}
/**
* Apply the plugin
* @param {Compiler} compiler the compiler instance
* @returns {void}
*/
apply(compiler) {
compiler.hooks.compilation.tap("RealContentHashPlugin", compilation => {
const cacheAnalyse = compilation.getCache(
"RealContentHashPlugin|analyse"
);
const cacheGenerate = compilation.getCache(
"RealContentHashPlugin|generate"
);
const hooks = RealContentHashPlugin.getCompilationHooks(compilation);
compilation.hooks.processAssets.tapPromise(
{
name: "RealContentHashPlugin",
stage: Compilation.PROCESS_ASSETS_STAGE_OPTIMIZE_HASH
},
async () => {
const assets = compilation.getAssets();
/** @type {AssetInfoForRealContentHash[]} */
const assetsWithInfo = [];
const hashToAssets = new Map();
for (const { source, info, name } of assets) {
const cachedSource = toCachedSource(source);
const content = cachedSource.source();
/** @type {Set<string>} */
const hashes = new Set();
addToList(info.contenthash, hashes);
const data = {
name,
info,
source: cachedSource,
/** @type {RawSource | undefined} */
newSource: undefined,
/** @type {RawSource | undefined} */
newSourceWithoutOwn: undefined,
content,
/** @type {Set<string>} */
ownHashes: undefined,
contentComputePromise: undefined,
contentComputeWithoutOwnPromise: undefined,
/** @type {Set<string>} */
referencedHashes: undefined,
hashes
};
assetsWithInfo.push(data);
for (const hash of hashes) {
const list = hashToAssets.get(hash);
if (list === undefined) {
hashToAssets.set(hash, [data]);
} else {
list.push(data);
}
}
}
if (hashToAssets.size === 0) return;
const hashRegExp = new RegExp(
Array.from(hashToAssets.keys(), quoteMeta).join("|"),
"g"
);
await Promise.all(
assetsWithInfo.map(async asset => {
const { name, source, content, hashes } = asset;
if (Buffer.isBuffer(content)) {
asset.referencedHashes = EMPTY_SET;
asset.ownHashes = EMPTY_SET;
return;
}
const etag = cacheAnalyse.mergeEtags(
cacheAnalyse.getLazyHashedEtag(source),
Array.from(hashes).join("|")
);
[asset.referencedHashes, asset.ownHashes] =
await cacheAnalyse.providePromise(name, etag, () => {
const referencedHashes = new Set();
let ownHashes = new Set();
const inContent = content.match(hashRegExp);
if (inContent) {
for (const hash of inContent) {
if (hashes.has(hash)) {
ownHashes.add(hash);
continue;
}
referencedHashes.add(hash);
}
}
return [referencedHashes, ownHashes];
});
})
);
const getDependencies = hash => {
const assets = hashToAssets.get(hash);
if (!assets) {
const referencingAssets = assetsWithInfo.filter(asset =>
asset.referencedHashes.has(hash)
);
const err = new WebpackError(`RealContentHashPlugin
Some kind of unexpected caching problem occurred.
An asset was cached with a reference to another asset (${hash}) that's not in the compilation anymore.
Either the asset was incorrectly cached, or the referenced asset should also be restored from cache.
Referenced by:
${referencingAssets
.map(a => {
const match = new RegExp(`.{0,20}${quoteMeta(hash)}.{0,20}`).exec(
a.content
);
return ` - ${a.name}: ...${match ? match[0] : "???"}...`;
})
.join("\n")}`);
compilation.errors.push(err);
return undefined;
}
const hashes = new Set();
for (const { referencedHashes, ownHashes } of assets) {
if (!ownHashes.has(hash)) {
for (const hash of ownHashes) {
hashes.add(hash);
}
}
for (const hash of referencedHashes) {
hashes.add(hash);
}
}
return hashes;
};
const hashInfo = hash => {
const assets = hashToAssets.get(hash);
return `${hash} (${Array.from(assets, a => a.name)})`;
};
const hashesInOrder = new Set();
for (const hash of hashToAssets.keys()) {
const add = (hash, stack) => {
const deps = getDependencies(hash);
if (!deps) return;
stack.add(hash);
for (const dep of deps) {
if (hashesInOrder.has(dep)) continue;
if (stack.has(dep)) {
throw new Error(
`Circular hash dependency ${Array.from(
stack,
hashInfo
).join(" -> ")} -> ${hashInfo(dep)}`
);
}
add(dep, stack);
}
hashesInOrder.add(hash);
stack.delete(hash);
};
if (hashesInOrder.has(hash)) continue;
add(hash, new Set());
}
const hashToNewHash = new Map();
const getEtag = asset =>
cacheGenerate.mergeEtags(
cacheGenerate.getLazyHashedEtag(asset.source),
Array.from(asset.referencedHashes, hash =>
hashToNewHash.get(hash)
).join("|")
);
const computeNewContent = asset => {
if (asset.contentComputePromise) return asset.contentComputePromise;
return (asset.contentComputePromise = (async () => {
if (
asset.ownHashes.size > 0 ||
Array.from(asset.referencedHashes).some(
hash => hashToNewHash.get(hash) !== hash
)
) {
const identifier = asset.name;
const etag = getEtag(asset);
asset.newSource = await cacheGenerate.providePromise(
identifier,
etag,
() => {
const newContent = asset.content.replace(hashRegExp, hash =>
hashToNewHash.get(hash)
);
return new RawSource(newContent);
}
);
}
})());
};
const computeNewContentWithoutOwn = asset => {
if (asset.contentComputeWithoutOwnPromise)
return asset.contentComputeWithoutOwnPromise;
return (asset.contentComputeWithoutOwnPromise = (async () => {
if (
asset.ownHashes.size > 0 ||
Array.from(asset.referencedHashes).some(
hash => hashToNewHash.get(hash) !== hash
)
) {
const identifier = asset.name + "|without-own";
const etag = getEtag(asset);
asset.newSourceWithoutOwn = await cacheGenerate.providePromise(
identifier,
etag,
() => {
const newContent = asset.content.replace(
hashRegExp,
hash => {
if (asset.ownHashes.has(hash)) {
return "";
}
return hashToNewHash.get(hash);
}
);
return new RawSource(newContent);
}
);
}
})());
};
const comparator = compareSelect(a => a.name, compareStrings);
for (const oldHash of hashesInOrder) {
const assets = hashToAssets.get(oldHash);
assets.sort(comparator);
const hash = createHash(this._hashFunction);
await Promise.all(
assets.map(asset =>
asset.ownHashes.has(oldHash)
? computeNewContentWithoutOwn(asset)
: computeNewContent(asset)
)
);
const assetsContent = mapAndDeduplicateBuffers(assets, asset => {
if (asset.ownHashes.has(oldHash)) {
return asset.newSourceWithoutOwn
? asset.newSourceWithoutOwn.buffer()
: asset.source.buffer();
} else {
return asset.newSource
? asset.newSource.buffer()
: asset.source.buffer();
}
});
let newHash = hooks.updateHash.call(assetsContent, oldHash);
if (!newHash) {
for (const content of assetsContent) {
hash.update(content);
}
const digest = hash.digest(this._hashDigest);
newHash = /** @type {string} */ (digest.slice(0, oldHash.length));
}
hashToNewHash.set(oldHash, newHash);
}
await Promise.all(
assetsWithInfo.map(async asset => {
await computeNewContent(asset);
const newName = asset.name.replace(hashRegExp, hash =>
hashToNewHash.get(hash)
);
const infoUpdate = {};
const hash = asset.info.contenthash;
infoUpdate.contenthash = Array.isArray(hash)
? hash.map(hash => hashToNewHash.get(hash))
: hashToNewHash.get(hash);
if (asset.newSource !== undefined) {
compilation.updateAsset(
asset.name,
asset.newSource,
infoUpdate
);
} else {
compilation.updateAsset(asset.name, asset.source, infoUpdate);
}
if (asset.name !== newName) {
compilation.renameAsset(asset.name, newName);
}
})
);
}
);
});
}
}
module.exports = RealContentHashPlugin;
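
The updateHash hook exposed via getCompilationHooks is the extension point for replacing the hashing used when real content hashes are recomputed. Below is a minimal sketch of a companion plugin, assuming the deep require path webpack/lib/optimize/RealContentHashPlugin resolves to this file; CustomRealHashPlugin and the choice of sha256 are illustrative, not part of this file.

"use strict";
const crypto = require("crypto");
const RealContentHashPlugin = require("webpack/lib/optimize/RealContentHashPlugin");

class CustomRealHashPlugin {
	apply(compiler) {
		compiler.hooks.compilation.tap("CustomRealHashPlugin", compilation => {
			const hooks = RealContentHashPlugin.getCompilationHooks(compilation);
			// content is the deduplicated list of asset buffers, oldHash the hash
			// being replaced; returning a string bails out of the default hashing.
			hooks.updateHash.tap("CustomRealHashPlugin", (content, oldHash) => {
				const hash = crypto.createHash("sha256");
				for (const buffer of content) hash.update(buffer);
				return hash.digest("hex").slice(0, oldHash.length);
			});
		});
	}
}
module.exports = CustomRealHashPlugin;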

View File

@@ -0,0 +1,57 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const { STAGE_BASIC, STAGE_ADVANCED } = require("../OptimizationStages");
/** @typedef {import("../Chunk")} Chunk */
/** @typedef {import("../Compiler")} Compiler */
class RemoveEmptyChunksPlugin {
/**
* Apply the plugin
* @param {Compiler} compiler the compiler instance
* @returns {void}
*/
apply(compiler) {
compiler.hooks.compilation.tap("RemoveEmptyChunksPlugin", compilation => {
/**
* @param {Iterable<Chunk>} chunks the chunks array
* @returns {void}
*/
const handler = chunks => {
const chunkGraph = compilation.chunkGraph;
for (const chunk of chunks) {
if (
chunkGraph.getNumberOfChunkModules(chunk) === 0 &&
!chunk.hasRuntime() &&
chunkGraph.getNumberOfEntryModules(chunk) === 0
) {
compilation.chunkGraph.disconnectChunk(chunk);
compilation.chunks.delete(chunk);
}
}
};
// TODO do it once
compilation.hooks.optimizeChunks.tap(
{
name: "RemoveEmptyChunksPlugin",
stage: STAGE_BASIC
},
handler
);
compilation.hooks.optimizeChunks.tap(
{
name: "RemoveEmptyChunksPlugin",
stage: STAGE_ADVANCED
},
handler
);
});
}
}
module.exports = RemoveEmptyChunksPlugin;
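
For illustration, a sketch of wiring the plugin up manually; webpack normally applies it on its own when optimization.removeEmptyChunks (enabled by default) is set, so the built-in copy is disabled first. The deep require path is an assumption.

// webpack.config.js (sketch)
const RemoveEmptyChunksPlugin = require("webpack/lib/optimize/RemoveEmptyChunksPlugin");

module.exports = {
	optimization: { removeEmptyChunks: false }, // avoid running the plugin twice
	plugins: [new RemoveEmptyChunksPlugin()]
};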

View File

@@ -0,0 +1,121 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const { STAGE_BASIC } = require("../OptimizationStages");
const Queue = require("../util/Queue");
const { intersect } = require("../util/SetHelpers");
/** @typedef {import("../Compiler")} Compiler */
class RemoveParentModulesPlugin {
/**
* @param {Compiler} compiler the compiler
* @returns {void}
*/
apply(compiler) {
compiler.hooks.compilation.tap("RemoveParentModulesPlugin", compilation => {
const handler = (chunks, chunkGroups) => {
const chunkGraph = compilation.chunkGraph;
const queue = new Queue();
const availableModulesMap = new WeakMap();
for (const chunkGroup of compilation.entrypoints.values()) {
// initialize available modules for chunks without parents
availableModulesMap.set(chunkGroup, new Set());
for (const child of chunkGroup.childrenIterable) {
queue.enqueue(child);
}
}
for (const chunkGroup of compilation.asyncEntrypoints) {
// initialize available modules for chunks without parents
availableModulesMap.set(chunkGroup, new Set());
for (const child of chunkGroup.childrenIterable) {
queue.enqueue(child);
}
}
while (queue.length > 0) {
const chunkGroup = queue.dequeue();
let availableModules = availableModulesMap.get(chunkGroup);
let changed = false;
for (const parent of chunkGroup.parentsIterable) {
const availableModulesInParent = availableModulesMap.get(parent);
if (availableModulesInParent !== undefined) {
// If we know the available modules in the parent: process these
if (availableModules === undefined) {
// if we don't have our own info yet: create a new entry
availableModules = new Set(availableModulesInParent);
for (const chunk of parent.chunks) {
for (const m of chunkGraph.getChunkModulesIterable(chunk)) {
availableModules.add(m);
}
}
availableModulesMap.set(chunkGroup, availableModules);
changed = true;
} else {
for (const m of availableModules) {
if (
!chunkGraph.isModuleInChunkGroup(m, parent) &&
!availableModulesInParent.has(m)
) {
availableModules.delete(m);
changed = true;
}
}
}
}
}
if (changed) {
// if something changed: enqueue our children
for (const child of chunkGroup.childrenIterable) {
queue.enqueue(child);
}
}
}
// now we have available modules for every chunk
for (const chunk of chunks) {
const availableModulesSets = Array.from(
chunk.groupsIterable,
chunkGroup => availableModulesMap.get(chunkGroup)
);
if (availableModulesSets.some(s => s === undefined)) continue; // No info about this chunk group
const availableModules =
availableModulesSets.length === 1
? availableModulesSets[0]
: intersect(availableModulesSets);
const numberOfModules = chunkGraph.getNumberOfChunkModules(chunk);
const toRemove = new Set();
if (numberOfModules < availableModules.size) {
for (const m of chunkGraph.getChunkModulesIterable(chunk)) {
if (availableModules.has(m)) {
toRemove.add(m);
}
}
} else {
for (const m of availableModules) {
if (chunkGraph.isModuleInChunk(m, chunk)) {
toRemove.add(m);
}
}
}
for (const module of toRemove) {
chunkGraph.disconnectChunkAndModule(chunk, module);
}
}
};
compilation.hooks.optimizeChunks.tap(
{
name: "RemoveParentModulesPlugin",
stage: STAGE_BASIC
},
handler
);
});
}
}
module.exports = RemoveParentModulesPlugin;
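
The handler computes, per chunk group, which modules are guaranteed to be loaded by the time the group runs, then strips from each chunk whatever every one of its groups already provides (the intersection across groups). A toy illustration of that set logic, with hypothetical module names:

// Modules guaranteed by each parent chunk group of some chunk:
const fromGroupA = new Set(["react", "lodash"]);
const fromGroupB = new Set(["react"]);

// Only modules available via *every* group may safely be removed:
const intersectAll = sets =>
	sets.reduce((a, b) => new Set([...a].filter(x => b.has(x))));
const available = intersectAll([fromGroupA, fromGroupB]); // Set { "react" }

const chunkModules = new Set(["react", "./page.js"]);
for (const m of available) chunkModules.delete(m);
// chunkModules is now Set { "./page.js" }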

View File

@@ -0,0 +1,44 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/** @typedef {import("../Compiler")} Compiler */
class RuntimeChunkPlugin {
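/**
* @param {Object=} options options object
* @param {string | function} options.name runtime chunk name, or a function
* mapping an entrypoint ({ name }) to the runtime chunk name
*/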
constructor(options) {
this.options = {
name: entrypoint => `runtime~${entrypoint.name}`,
...options
};
}
/**
* Apply the plugin
* @param {Compiler} compiler the compiler instance
* @returns {void}
*/
apply(compiler) {
compiler.hooks.thisCompilation.tap("RuntimeChunkPlugin", compilation => {
compilation.hooks.addEntry.tap(
"RuntimeChunkPlugin",
(_, { name: entryName }) => {
if (entryName === undefined) return;
const data = compilation.entries.get(entryName);
if (data.options.runtime === undefined && !data.options.dependOn) {
// Determine runtime chunk name
let name = this.options.name;
if (typeof name === "function") {
name = name({ name: entryName });
}
data.options.runtime = name;
}
}
);
});
}
}
module.exports = RuntimeChunkPlugin;
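
This plugin backs the public optimization.runtimeChunk option. A sketch of applying it directly with the same naming scheme as the default above; the deep require path is an assumption and direct use is for illustration only.

// webpack.config.js (sketch)
const RuntimeChunkPlugin = require("webpack/lib/optimize/RuntimeChunkPlugin");

module.exports = {
	plugins: [
		// roughly equivalent to optimization.runtimeChunk: { name: e => `runtime~${e.name}` }
		new RuntimeChunkPlugin({ name: entrypoint => `runtime~${entrypoint.name}` })
	]
};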

View File

@@ -0,0 +1,337 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const glob2regexp = require("glob-to-regexp");
const { STAGE_DEFAULT } = require("../OptimizationStages");
const HarmonyExportImportedSpecifierDependency = require("../dependencies/HarmonyExportImportedSpecifierDependency");
const HarmonyImportSpecifierDependency = require("../dependencies/HarmonyImportSpecifierDependency");
const formatLocation = require("../formatLocation");
/** @typedef {import("../Compiler")} Compiler */
/** @typedef {import("../Dependency")} Dependency */
/** @typedef {import("../Module")} Module */
/** @typedef {import("../javascript/JavascriptParser")} JavascriptParser */
/**
* @typedef {Object} ExportInModule
* @property {Module} module the module
* @property {string} exportName the name of the export
* @property {boolean} checked if the export is conditional
*/
/**
* @typedef {Object} ReexportInfo
* @property {Map<string, ExportInModule[]>} static
* @property {Map<Module, Set<string>>} dynamic
*/
/** @type {WeakMap<any, Map<string, RegExp>>} */
const globToRegexpCache = new WeakMap();
/**
* @param {string} glob the pattern
* @param {Map<string, RegExp>} cache the glob to RegExp cache
* @returns {RegExp} a regular expression
*/
const globToRegexp = (glob, cache) => {
const cacheEntry = cache.get(glob);
if (cacheEntry !== undefined) return cacheEntry;
if (!glob.includes("/")) {
glob = `**/${glob}`;
}
const baseRegexp = glob2regexp(glob, { globstar: true, extended: true });
const regexpSource = baseRegexp.source;
const regexp = new RegExp("^(\\./)?" + regexpSource.slice(1));
cache.set(glob, regexp);
return regexp;
};
class SideEffectsFlagPlugin {
/**
* @param {boolean} analyseSource analyse source code for side effects
*/
constructor(analyseSource = true) {
this._analyseSource = analyseSource;
}
/**
* Apply the plugin
* @param {Compiler} compiler the compiler instance
* @returns {void}
*/
apply(compiler) {
let cache = globToRegexpCache.get(compiler.root);
if (cache === undefined) {
cache = new Map();
globToRegexpCache.set(compiler.root, cache);
}
compiler.hooks.compilation.tap(
"SideEffectsFlagPlugin",
(compilation, { normalModuleFactory }) => {
const moduleGraph = compilation.moduleGraph;
normalModuleFactory.hooks.module.tap(
"SideEffectsFlagPlugin",
(module, data) => {
const resolveData = data.resourceResolveData;
if (
resolveData &&
resolveData.descriptionFileData &&
resolveData.relativePath
) {
const sideEffects = resolveData.descriptionFileData.sideEffects;
if (sideEffects !== undefined) {
if (module.factoryMeta === undefined) {
module.factoryMeta = {};
}
const hasSideEffects =
SideEffectsFlagPlugin.moduleHasSideEffects(
resolveData.relativePath,
sideEffects,
cache
);
module.factoryMeta.sideEffectFree = !hasSideEffects;
}
}
return module;
}
);
normalModuleFactory.hooks.module.tap(
"SideEffectsFlagPlugin",
(module, data) => {
if (typeof data.settings.sideEffects === "boolean") {
if (module.factoryMeta === undefined) {
module.factoryMeta = {};
}
module.factoryMeta.sideEffectFree = !data.settings.sideEffects;
}
return module;
}
);
if (this._analyseSource) {
/**
* @param {JavascriptParser} parser the parser
* @returns {void}
*/
const parserHandler = parser => {
let sideEffectsStatement;
parser.hooks.program.tap("SideEffectsFlagPlugin", () => {
sideEffectsStatement = undefined;
});
parser.hooks.statement.tap(
{ name: "SideEffectsFlagPlugin", stage: -100 },
statement => {
if (sideEffectsStatement) return;
if (parser.scope.topLevelScope !== true) return;
switch (statement.type) {
case "ExpressionStatement":
if (
!parser.isPure(statement.expression, statement.range[0])
) {
sideEffectsStatement = statement;
}
break;
case "IfStatement":
case "WhileStatement":
case "DoWhileStatement":
if (!parser.isPure(statement.test, statement.range[0])) {
sideEffectsStatement = statement;
}
// statement hook will be called for child statements too
break;
case "ForStatement":
if (
!parser.isPure(statement.init, statement.range[0]) ||
!parser.isPure(
statement.test,
statement.init
? statement.init.range[1]
: statement.range[0]
) ||
!parser.isPure(
statement.update,
statement.test
? statement.test.range[1]
: statement.init
? statement.init.range[1]
: statement.range[0]
)
) {
sideEffectsStatement = statement;
}
// statement hook will be called for child statements too
break;
case "SwitchStatement":
if (
!parser.isPure(statement.discriminant, statement.range[0])
) {
sideEffectsStatement = statement;
}
// statement hook will be called for child statements too
break;
case "VariableDeclaration":
case "ClassDeclaration":
case "FunctionDeclaration":
if (!parser.isPure(statement, statement.range[0])) {
sideEffectsStatement = statement;
}
break;
case "ExportNamedDeclaration":
case "ExportDefaultDeclaration":
if (
!parser.isPure(statement.declaration, statement.range[0])
) {
sideEffectsStatement = statement;
}
break;
case "LabeledStatement":
case "BlockStatement":
// statement hook will be called for child statements too
break;
case "EmptyStatement":
break;
case "ExportAllDeclaration":
case "ImportDeclaration":
// imports will be handled by the dependencies
break;
default:
sideEffectsStatement = statement;
break;
}
}
);
parser.hooks.finish.tap("SideEffectsFlagPlugin", () => {
if (sideEffectsStatement === undefined) {
parser.state.module.buildMeta.sideEffectFree = true;
} else {
const { loc, type } = sideEffectsStatement;
moduleGraph
.getOptimizationBailout(parser.state.module)
.push(
() =>
`Statement (${type}) with side effects in source code at ${formatLocation(
loc
)}`
);
}
});
};
for (const key of [
"javascript/auto",
"javascript/esm",
"javascript/dynamic"
]) {
normalModuleFactory.hooks.parser
.for(key)
.tap("SideEffectsFlagPlugin", parserHandler);
}
}
compilation.hooks.optimizeDependencies.tap(
{
name: "SideEffectsFlagPlugin",
stage: STAGE_DEFAULT
},
modules => {
const logger = compilation.getLogger(
"webpack.SideEffectsFlagPlugin"
);
logger.time("update dependencies");
for (const module of modules) {
if (module.getSideEffectsConnectionState(moduleGraph) === false) {
const exportsInfo = moduleGraph.getExportsInfo(module);
for (const connection of moduleGraph.getIncomingConnections(
module
)) {
const dep = connection.dependency;
let isReexport;
if (
(isReexport =
dep instanceof
HarmonyExportImportedSpecifierDependency) ||
(dep instanceof HarmonyImportSpecifierDependency &&
!dep.namespaceObjectAsContext)
) {
// TODO improve for export *
if (isReexport && dep.name) {
const exportInfo = moduleGraph.getExportInfo(
connection.originModule,
dep.name
);
exportInfo.moveTarget(
moduleGraph,
({ module }) =>
module.getSideEffectsConnectionState(moduleGraph) ===
false,
({ module: newModule, export: exportName }) => {
moduleGraph.updateModule(dep, newModule);
moduleGraph.addExplanation(
dep,
"(skipped side-effect-free modules)"
);
const ids = dep.getIds(moduleGraph);
dep.setIds(
moduleGraph,
exportName
? [...exportName, ...ids.slice(1)]
: ids.slice(1)
);
return moduleGraph.getConnection(dep);
}
);
continue;
}
// TODO improve for nested imports
const ids = dep.getIds(moduleGraph);
if (ids.length > 0) {
const exportInfo = exportsInfo.getExportInfo(ids[0]);
const target = exportInfo.getTarget(
moduleGraph,
({ module }) =>
module.getSideEffectsConnectionState(moduleGraph) ===
false
);
if (!target) continue;
moduleGraph.updateModule(dep, target.module);
moduleGraph.addExplanation(
dep,
"(skipped side-effect-free modules)"
);
dep.setIds(
moduleGraph,
target.export
? [...target.export, ...ids.slice(1)]
: ids.slice(1)
);
}
}
}
}
}
logger.timeEnd("update dependencies");
}
);
}
);
}
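/**
* @param {string} moduleName path of the module, relative to its description file
* @param {undefined | boolean | string | string[]} flagValue the "sideEffects" value
* @param {Map<string, RegExp>} cache the glob to RegExp cache
* @returns {boolean} true when the module is considered to have side effects
*/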
static moduleHasSideEffects(moduleName, flagValue, cache) {
switch (typeof flagValue) {
case "undefined":
return true;
case "boolean":
return flagValue;
case "string":
return globToRegexp(flagValue, cache).test(moduleName);
case "object":
return flagValue.some(glob =>
SideEffectsFlagPlugin.moduleHasSideEffects(moduleName, glob, cache)
);
}
}
}
module.exports = SideEffectsFlagPlugin;
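
The static helper can be exercised on its own to see how a package.json "sideEffects" value is interpreted by the switch above: booleans pass through, a string is matched as a glob, an array matches when any entry does, and undefined means the module is assumed to have side effects. Expected results are noted inline; the paths are hypothetical.

const SideEffectsFlagPlugin = require("webpack/lib/optimize/SideEffectsFlagPlugin");
const cache = new Map(); // glob -> RegExp cache, as keyed per compiler.root above

SideEffectsFlagPlugin.moduleHasSideEffects("./src/a.js", false, cache); // false
SideEffectsFlagPlugin.moduleHasSideEffects("./src/a.css", "*.css", cache); // true
SideEffectsFlagPlugin.moduleHasSideEffects("./src/a.js", ["*.css"], cache); // false
SideEffectsFlagPlugin.moduleHasSideEffects("./src/a.js", undefined, cache); // true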

node_modules/webpack/lib/optimize/SplitChunksPlugin.js (1723 lines, generated, vendored)

File diff suppressed because it is too large