node_modules
.bin
@ampproject
@babel
@discoveryjs
@gar
@istanbuljs
@jridgewell
@leichtgewicht
@npmcli
@riotjs
@tootallnate
@types
@ungap
@webassemblyjs
@webpack-cli
@xtuc
abab
abbrev
accepts
acorn
acorn-globals
acorn-import-assertions
acorn-walk
agent-base
agentkeepalive
aggregate-error
ajv
ajv-formats
ajv-keywords
ansi-colors
ansi-html-community
ansi-regex
ansi-styles
anymatch
append-transform
aproba
archy
are-we-there-yet
argparse
array-flatten
arrify
assertion-error
ast-types
async-foreach
asynckit
babel-plugin-dynamic-import-node
babel-plugin-polyfill-corejs2
babel-plugin-polyfill-corejs3
babel-plugin-polyfill-regenerator
balanced-match
batch
bianco.attr
bianco.dom-to-array
bianco.events
bianco.query
big.js
binary-extensions
body-parser
bonjour-service
boolbase
brace-expansion
braces
browser-process-hrtime
browser-stdout
browserslist
buffer-from
bytes
cacache
caching-transform
call-bind
camel-case
camelcase
camelcase-keys
caniuse-lite
chai
chalk
check-error
chokidar
chownr
chrome-trace-event
clean-css
clean-stack
cliui
clone-deep
color-convert
color-name
color-support
colorette
combined-stream
commander
commondir
compressible
compression
concat-map
connect-history-api-fallback
console-control-strings
content-disposition
content-type
convert-source-map
cookie
cookie-signature
core-js-compat
core-util-is
cross-spawn
css-select
css-what
cssesc
cssom
cssstyle
cumpa
curri
data-urls
debug
decamelize
decamelize-keys
decimal.js
deep-eql
deep-is
default-gateway
default-require-extensions
define-lazy-prop
define-properties
delayed-stream
delegates
depd
destroy
detect-node
diff
dns-equal
dns-packet
dom-converter
dom-nodes
dom-serializer
domelementtype
domexception
domhandler
domutils
dot-case
ee-first
electron-to-chromium
emoji-regex
emojis-list
encodeurl
encoding
enhanced-resolve
entities
env-paths
envinfo
err-code
erre
error-ex
es-module-lexer
es6-error
escalade
escape-html
escape-string-regexp
escodegen
eslint-scope
esm
esprima
esrecurse
estraverse
esutils
etag
eventemitter3
events
execa
express
fast-deep-equal
fast-json-stable-stringify
fast-levenshtein
fastest-levenshtein
faye-websocket
fill-range
finalhandler
find-up
flat
follow-redirects
foreground-child
form-data
forwarded
fresh
fromentries
fs-minipass
fs-monkey
fs.realpath
function-bind
gauge
gaze
gensync
get-caller-file
get-func-name
get-intrinsic
get-package-type
get-stdin
get-stream
glob
glob-parent
glob-to-regexp
globals
globule
graceful-fs
growl
handle-thing
hard-rejection
has
has-flag
has-property-descriptors
has-symbols
has-unicode
hasha
he
hosted-git-info
hpack.js
html-encoding-sniffer
html-entities
html-escaper
html-minifier-terser
html-webpack-plugin
htmlparser2
http-cache-semantics
http-deceiver
http-errors
http-parser-js
http-proxy
http-proxy-agent
http-proxy-middleware
https-proxy-agent
human-signals
humanize-ms
iconv-lite
imurmurhash
indent-string
infer-owner
inflight
inherits
interpret
ip
ipaddr.js
is-arrayish
is-binary-path
is-core-module
is-docker
is-extglob
is-fullwidth-code-point
is-glob
is-lambda
is-number
is-plain-obj
is-plain-object
is-potential-custom-element-name
is-stream
is-typedarray
is-windows
is-wsl
isarray
isexe
isobject
istanbul-lib-coverage
istanbul-lib-hook
istanbul-lib-instrument
istanbul-lib-processinfo
istanbul-lib-report
istanbul-lib-source-maps
istanbul-reports
jest-worker
js-base64
js-tokens
js-yaml
jsdom
jsdom-global
jsesc
json-parse-even-better-errors
json-schema-traverse
json5
kind-of
levn
lines-and-columns
loader-runner
locate-path
lodash
lodash.debounce
lodash.flattendeep
log-symbols
loupe
lower-case
lru-cache
make-fetch-happen
map-obj
media-typer
memfs
meow
merge-descriptors
merge-stream
methods
micromatch
mime
mime-db
mime-types
mimic-fn
min-indent
minimalistic-assert
minimatch
minimist-options
minipass
minipass-collect
minipass-fetch
minipass-flush
minipass-pipeline
minipass-sized
minizlib
mkdirp
mocha
ms
multicast-dns
nan
nanoid
negotiator
neo-async
no-case
node-forge
node-gyp
node-preload
node-releases
node-sass
nopt
normalize-package-data
normalize-path
npm-run-path
npmlog
nth-check
nwsapi
nyc
object-inspect
object-keys
object.assign
obuf
on-finished
on-headers
once
onetime
open
optionator
p-limit
p-locate
p-map
p-retry
p-try
package-hash
param-case
parse-json
parse5
parseurl
pascal-case
path-exists
path-is-absolute
path-key
path-parse
path-to-regexp
pathval
picocolors
picomatch
pirates
prelude-ls
pretty-error
process-nextick-args
process-on-spawn
promise-inflight
promise-retry
proxy-addr
psl
punycode
qs
quick-lru
randombytes
range-parser
raw-body
rawth
read-pkg
read-pkg-up
readable-stream
readdirp
recast
rechoir
redent
regenerate
regenerate-unicode-properties
regenerator-runtime
regenerator-transform
regexpu-core
regjsgen
regjsparser
relateurl
release-zalgo
renderkid
require-directory
require-from-string
require-main-filename
requires-port
resolve
retry
rimraf
riot
ruit
safe-buffer
safer-buffer
sass-graph
saxes
schema-utils
scss-tokenizer
select-hose
selfsigned
semver
send
serialize-javascript
serve-index
serve-static
set-blocking
setprototypeof
shallow-clone
shebang-command
shebang-regex
side-channel
signal-exit
smart-buffer
sockjs
socks
socks-proxy-agent
source-map
source-map-support
spawn-wrap
spdx-correct
spdx-exceptions
spdx-expression-parse
spdx-license-ids
spdy
spdy-transport
sprintf-js
ssri
statuses
stdout-stream
string-width
string_decoder
strip-ansi
strip-final-newline
strip-indent
strip-json-comments
supports-color
symbol-tree
tapable
tar
terser
terser-webpack-plugin
test-exclude
thunky
to-fast-properties
to-regex-range
toidentifier
tough-cookie
tr46
trim-newlines
true-case-path
tslib
type-check
type-detect
type-fest
type-is
typedarray-to-buffer
unicode-canonical-property-names-ecmascript
unicode-match-property-ecmascript
unicode-match-property-value-ecmascript
unicode-property-aliases-ecmascript
unique-filename
unique-slug
universalify
unpipe
update-browserslist-db
uri-js
util-deprecate
utila
utils-merge
uuid
validate-npm-package-license
vary
w3c-hr-time
w3c-xmlserializer
watchpack
wbuf
webidl-conversions
webpack
bin
hot
lib
asset
async-modules
cache
config
container
css
debug
dependencies
electron
errors
esm
hmr
ids
javascript
json
library
logging
node
optimize
performance
prefetch
rules
runtime
schemes
serialization
sharing
stats
util
wasm
wasm-async
wasm-sync
web
webworker
APIPlugin.js
AbstractMethodError.js
AsyncDependenciesBlock.js
AsyncDependencyToInitialChunkError.js
AutomaticPrefetchPlugin.js
BannerPlugin.js
Cache.js
CacheFacade.js
CaseSensitiveModulesWarning.js
Chunk.js
ChunkGraph.js
ChunkGroup.js
ChunkRenderError.js
ChunkTemplate.js
CleanPlugin.js
CodeGenerationError.js
CodeGenerationResults.js
CommentCompilationWarning.js
CompatibilityPlugin.js
Compilation.js
Compiler.js
ConcatenationScope.js
ConcurrentCompilationError.js
ConditionalInitFragment.js
ConstPlugin.js
ContextExclusionPlugin.js
ContextModule.js
ContextModuleFactory.js
ContextReplacementPlugin.js
DefinePlugin.js
DelegatedModule.js
DelegatedModuleFactoryPlugin.js
DelegatedPlugin.js
DependenciesBlock.js
Dependency.js
DependencyTemplate.js
DependencyTemplates.js
DllEntryPlugin.js
DllModule.js
DllModuleFactory.js
DllPlugin.js
DllReferencePlugin.js
DynamicEntryPlugin.js
EntryOptionPlugin.js
EntryPlugin.js
Entrypoint.js
EnvironmentPlugin.js
ErrorHelpers.js
EvalDevToolModulePlugin.js
EvalSourceMapDevToolPlugin.js
ExportsInfo.js
ExportsInfoApiPlugin.js
ExternalModule.js
ExternalModuleFactoryPlugin.js
ExternalsPlugin.js
FileSystemInfo.js
FlagAllModulesAsUsedPlugin.js
FlagDependencyExportsPlugin.js
FlagDependencyUsagePlugin.js
FlagEntryExportAsUsedPlugin.js
Generator.js
GraphHelpers.js
HarmonyLinkingError.js
HookWebpackError.js
HotModuleReplacementPlugin.js
HotUpdateChunk.js
IgnoreErrorModuleFactory.js
IgnorePlugin.js
IgnoreWarningsPlugin.js
InitFragment.js
InvalidDependenciesModuleWarning.js
JavascriptMetaInfoPlugin.js
LibManifestPlugin.js
LibraryTemplatePlugin.js
LoaderOptionsPlugin.js
LoaderTargetPlugin.js
MainTemplate.js
Module.js
ModuleBuildError.js
ModuleDependencyError.js
ModuleDependencyWarning.js
ModuleError.js
ModuleFactory.js
ModuleFilenameHelpers.js
ModuleGraph.js
ModuleGraphConnection.js
ModuleHashingError.js
ModuleInfoHeaderPlugin.js
ModuleNotFoundError.js
ModuleParseError.js
ModuleProfile.js
ModuleRestoreError.js
ModuleStoreError.js
ModuleTemplate.js
ModuleWarning.js
MultiCompiler.js
MultiStats.js
MultiWatching.js
NoEmitOnErrorsPlugin.js
NoModeWarning.js
NodeStuffInWebError.js
NodeStuffPlugin.js
NormalModule.js
NormalModuleFactory.js
NormalModuleReplacementPlugin.js
NullFactory.js
OptimizationStages.js
OptionsApply.js
Parser.js
PrefetchPlugin.js
ProgressPlugin.js
ProvidePlugin.js
RawModule.js
RecordIdsPlugin.js
RequestShortener.js
RequireJsStuffPlugin.js
ResolverFactory.js
RuntimeGlobals.js
RuntimeModule.js
RuntimePlugin.js
RuntimeTemplate.js
SelfModuleFactory.js
SingleEntryPlugin.js
SizeFormatHelpers.js
SourceMapDevToolModuleOptionsPlugin.js
SourceMapDevToolPlugin.js
Stats.js
Template.js
TemplatedPathPlugin.js
UnhandledSchemeError.js
UnsupportedFeatureWarning.js
UseStrictPlugin.js
WarnCaseSensitiveModulesPlugin.js
WarnDeprecatedOptionPlugin.js
WarnNoModeSetPlugin.js
WatchIgnorePlugin.js
Watching.js
WebpackError.js
WebpackIsIncludedPlugin.js
WebpackOptionsApply.js
WebpackOptionsDefaulter.js
buildChunkGraph.js
cli.js
formatLocation.js
index.js
validateSchema.js
webpack.js
node_modules
schemas
LICENSE
README.md
SECURITY.md
module.d.ts
package.json
types.d.ts
webpack-cli
webpack-dev-middleware
webpack-dev-server
webpack-merge
webpack-sources
websocket-driver
websocket-extensions
whatwg-encoding
whatwg-mimetype
whatwg-url
which
which-module
wide-align
wildcard
word-wrap
workerpool
wrap-ansi
wrappy
write-file-atomic
ws
xml-name-validator
xmlchars
y18n
yallist
yargs
yargs-parser
yargs-unparser
yocto-queue
.package-lock.json
src
LICENSE
package-lock.json
package.json
readme.md
webpack.config.js
1231 lines
35 KiB
JavaScript
1231 lines
35 KiB
JavaScript
![]() |
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/

"use strict";
|
||
|
|
||
|
const parseJson = require("json-parse-even-better-errors");
|
||
|
const asyncLib = require("neo-async");
|
||
|
const {
|
||
|
SyncHook,
|
||
|
SyncBailHook,
|
||
|
AsyncParallelHook,
|
||
|
AsyncSeriesHook
|
||
|
} = require("tapable");
|
||
|
const { SizeOnlySource } = require("webpack-sources");
|
||
|
const webpack = require("./");
|
||
|
const Cache = require("./Cache");
|
||
|
const CacheFacade = require("./CacheFacade");
|
||
|
const ChunkGraph = require("./ChunkGraph");
|
||
|
const Compilation = require("./Compilation");
|
||
|
const ConcurrentCompilationError = require("./ConcurrentCompilationError");
|
||
|
const ContextModuleFactory = require("./ContextModuleFactory");
|
||
|
const ModuleGraph = require("./ModuleGraph");
|
||
|
const NormalModuleFactory = require("./NormalModuleFactory");
|
||
|
const RequestShortener = require("./RequestShortener");
|
||
|
const ResolverFactory = require("./ResolverFactory");
|
||
|
const Stats = require("./Stats");
|
||
|
const Watching = require("./Watching");
|
||
|
const WebpackError = require("./WebpackError");
|
||
|
const { Logger } = require("./logging/Logger");
|
||
|
const { join, dirname, mkdirp } = require("./util/fs");
|
||
|
const { makePathsRelative } = require("./util/identifier");
|
||
|
const { isSourceEqual } = require("./util/source");
|
||
|
|
||
|
/** @typedef {import("webpack-sources").Source} Source */
|
||
|
/** @typedef {import("../declarations/WebpackOptions").EntryNormalized} Entry */
|
||
|
/** @typedef {import("../declarations/WebpackOptions").OutputNormalized} OutputOptions */
|
||
|
/** @typedef {import("../declarations/WebpackOptions").WatchOptions} WatchOptions */
|
||
|
/** @typedef {import("../declarations/WebpackOptions").WebpackOptionsNormalized} WebpackOptions */
|
||
|
/** @typedef {import("../declarations/WebpackOptions").WebpackPluginInstance} WebpackPluginInstance */
|
||
|
/** @typedef {import("./Chunk")} Chunk */
|
||
|
/** @typedef {import("./Dependency")} Dependency */
|
||
|
/** @typedef {import("./FileSystemInfo").FileSystemInfoEntry} FileSystemInfoEntry */
|
||
|
/** @typedef {import("./Module")} Module */
|
||
|
/** @typedef {import("./util/WeakTupleMap")} WeakTupleMap */
|
||
|
/** @typedef {import("./util/fs").InputFileSystem} InputFileSystem */
|
||
|
/** @typedef {import("./util/fs").IntermediateFileSystem} IntermediateFileSystem */
|
||
|
/** @typedef {import("./util/fs").OutputFileSystem} OutputFileSystem */
|
||
|
/** @typedef {import("./util/fs").WatchFileSystem} WatchFileSystem */
|
||
|
|
||
|
/**
|
||
|
* @typedef {Object} CompilationParams
|
||
|
* @property {NormalModuleFactory} normalModuleFactory
|
||
|
* @property {ContextModuleFactory} contextModuleFactory
|
||
|
*/
|
||
|
|
||
|
/**
|
||
|
* @template T
|
||
|
* @callback Callback
|
||
|
* @param {(Error | null)=} err
|
||
|
* @param {T=} result
|
||
|
*/
|
||
|
|
||
|
/**
|
||
|
* @callback RunAsChildCallback
|
||
|
* @param {(Error | null)=} err
|
||
|
* @param {Chunk[]=} entries
|
||
|
* @param {Compilation=} compilation
|
||
|
*/
|
||
|
|
||
|
/**
|
||
|
* @typedef {Object} AssetEmittedInfo
|
||
|
* @property {Buffer} content
|
||
|
* @property {Source} source
|
||
|
* @property {Compilation} compilation
|
||
|
* @property {string} outputPath
|
||
|
* @property {string} targetPath
|
||
|
*/
|
||
|
|
||
|
/**
|
||
|
* @param {string[]} array an array
|
||
|
* @returns {boolean} true, if the array is sorted
|
||
|
*/
|
||
|
/**
 * Checks whether an array is already sorted in ascending order.
 * An empty or single-element array counts as sorted.
 * @param {string[]} array an array
 * @returns {boolean} true, if the array is sorted
 */
const isSorted = array =>
	array.every((value, index) => index === 0 || array[index - 1] <= value);
|
||
|
|
||
|
/**
|
||
|
* @param {Object} obj an object
|
||
|
* @param {string[]} keys the keys of the object
|
||
|
* @returns {Object} the object with properties sorted by property name
|
||
|
*/
|
||
|
/**
 * Builds a new object containing the given keys of `obj`, inserted in
 * lexicographically sorted key order.
 * Fix: the original called `keys.sort()` directly, mutating the caller's
 * array as a side effect; we now sort a copy so the input stays untouched.
 * @param {Object} obj an object
 * @param {string[]} keys the keys of the object
 * @returns {Object} the object with properties sorted by property name
 */
const sortObject = (obj, keys) => {
	const o = {};
	// Copy before sorting — Array.prototype.sort mutates in place.
	for (const k of [...keys].sort()) {
		o[k] = obj[k];
	}
	return o;
};
|
||
|
|
||
|
/**
|
||
|
* @param {string} filename filename
|
||
|
* @param {string | string[] | undefined} hashes list of hashes
|
||
|
* @returns {boolean} true, if the filename contains any hash
|
||
|
*/
|
||
|
/**
 * Tests whether a filename contains any of the given hash strings.
 * @param {string} filename filename
 * @param {string | string[] | undefined} hashes list of hashes
 * @returns {boolean} true, if the filename contains any hash
 */
const includesHash = (filename, hashes) => {
	// No hashes (undefined/empty string) — nothing to match.
	if (!hashes) return false;
	return Array.isArray(hashes)
		? hashes.some(hash => filename.includes(hash))
		: filename.includes(hashes);
};
|
||
|
|
||
|
class Compiler {
|
||
|
/**
|
||
|
* @param {string} context the compilation path
|
||
|
* @param {WebpackOptions} options options
|
||
|
*/
|
||
|
/**
 * Creates a Compiler. All lifecycle hooks are created up-front and the hook
 * map is frozen, so plugins can tap hooks but cannot add or remove them.
 * @param {string} context the compilation path
 * @param {WebpackOptions} options options
 */
constructor(context, options = /** @type {WebpackOptions} */ ({})) {
	this.hooks = Object.freeze({
		/** @type {SyncHook<[]>} */
		initialize: new SyncHook([]),

		// --- run / emit lifecycle ---
		/** @type {SyncBailHook<[Compilation], boolean>} */
		shouldEmit: new SyncBailHook(["compilation"]),
		/** @type {AsyncSeriesHook<[Stats]>} */
		done: new AsyncSeriesHook(["stats"]),
		/** @type {SyncHook<[Stats]>} */
		afterDone: new SyncHook(["stats"]),
		/** @type {AsyncSeriesHook<[]>} */
		additionalPass: new AsyncSeriesHook([]),
		/** @type {AsyncSeriesHook<[Compiler]>} */
		beforeRun: new AsyncSeriesHook(["compiler"]),
		/** @type {AsyncSeriesHook<[Compiler]>} */
		run: new AsyncSeriesHook(["compiler"]),
		/** @type {AsyncSeriesHook<[Compilation]>} */
		emit: new AsyncSeriesHook(["compilation"]),
		/** @type {AsyncSeriesHook<[string, AssetEmittedInfo]>} */
		assetEmitted: new AsyncSeriesHook(["file", "info"]),
		/** @type {AsyncSeriesHook<[Compilation]>} */
		afterEmit: new AsyncSeriesHook(["compilation"]),

		// --- compilation creation ---
		/** @type {SyncHook<[Compilation, CompilationParams]>} */
		thisCompilation: new SyncHook(["compilation", "params"]),
		/** @type {SyncHook<[Compilation, CompilationParams]>} */
		compilation: new SyncHook(["compilation", "params"]),
		/** @type {SyncHook<[NormalModuleFactory]>} */
		normalModuleFactory: new SyncHook(["normalModuleFactory"]),
		/** @type {SyncHook<[ContextModuleFactory]>} */
		contextModuleFactory: new SyncHook(["contextModuleFactory"]),

		// --- compile phases ---
		/** @type {AsyncSeriesHook<[CompilationParams]>} */
		beforeCompile: new AsyncSeriesHook(["params"]),
		/** @type {SyncHook<[CompilationParams]>} */
		compile: new SyncHook(["params"]),
		/** @type {AsyncParallelHook<[Compilation]>} */
		make: new AsyncParallelHook(["compilation"]),
		// NOTE: typedef corrected — finishMake is constructed as an
		// AsyncSeriesHook, not an AsyncParallelHook as previously annotated.
		/** @type {AsyncSeriesHook<[Compilation]>} */
		finishMake: new AsyncSeriesHook(["compilation"]),
		/** @type {AsyncSeriesHook<[Compilation]>} */
		afterCompile: new AsyncSeriesHook(["compilation"]),

		// --- records ---
		/** @type {AsyncSeriesHook<[]>} */
		readRecords: new AsyncSeriesHook([]),
		/** @type {AsyncSeriesHook<[]>} */
		emitRecords: new AsyncSeriesHook([]),

		// --- watch mode / shutdown ---
		/** @type {AsyncSeriesHook<[Compiler]>} */
		watchRun: new AsyncSeriesHook(["compiler"]),
		/** @type {SyncHook<[Error]>} */
		failed: new SyncHook(["error"]),
		/** @type {SyncHook<[string | null, number]>} */
		invalid: new SyncHook(["filename", "changeTime"]),
		/** @type {SyncHook<[]>} */
		watchClose: new SyncHook([]),
		/** @type {AsyncSeriesHook<[]>} */
		shutdown: new AsyncSeriesHook([]),

		/** @type {SyncBailHook<[string, string, any[]], true>} */
		infrastructureLog: new SyncBailHook(["origin", "type", "args"]),

		// TODO the following hooks are weirdly located here
		// TODO move them for webpack 5
		/** @type {SyncHook<[]>} */
		environment: new SyncHook([]),
		/** @type {SyncHook<[]>} */
		afterEnvironment: new SyncHook([]),
		/** @type {SyncHook<[Compiler]>} */
		afterPlugins: new SyncHook(["compiler"]),
		/** @type {SyncHook<[Compiler]>} */
		afterResolvers: new SyncHook(["compiler"]),
		/** @type {SyncBailHook<[string, Entry], boolean>} */
		entryOption: new SyncBailHook(["context", "entry"])
	});

	// Reference back to the webpack facade so plugins can reach webpack
	// exports without importing a (possibly different) webpack instance.
	this.webpack = webpack;

	/** @type {string=} */
	this.name = undefined;
	/** @type {Compilation=} */
	this.parentCompilation = undefined;
	// Root compiler; child compilers share this reference.
	/** @type {Compiler} */
	this.root = this;
	/** @type {string} */
	this.outputPath = "";
	/** @type {Watching} */
	this.watching = undefined;

	// File systems are injected later (e.g. by WebpackOptionsApply / Node env).
	/** @type {OutputFileSystem} */
	this.outputFileSystem = null;
	/** @type {IntermediateFileSystem} */
	this.intermediateFileSystem = null;
	/** @type {InputFileSystem} */
	this.inputFileSystem = null;
	/** @type {WatchFileSystem} */
	this.watchFileSystem = null;

	/** @type {string|null} */
	this.recordsInputPath = null;
	/** @type {string|null} */
	this.recordsOutputPath = null;
	this.records = {};
	/** @type {Set<string | RegExp>} */
	this.managedPaths = new Set();
	/** @type {Set<string | RegExp>} */
	this.immutablePaths = new Set();

	// Watch-mode change tracking; populated between builds.
	/** @type {ReadonlySet<string>} */
	this.modifiedFiles = undefined;
	/** @type {ReadonlySet<string>} */
	this.removedFiles = undefined;
	/** @type {ReadonlyMap<string, FileSystemInfoEntry | "ignore" | null>} */
	this.fileTimestamps = undefined;
	/** @type {ReadonlyMap<string, FileSystemInfoEntry | "ignore" | null>} */
	this.contextTimestamps = undefined;
	/** @type {number} */
	this.fsStartTime = undefined;

	/** @type {ResolverFactory} */
	this.resolverFactory = new ResolverFactory();

	this.infrastructureLogger = undefined;

	this.options = options;

	this.context = context;

	this.requestShortener = new RequestShortener(context, this.root);

	this.cache = new Cache();

	/** @type {Map<Module, { buildInfo: object, references: WeakMap<Dependency, Module>, memCache: WeakTupleMap }> | undefined} */
	this.moduleMemCaches = undefined;

	// Prefix used to namespace cache entries of this compiler (see getCache).
	this.compilerPath = "";

	// State flags guarding concurrent run()/watch() calls.
	/** @type {boolean} */
	this.running = false;

	/** @type {boolean} */
	this.idle = false;

	/** @type {boolean} */
	this.watchMode = false;

	this._backCompat = this.options.experiments.backCompat !== false;

	// Kept between builds for cache cleanup (see _cleanupLastCompilation).
	/** @type {Compilation} */
	this._lastCompilation = undefined;
	/** @type {NormalModuleFactory} */
	this._lastNormalModuleFactory = undefined;

	/** @private @type {WeakMap<Source, { sizeOnlySource: SizeOnlySource, writtenTo: Map<string, number> }>} */
	this._assetEmittingSourceCache = new WeakMap();
	/** @private @type {Map<string, number>} */
	this._assetEmittingWrittenFiles = new Map();
	/** @private @type {Set<string>} */
	this._assetEmittingPreviousFiles = new Set();
}
|
||
|
|
||
|
/**
|
||
|
* @param {string} name cache name
|
||
|
* @returns {CacheFacade} the cache facade instance
|
||
|
*/
|
||
|
getCache(name) {
|
||
|
return new CacheFacade(
|
||
|
this.cache,
|
||
|
`${this.compilerPath}${name}`,
|
||
|
this.options.output.hashFunction
|
||
|
);
|
||
|
}
|
||
|
|
||
|
/**
|
||
|
* @param {string | (function(): string)} name name of the logger, or function called once to get the logger name
|
||
|
* @returns {Logger} a logger with that name
|
||
|
*/
|
||
|
/**
 * Creates an infrastructure Logger with the given name. The name may be a
 * function for lazy evaluation: it is called at most once on first use and
 * its result is memoized by reassigning the closed-over `name` binding.
 * Log entries first go through the `infrastructureLog` bail hook; only when
 * no tap returns a value is the configured `infrastructureLogger` invoked.
 * @param {string | (function(): string)} name name of the logger, or function called once to get the logger name
 * @returns {Logger} a logger with that name
 */
getInfrastructureLogger(name) {
	if (!name) {
		throw new TypeError(
			"Compiler.getInfrastructureLogger(name) called without a name"
		);
	}
	return new Logger(
		// Log sink: resolves a lazy name, then dispatches the entry.
		(type, args) => {
			if (typeof name === "function") {
				// Memoize: replace the function with its result.
				name = name();
				if (!name) {
					throw new TypeError(
						"Compiler.getInfrastructureLogger(name) called with a function not returning a name"
					);
				}
			}
			// The bail hook can claim the entry; undefined means "not handled".
			if (this.hooks.infrastructureLog.call(name, type, args) === undefined) {
				if (this.infrastructureLogger !== undefined) {
					this.infrastructureLogger(name, type, args);
				}
			}
		},
		// Child-logger factory: combines parent and child names as
		// `${name}/${childName}`. Four branches cover each combination of
		// eager/lazy parent and child names; any lazy part defers resolution
		// by returning a new lazy name function.
		childName => {
			if (typeof name === "function") {
				if (typeof childName === "function") {
					// lazy parent + lazy child
					return this.getInfrastructureLogger(() => {
						if (typeof name === "function") {
							name = name();
							if (!name) {
								throw new TypeError(
									"Compiler.getInfrastructureLogger(name) called with a function not returning a name"
								);
							}
						}
						if (typeof childName === "function") {
							childName = childName();
							if (!childName) {
								throw new TypeError(
									"Logger.getChildLogger(name) called with a function not returning a name"
								);
							}
						}
						return `${name}/${childName}`;
					});
				} else {
					// lazy parent + eager child
					return this.getInfrastructureLogger(() => {
						if (typeof name === "function") {
							name = name();
							if (!name) {
								throw new TypeError(
									"Compiler.getInfrastructureLogger(name) called with a function not returning a name"
								);
							}
						}
						return `${name}/${childName}`;
					});
				}
			} else {
				if (typeof childName === "function") {
					// eager parent + lazy child
					return this.getInfrastructureLogger(() => {
						if (typeof childName === "function") {
							childName = childName();
							if (!childName) {
								throw new TypeError(
									"Logger.getChildLogger(name) called with a function not returning a name"
								);
							}
						}
						return `${name}/${childName}`;
					});
				} else {
					// eager parent + eager child: combine immediately
					return this.getInfrastructureLogger(`${name}/${childName}`);
				}
			}
		}
	);
}
|
||
|
|
||
|
// TODO webpack 6: solve this in a better way
|
||
|
// e.g. move compilation specific info from Modules into ModuleGraph
|
||
|
_cleanupLastCompilation() {
|
||
|
if (this._lastCompilation !== undefined) {
|
||
|
for (const module of this._lastCompilation.modules) {
|
||
|
ChunkGraph.clearChunkGraphForModule(module);
|
||
|
ModuleGraph.clearModuleGraphForModule(module);
|
||
|
module.cleanupForCache();
|
||
|
}
|
||
|
for (const chunk of this._lastCompilation.chunks) {
|
||
|
ChunkGraph.clearChunkGraphForChunk(chunk);
|
||
|
}
|
||
|
this._lastCompilation = undefined;
|
||
|
}
|
||
|
}
|
||
|
|
||
|
// TODO webpack 6: solve this in a better way
|
||
|
_cleanupLastNormalModuleFactory() {
|
||
|
if (this._lastNormalModuleFactory !== undefined) {
|
||
|
this._lastNormalModuleFactory.cleanupForCache();
|
||
|
this._lastNormalModuleFactory = undefined;
|
||
|
}
|
||
|
}
|
||
|
|
||
|
/**
|
||
|
* @param {WatchOptions} watchOptions the watcher's options
|
||
|
* @param {Callback<Stats>} handler signals when the call finishes
|
||
|
* @returns {Watching} a compiler watcher
|
||
|
*/
|
||
|
watch(watchOptions, handler) {
|
||
|
if (this.running) {
|
||
|
return handler(new ConcurrentCompilationError());
|
||
|
}
|
||
|
|
||
|
this.running = true;
|
||
|
this.watchMode = true;
|
||
|
this.watching = new Watching(this, watchOptions, handler);
|
||
|
return this.watching;
|
||
|
}
|
||
|
|
||
|
/**
|
||
|
* @param {Callback<Stats>} callback signals when the call finishes
|
||
|
* @returns {void}
|
||
|
*/
|
||
|
/**
 * Runs a single build: beforeRun → run → readRecords → compile → (emit
 * assets, emit records, done hook), with an optional additional pass when a
 * plugin requests one via `compilation.hooks.needAdditionalPass`.
 * Every exit path funnels through `finalCallback`, which moves the cache
 * back to idle, clears the `running` flag and fires `failed`/`afterDone`.
 * @param {Callback<Stats>} callback signals when the call finishes
 * @returns {void}
 */
run(callback) {
	// Only one run()/watch() may be active at a time.
	if (this.running) {
		return callback(new ConcurrentCompilationError());
	}

	// Assigned once a compilation exists; guarded with `if (logger)` below.
	let logger;

	const finalCallback = (err, stats) => {
		if (logger) logger.time("beginIdle");
		this.idle = true;
		this.cache.beginIdle();
		// NOTE(review): `this.idle = true` is assigned twice (before and after
		// beginIdle) — looks redundant; kept as-is to preserve behavior.
		this.idle = true;
		if (logger) logger.timeEnd("beginIdle");
		this.running = false;
		if (err) {
			this.hooks.failed.call(err);
		}
		if (callback !== undefined) callback(err, stats);
		this.hooks.afterDone.call(stats);
	};

	const startTime = Date.now();

	this.running = true;

	const onCompiled = (err, compilation) => {
		if (err) return finalCallback(err);

		// A plugin may veto emitting; finish with stats but without assets.
		if (this.hooks.shouldEmit.call(compilation) === false) {
			compilation.startTime = startTime;
			compilation.endTime = Date.now();
			const stats = new Stats(compilation);
			this.hooks.done.callAsync(stats, err => {
				if (err) return finalCallback(err);
				return finalCallback(null, stats);
			});
			return;
		}

		// Defer emitting to the next tick so compile() can fully unwind first.
		process.nextTick(() => {
			logger = compilation.getLogger("webpack.Compiler");
			logger.time("emitAssets");
			this.emitAssets(compilation, err => {
				logger.timeEnd("emitAssets");
				if (err) return finalCallback(err);

				// A plugin requested another compile pass: run the done hook,
				// then additionalPass, then re-enter compile with this same
				// onCompiled so the flow repeats.
				if (compilation.hooks.needAdditionalPass.call()) {
					compilation.needAdditionalPass = true;

					compilation.startTime = startTime;
					compilation.endTime = Date.now();
					logger.time("done hook");
					const stats = new Stats(compilation);
					this.hooks.done.callAsync(stats, err => {
						logger.timeEnd("done hook");
						if (err) return finalCallback(err);

						this.hooks.additionalPass.callAsync(err => {
							if (err) return finalCallback(err);
							this.compile(onCompiled);
						});
					});
					return;
				}

				// Normal path: write records, fire done, persist build deps.
				logger.time("emitRecords");
				this.emitRecords(err => {
					logger.timeEnd("emitRecords");
					if (err) return finalCallback(err);

					compilation.startTime = startTime;
					compilation.endTime = Date.now();
					logger.time("done hook");
					const stats = new Stats(compilation);
					this.hooks.done.callAsync(stats, err => {
						logger.timeEnd("done hook");
						if (err) return finalCallback(err);
						this.cache.storeBuildDependencies(
							compilation.buildDependencies,
							err => {
								if (err) return finalCallback(err);
								return finalCallback(null, stats);
							}
						);
					});
				});
			});
		});
	};

	const run = () => {
		this.hooks.beforeRun.callAsync(this, err => {
			if (err) return finalCallback(err);

			this.hooks.run.callAsync(this, err => {
				if (err) return finalCallback(err);

				this.readRecords(err => {
					if (err) return finalCallback(err);

					this.compile(onCompiled);
				});
			});
		});
	};

	// Wake the cache from idle (if idling) before starting the run.
	if (this.idle) {
		this.cache.endIdle(err => {
			if (err) return finalCallback(err);

			this.idle = false;
			run();
		});
	} else {
		run();
	}
}
|
||
|
|
||
|
/**
|
||
|
* @param {RunAsChildCallback} callback signals when the call finishes
|
||
|
* @returns {void}
|
||
|
*/
|
||
|
/**
 * Compiles as a child compiler: forwards all produced assets to the parent
 * compilation and reports the entry chunks back through the callback.
 * Errors thrown by the user callback itself are caught and recorded as
 * errors on the parent compilation instead of crashing the build.
 * @param {RunAsChildCallback} callback signals when the call finishes
 * @returns {void}
 */
runAsChild(callback) {
	const startTime = Date.now();

	const finalCallback = (err, entries, compilation) => {
		try {
			callback(err, entries, compilation);
		} catch (e) {
			// Note: this `err` deliberately shadows the outer parameter — it
			// wraps the callback's own failure, not the compile error.
			const err = new WebpackError(
				`compiler.runAsChild callback error: ${e}`
			);
			err.details = e.stack;
			this.parentCompilation.errors.push(err);
		}
	};

	this.compile((err, compilation) => {
		// On compile failure, entries/compilation are left undefined.
		if (err) return finalCallback(err);

		this.parentCompilation.children.push(compilation);
		// Re-emit every child asset on the parent so it lands in the output.
		for (const { name, source, info } of compilation.getAssets()) {
			this.parentCompilation.emitAsset(name, source, info);
		}

		// Collect all chunks belonging to the child's entrypoints.
		const entries = [];
		for (const ep of compilation.entrypoints.values()) {
			entries.push(...ep.chunks);
		}

		compilation.startTime = startTime;
		compilation.endTime = Date.now();

		return finalCallback(null, entries, compilation);
	});
}
|
||
|
|
||
|
purgeInputFileSystem() {
|
||
|
if (this.inputFileSystem && this.inputFileSystem.purge) {
|
||
|
this.inputFileSystem.purge();
|
||
|
}
|
||
|
}
|
||
|
|
||
|
	/**
	 * Write all of the compilation's assets to the output file system:
	 * de-duplicates identical content, skips already-written immutable files,
	 * compares on-disk content before rewriting (when enabled) and guards
	 * against collisions on case-insensitive file systems.
	 * @param {Compilation} compilation the compilation
	 * @param {Callback<void>} callback signals when the assets are emitted
	 * @returns {void}
	 */
	emitAssets(compilation, callback) {
		// resolved output directory; assigned once the `emit` hook has run
		let outputPath;

		// Emits every asset of the compilation (invoked after mkdirp of outputPath).
		const emitFiles = err => {
			if (err) return callback(err);

			const assets = compilation.getAssets();
			// shallow-copy so asset replacements below don't mutate a shared object
			compilation.assets = { ...compilation.assets };
			/** @type {Map<string, { path: string, source: Source, size: number, waiting: { cacheEntry: any, file: string }[] }>} */
			const caseInsensitiveMap = new Map();
			/** @type {Set<string>} */
			const allTargetPaths = new Set();
			asyncLib.forEachLimit(
				assets,
				// write at most 15 files concurrently
				15,
				({ name: file, source, info }, callback) => {
					let targetFile = file;
					let immutable = info.immutable;
					const queryStringIdx = targetFile.indexOf("?");
					if (queryStringIdx >= 0) {
						targetFile = targetFile.slice(0, queryStringIdx);
						// We may remove the hash, which is in the query string
						// So we recheck if the file is immutable
						// This doesn't cover all cases, but immutable is only a performance optimization anyway
						immutable =
							immutable &&
							(includesHash(targetFile, info.contenthash) ||
								includesHash(targetFile, info.chunkhash) ||
								includesHash(targetFile, info.modulehash) ||
								includesHash(targetFile, info.fullhash));
					}

					// Writes this single asset (invoked after mkdirp of its directory).
					const writeOut = err => {
						if (err) return callback(err);
						const targetPath = join(
							this.outputFileSystem,
							outputPath,
							targetFile
						);
						allTargetPaths.add(targetPath);

						// check if the target file has already been written by this Compiler
						const targetFileGeneration =
							this._assetEmittingWrittenFiles.get(targetPath);

						// create a cache entry for this Source if not already existing
						let cacheEntry = this._assetEmittingSourceCache.get(source);
						if (cacheEntry === undefined) {
							cacheEntry = {
								sizeOnlySource: undefined,
								writtenTo: new Map()
							};
							this._assetEmittingSourceCache.set(source, cacheEntry);
						}

						// entry in caseInsensitiveMap for this target path (set by checkSimilarFile)
						let similarEntry;

						/**
						 * Detects other assets that map to the same path on a
						 * case-insensitive file system (or differ only by query string).
						 * Returns true when this asset was fully handled here
						 * (deduplicated via alreadyWritten, or failed with an error).
						 */
						const checkSimilarFile = () => {
							const caseInsensitiveTargetPath = targetPath.toLowerCase();
							similarEntry = caseInsensitiveMap.get(caseInsensitiveTargetPath);
							if (similarEntry !== undefined) {
								const { path: other, source: otherSource } = similarEntry;
								if (isSourceEqual(otherSource, source)) {
									// Size may or may not be available at this point.
									// If it's not available add to "waiting" list and it will be updated once available
									if (similarEntry.size !== undefined) {
										updateWithReplacementSource(similarEntry.size);
									} else {
										if (!similarEntry.waiting) similarEntry.waiting = [];
										similarEntry.waiting.push({ file, cacheEntry });
									}
									alreadyWritten();
								} else {
									const err =
										new WebpackError(`Prevent writing to file that only differs in casing or query string from already written file.
This will lead to a race-condition and corrupted files on case-insensitive file systems.
${targetPath}
${other}`);
									err.file = file;
									callback(err);
								}
								return true;
							} else {
								caseInsensitiveMap.set(
									caseInsensitiveTargetPath,
									(similarEntry = {
										path: targetPath,
										source,
										size: undefined,
										waiting: undefined
									})
								);
								return false;
							}
						};

						/**
						 * get the binary (Buffer) content from the Source
						 * @returns {Buffer} content for the source
						 */
						const getContent = () => {
							if (typeof source.buffer === "function") {
								return source.buffer();
							} else {
								const bufferOrString = source.source();
								if (Buffer.isBuffer(bufferOrString)) {
									return bufferOrString;
								} else {
									return Buffer.from(bufferOrString, "utf8");
								}
							}
						};

						// Records that the file on disk already holds this Source, then
						// finishes this asset without writing.
						const alreadyWritten = () => {
							// cache the information that the Source has already been written to that location
							if (targetFileGeneration === undefined) {
								const newGeneration = 1;
								this._assetEmittingWrittenFiles.set(targetPath, newGeneration);
								cacheEntry.writtenTo.set(targetPath, newGeneration);
							} else {
								cacheEntry.writtenTo.set(targetPath, targetFileGeneration);
							}
							callback();
						};

						/**
						 * Write the file to output file system
						 * @param {Buffer} content content to be written
						 * @returns {void}
						 */
						const doWrite = content => {
							this.outputFileSystem.writeFile(targetPath, content, err => {
								if (err) return callback(err);

								// information marker that the asset has been emitted
								compilation.emittedAssets.add(file);

								// cache the information that the Source has been written to that location
								const newGeneration =
									targetFileGeneration === undefined
										? 1
										: targetFileGeneration + 1;
								cacheEntry.writtenTo.set(targetPath, newGeneration);
								this._assetEmittingWrittenFiles.set(targetPath, newGeneration);
								this.hooks.assetEmitted.callAsync(
									file,
									{
										content,
										source,
										outputPath,
										compilation,
										targetPath
									},
									callback
								);
							});
						};

						// Once the size is known, replace the Source of this asset (and of
						// any assets waiting on the same content) with a size-only stub.
						const updateWithReplacementSource = size => {
							updateFileWithReplacementSource(file, cacheEntry, size);
							similarEntry.size = size;
							if (similarEntry.waiting !== undefined) {
								for (const { file, cacheEntry } of similarEntry.waiting) {
									updateFileWithReplacementSource(file, cacheEntry, size);
								}
							}
						};

						const updateFileWithReplacementSource = (
							file,
							cacheEntry,
							size
						) => {
							// Create a replacement resource which only allows to ask for size
							// This allows to GC all memory allocated by the Source
							// (except when the Source is stored in any other cache)
							if (!cacheEntry.sizeOnlySource) {
								cacheEntry.sizeOnlySource = new SizeOnlySource(size);
							}
							compilation.updateAsset(file, cacheEntry.sizeOnlySource, {
								size
							});
						};

						// Handles the case where the target file already exists on disk.
						const processExistingFile = stats => {
							// skip emitting if it's already there and an immutable file
							if (immutable) {
								updateWithReplacementSource(stats.size);
								return alreadyWritten();
							}

							const content = getContent();

							updateWithReplacementSource(content.length);

							// if it exists and content on disk matches content
							// skip writing the same content again
							// (to keep mtime and don't trigger watchers)
							// for a fast negative match file size is compared first
							if (content.length === stats.size) {
								compilation.comparedForEmitAssets.add(file);
								return this.outputFileSystem.readFile(
									targetPath,
									(err, existingContent) => {
										if (
											err ||
											!content.equals(/** @type {Buffer} */ (existingContent))
										) {
											return doWrite(content);
										} else {
											return alreadyWritten();
										}
									}
								);
							}

							return doWrite(content);
						};

						// Handles the case where the target file does not exist yet.
						const processMissingFile = () => {
							const content = getContent();

							updateWithReplacementSource(content.length);

							return doWrite(content);
						};

						// if the target file has already been written
						if (targetFileGeneration !== undefined) {
							// check if the Source has been written to this target file
							const writtenGeneration = cacheEntry.writtenTo.get(targetPath);
							if (writtenGeneration === targetFileGeneration) {
								// if yes, we may skip writing the file
								// if it's already there
								// (we assume one doesn't modify files while the Compiler is running, other than removing them)

								if (this._assetEmittingPreviousFiles.has(targetPath)) {
									// We assume that assets from the last compilation stay intact on disk (they are not removed)
									compilation.updateAsset(file, cacheEntry.sizeOnlySource, {
										size: cacheEntry.sizeOnlySource.size()
									});

									return callback();
								} else {
									// Setting immutable will make it accept file content without comparing when the file exists
									immutable = true;
								}
							} else if (!immutable) {
								if (checkSimilarFile()) return;
								// We wrote to this file before which has very likely a different content
								// skip comparing and assume content is different for performance
								// This case happens often during watch mode.
								return processMissingFile();
							}
						}

						if (checkSimilarFile()) return;
						if (this.options.output.compareBeforeEmit) {
							this.outputFileSystem.stat(targetPath, (err, stats) => {
								const exists = !err && stats.isFile();

								if (exists) {
									processExistingFile(stats);
								} else {
									processMissingFile();
								}
							});
						} else {
							processMissingFile();
						}
					};

					// Ensure the asset's directory exists before writing when the file
					// name contains a path separator.
					if (targetFile.match(/\/|\\/)) {
						const fs = this.outputFileSystem;
						const dir = dirname(fs, join(fs, outputPath, targetFile));
						mkdirp(fs, dir, writeOut);
					} else {
						writeOut();
					}
				},
				err => {
					// Clear map to free up memory
					caseInsensitiveMap.clear();
					if (err) {
						this._assetEmittingPreviousFiles.clear();
						return callback(err);
					}

					this._assetEmittingPreviousFiles = allTargetPaths;

					this.hooks.afterEmit.callAsync(compilation, err => {
						if (err) return callback(err);

						return callback();
					});
				}
			);
		};

		this.hooks.emit.callAsync(compilation, err => {
			if (err) return callback(err);
			outputPath = compilation.getPath(this.outputPath, {});
			mkdirp(this.outputFileSystem, outputPath, emitFiles);
		});
	}
|
||
|
|
||
|
/**
|
||
|
* @param {Callback<void>} callback signals when the call finishes
|
||
|
* @returns {void}
|
||
|
*/
|
||
|
emitRecords(callback) {
|
||
|
if (this.hooks.emitRecords.isUsed()) {
|
||
|
if (this.recordsOutputPath) {
|
||
|
asyncLib.parallel(
|
||
|
[
|
||
|
cb => this.hooks.emitRecords.callAsync(cb),
|
||
|
this._emitRecords.bind(this)
|
||
|
],
|
||
|
err => callback(err)
|
||
|
);
|
||
|
} else {
|
||
|
this.hooks.emitRecords.callAsync(callback);
|
||
|
}
|
||
|
} else {
|
||
|
if (this.recordsOutputPath) {
|
||
|
this._emitRecords(callback);
|
||
|
} else {
|
||
|
callback();
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
|
||
|
/**
|
||
|
* @param {Callback<void>} callback signals when the call finishes
|
||
|
* @returns {void}
|
||
|
*/
|
||
|
_emitRecords(callback) {
|
||
|
const writeFile = () => {
|
||
|
this.outputFileSystem.writeFile(
|
||
|
this.recordsOutputPath,
|
||
|
JSON.stringify(
|
||
|
this.records,
|
||
|
(n, value) => {
|
||
|
if (
|
||
|
typeof value === "object" &&
|
||
|
value !== null &&
|
||
|
!Array.isArray(value)
|
||
|
) {
|
||
|
const keys = Object.keys(value);
|
||
|
if (!isSorted(keys)) {
|
||
|
return sortObject(value, keys);
|
||
|
}
|
||
|
}
|
||
|
return value;
|
||
|
},
|
||
|
2
|
||
|
),
|
||
|
callback
|
||
|
);
|
||
|
};
|
||
|
|
||
|
const recordsOutputPathDirectory = dirname(
|
||
|
this.outputFileSystem,
|
||
|
this.recordsOutputPath
|
||
|
);
|
||
|
if (!recordsOutputPathDirectory) {
|
||
|
return writeFile();
|
||
|
}
|
||
|
mkdirp(this.outputFileSystem, recordsOutputPathDirectory, err => {
|
||
|
if (err) return callback(err);
|
||
|
writeFile();
|
||
|
});
|
||
|
}
|
||
|
|
||
|
/**
|
||
|
* @param {Callback<void>} callback signals when the call finishes
|
||
|
* @returns {void}
|
||
|
*/
|
||
|
readRecords(callback) {
|
||
|
if (this.hooks.readRecords.isUsed()) {
|
||
|
if (this.recordsInputPath) {
|
||
|
asyncLib.parallel([
|
||
|
cb => this.hooks.readRecords.callAsync(cb),
|
||
|
this._readRecords.bind(this)
|
||
|
]);
|
||
|
} else {
|
||
|
this.records = {};
|
||
|
this.hooks.readRecords.callAsync(callback);
|
||
|
}
|
||
|
} else {
|
||
|
if (this.recordsInputPath) {
|
||
|
this._readRecords(callback);
|
||
|
} else {
|
||
|
this.records = {};
|
||
|
callback();
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
|
||
|
/**
|
||
|
* @param {Callback<void>} callback signals when the call finishes
|
||
|
* @returns {void}
|
||
|
*/
|
||
|
_readRecords(callback) {
|
||
|
if (!this.recordsInputPath) {
|
||
|
this.records = {};
|
||
|
return callback();
|
||
|
}
|
||
|
this.inputFileSystem.stat(this.recordsInputPath, err => {
|
||
|
// It doesn't exist
|
||
|
// We can ignore this.
|
||
|
if (err) return callback();
|
||
|
|
||
|
this.inputFileSystem.readFile(this.recordsInputPath, (err, content) => {
|
||
|
if (err) return callback(err);
|
||
|
|
||
|
try {
|
||
|
this.records = parseJson(content.toString("utf-8"));
|
||
|
} catch (e) {
|
||
|
e.message = "Cannot parse records: " + e.message;
|
||
|
return callback(e);
|
||
|
}
|
||
|
|
||
|
return callback();
|
||
|
});
|
||
|
});
|
||
|
}
|
||
|
|
||
|
/**
|
||
|
* @param {Compilation} compilation the compilation
|
||
|
* @param {string} compilerName the compiler's name
|
||
|
* @param {number} compilerIndex the compiler's index
|
||
|
* @param {OutputOptions=} outputOptions the output options
|
||
|
* @param {WebpackPluginInstance[]=} plugins the plugins to apply
|
||
|
* @returns {Compiler} a child compiler
|
||
|
*/
|
||
|
createChildCompiler(
|
||
|
compilation,
|
||
|
compilerName,
|
||
|
compilerIndex,
|
||
|
outputOptions,
|
||
|
plugins
|
||
|
) {
|
||
|
const childCompiler = new Compiler(this.context, {
|
||
|
...this.options,
|
||
|
output: {
|
||
|
...this.options.output,
|
||
|
...outputOptions
|
||
|
}
|
||
|
});
|
||
|
childCompiler.name = compilerName;
|
||
|
childCompiler.outputPath = this.outputPath;
|
||
|
childCompiler.inputFileSystem = this.inputFileSystem;
|
||
|
childCompiler.outputFileSystem = null;
|
||
|
childCompiler.resolverFactory = this.resolverFactory;
|
||
|
childCompiler.modifiedFiles = this.modifiedFiles;
|
||
|
childCompiler.removedFiles = this.removedFiles;
|
||
|
childCompiler.fileTimestamps = this.fileTimestamps;
|
||
|
childCompiler.contextTimestamps = this.contextTimestamps;
|
||
|
childCompiler.fsStartTime = this.fsStartTime;
|
||
|
childCompiler.cache = this.cache;
|
||
|
childCompiler.compilerPath = `${this.compilerPath}${compilerName}|${compilerIndex}|`;
|
||
|
childCompiler._backCompat = this._backCompat;
|
||
|
|
||
|
const relativeCompilerName = makePathsRelative(
|
||
|
this.context,
|
||
|
compilerName,
|
||
|
this.root
|
||
|
);
|
||
|
if (!this.records[relativeCompilerName]) {
|
||
|
this.records[relativeCompilerName] = [];
|
||
|
}
|
||
|
if (this.records[relativeCompilerName][compilerIndex]) {
|
||
|
childCompiler.records = this.records[relativeCompilerName][compilerIndex];
|
||
|
} else {
|
||
|
this.records[relativeCompilerName].push((childCompiler.records = {}));
|
||
|
}
|
||
|
|
||
|
childCompiler.parentCompilation = compilation;
|
||
|
childCompiler.root = this.root;
|
||
|
if (Array.isArray(plugins)) {
|
||
|
for (const plugin of plugins) {
|
||
|
plugin.apply(childCompiler);
|
||
|
}
|
||
|
}
|
||
|
for (const name in this.hooks) {
|
||
|
if (
|
||
|
![
|
||
|
"make",
|
||
|
"compile",
|
||
|
"emit",
|
||
|
"afterEmit",
|
||
|
"invalid",
|
||
|
"done",
|
||
|
"thisCompilation"
|
||
|
].includes(name)
|
||
|
) {
|
||
|
if (childCompiler.hooks[name]) {
|
||
|
childCompiler.hooks[name].taps = this.hooks[name].taps.slice();
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
|
||
|
compilation.hooks.childCompiler.call(
|
||
|
childCompiler,
|
||
|
compilerName,
|
||
|
compilerIndex
|
||
|
);
|
||
|
|
||
|
return childCompiler;
|
||
|
}
|
||
|
|
||
|
isChild() {
|
||
|
return !!this.parentCompilation;
|
||
|
}
|
||
|
|
||
|
createCompilation(params) {
|
||
|
this._cleanupLastCompilation();
|
||
|
return (this._lastCompilation = new Compilation(this, params));
|
||
|
}
|
||
|
|
||
|
/**
|
||
|
* @param {CompilationParams} params the compilation parameters
|
||
|
* @returns {Compilation} the created compilation
|
||
|
*/
|
||
|
newCompilation(params) {
|
||
|
const compilation = this.createCompilation(params);
|
||
|
compilation.name = this.name;
|
||
|
compilation.records = this.records;
|
||
|
this.hooks.thisCompilation.call(compilation, params);
|
||
|
this.hooks.compilation.call(compilation, params);
|
||
|
return compilation;
|
||
|
}
|
||
|
|
||
|
createNormalModuleFactory() {
|
||
|
this._cleanupLastNormalModuleFactory();
|
||
|
const normalModuleFactory = new NormalModuleFactory({
|
||
|
context: this.options.context,
|
||
|
fs: this.inputFileSystem,
|
||
|
resolverFactory: this.resolverFactory,
|
||
|
options: this.options.module,
|
||
|
associatedObjectForCache: this.root,
|
||
|
layers: this.options.experiments.layers
|
||
|
});
|
||
|
this._lastNormalModuleFactory = normalModuleFactory;
|
||
|
this.hooks.normalModuleFactory.call(normalModuleFactory);
|
||
|
return normalModuleFactory;
|
||
|
}
|
||
|
|
||
|
createContextModuleFactory() {
|
||
|
const contextModuleFactory = new ContextModuleFactory(this.resolverFactory);
|
||
|
this.hooks.contextModuleFactory.call(contextModuleFactory);
|
||
|
return contextModuleFactory;
|
||
|
}
|
||
|
|
||
|
newCompilationParams() {
|
||
|
const params = {
|
||
|
normalModuleFactory: this.createNormalModuleFactory(),
|
||
|
contextModuleFactory: this.createContextModuleFactory()
|
||
|
};
|
||
|
return params;
|
||
|
}
|
||
|
|
||
|
	/**
	 * Run a single compilation through its phases — beforeCompile → compile →
	 * make → finishMake → finish → seal → afterCompile — logging the duration
	 * of each phase and propagating the first error to the callback.
	 * @param {Callback<Compilation>} callback signals when the compilation finishes
	 * @returns {void}
	 */
	compile(callback) {
		const params = this.newCompilationParams();
		this.hooks.beforeCompile.callAsync(params, err => {
			if (err) return callback(err);

			this.hooks.compile.call(params);

			const compilation = this.newCompilation(params);

			const logger = compilation.getLogger("webpack.Compiler");

			logger.time("make hook");
			this.hooks.make.callAsync(compilation, err => {
				logger.timeEnd("make hook");
				if (err) return callback(err);

				logger.time("finish make hook");
				this.hooks.finishMake.callAsync(compilation, err => {
					logger.timeEnd("finish make hook");
					if (err) return callback(err);

					// Defer to the next tick — presumably to let pending microtasks
					// from the make phase settle before finishing; TODO confirm intent.
					process.nextTick(() => {
						logger.time("finish compilation");
						compilation.finish(err => {
							logger.timeEnd("finish compilation");
							if (err) return callback(err);

							logger.time("seal compilation");
							compilation.seal(err => {
								logger.timeEnd("seal compilation");
								if (err) return callback(err);

								logger.time("afterCompile hook");
								this.hooks.afterCompile.callAsync(compilation, err => {
									logger.timeEnd("afterCompile hook");
									if (err) return callback(err);

									// success: hand the finished compilation to the caller
									return callback(null, compilation);
								});
							});
						});
					});
				});
			});
		});
	}
|
||
|
|
||
|
/**
|
||
|
* @param {Callback<void>} callback signals when the compiler closes
|
||
|
* @returns {void}
|
||
|
*/
|
||
|
close(callback) {
|
||
|
if (this.watching) {
|
||
|
// When there is still an active watching, close this first
|
||
|
this.watching.close(err => {
|
||
|
this.close(callback);
|
||
|
});
|
||
|
return;
|
||
|
}
|
||
|
this.hooks.shutdown.callAsync(err => {
|
||
|
if (err) return callback(err);
|
||
|
// Get rid of reference to last compilation to avoid leaking memory
|
||
|
// We can't run this._cleanupLastCompilation() as the Stats to this compilation
|
||
|
// might be still in use. We try to get rid of the reference to the cache instead.
|
||
|
this._lastCompilation = undefined;
|
||
|
this._lastNormalModuleFactory = undefined;
|
||
|
this.cache.shutdown(callback);
|
||
|
});
|
||
|
}
|
||
|
}
|
||
|
|
||
|
// The Compiler class is this module's sole public export.
module.exports = Compiler;
|