8889841cMinMaxSizeWarning.js000066600000001463150441747060010474 0ustar00/* MIT License http://www.opensource.org/licenses/mit-license.php Author Tobias Koppers @sokra */ "use strict"; const SizeFormatHelpers = require("../SizeFormatHelpers"); const WebpackError = require("../WebpackError"); class MinMaxSizeWarning extends WebpackError { constructor(keys, minSize, maxSize) { let keysMessage = "Fallback cache group"; if (keys) { keysMessage = keys.length > 1 ? `Cache groups ${keys.sort().join(", ")}` : `Cache group ${keys[0]}`; } super( `SplitChunksPlugin\n` + `${keysMessage}\n` + `Configured minSize (${SizeFormatHelpers.formatSize(minSize)}) is ` + `bigger than maxSize (${SizeFormatHelpers.formatSize(maxSize)}).\n` + "This seem to be a invalid optimization.splitChunks configuration." ); } } module.exports = MinMaxSizeWarning; RealContentHashPlugin.js000066600000030150150441747060011316 0ustar00/* MIT License http://www.opensource.org/licenses/mit-license.php Author Tobias Koppers @sokra */ "use strict"; const { SyncBailHook } = require("tapable"); const { RawSource, CachedSource, CompatSource } = require("webpack-sources"); const Compilation = require("../Compilation"); const WebpackError = require("../WebpackError"); const { compareSelect, compareStrings } = require("../util/comparators"); const createHash = require("../util/createHash"); /** @typedef {import("webpack-sources").Source} Source */ /** @typedef {import("../Compilation").AssetInfo} AssetInfo */ /** @typedef {import("../Compiler")} Compiler */ const EMPTY_SET = new Set(); const addToList = (itemOrItems, list) => { if (Array.isArray(itemOrItems)) { for (const item of itemOrItems) { list.add(item); } } else if (itemOrItems) { list.add(itemOrItems); } }; /** * @template T * @param {T[]} input list * @param {function(T): Buffer} fn map function * @returns {Buffer[]} buffers without duplicates */ const mapAndDeduplicateBuffers = (input, fn) => { // Buffer.equals compares size first so this should be 
efficient enough // If it becomes a performance problem we can use a map and group by size // instead of looping over all assets. const result = []; outer: for (const value of input) { const buf = fn(value); for (const other of result) { if (buf.equals(other)) continue outer; } result.push(buf); } return result; }; /** * Escapes regular expression metacharacters * @param {string} str String to quote * @returns {string} Escaped string */ const quoteMeta = str => { return str.replace(/[-[\]\\/{}()*+?.^$|]/g, "\\$&"); }; const cachedSourceMap = new WeakMap(); const toCachedSource = source => { if (source instanceof CachedSource) { return source; } const entry = cachedSourceMap.get(source); if (entry !== undefined) return entry; const newSource = new CachedSource(CompatSource.from(source)); cachedSourceMap.set(source, newSource); return newSource; }; /** * @typedef {Object} AssetInfoForRealContentHash * @property {string} name * @property {AssetInfo} info * @property {Source} source * @property {RawSource | undefined} newSource * @property {RawSource | undefined} newSourceWithoutOwn * @property {string} content * @property {Set} ownHashes * @property {Promise} contentComputePromise * @property {Promise} contentComputeWithoutOwnPromise * @property {Set} referencedHashes * @property {Set} hashes */ /** * @typedef {Object} CompilationHooks * @property {SyncBailHook<[Buffer[], string], string>} updateHash */ /** @type {WeakMap} */ const compilationHooksMap = new WeakMap(); class RealContentHashPlugin { /** * @param {Compilation} compilation the compilation * @returns {CompilationHooks} the attached hooks */ static getCompilationHooks(compilation) { if (!(compilation instanceof Compilation)) { throw new TypeError( "The 'compilation' argument must be an instance of Compilation" ); } let hooks = compilationHooksMap.get(compilation); if (hooks === undefined) { hooks = { updateHash: new SyncBailHook(["content", "oldHash"]) }; compilationHooksMap.set(compilation, hooks); } 
return hooks; } constructor({ hashFunction, hashDigest }) { this._hashFunction = hashFunction; this._hashDigest = hashDigest; } /** * Apply the plugin * @param {Compiler} compiler the compiler instance * @returns {void} */ apply(compiler) { compiler.hooks.compilation.tap("RealContentHashPlugin", compilation => { const cacheAnalyse = compilation.getCache( "RealContentHashPlugin|analyse" ); const cacheGenerate = compilation.getCache( "RealContentHashPlugin|generate" ); const hooks = RealContentHashPlugin.getCompilationHooks(compilation); compilation.hooks.processAssets.tapPromise( { name: "RealContentHashPlugin", stage: Compilation.PROCESS_ASSETS_STAGE_OPTIMIZE_HASH }, async () => { const assets = compilation.getAssets(); /** @type {AssetInfoForRealContentHash[]} */ const assetsWithInfo = []; const hashToAssets = new Map(); for (const { source, info, name } of assets) { const cachedSource = toCachedSource(source); const content = cachedSource.source(); /** @type {Set} */ const hashes = new Set(); addToList(info.contenthash, hashes); const data = { name, info, source: cachedSource, /** @type {RawSource | undefined} */ newSource: undefined, /** @type {RawSource | undefined} */ newSourceWithoutOwn: undefined, content, /** @type {Set} */ ownHashes: undefined, contentComputePromise: undefined, contentComputeWithoutOwnPromise: undefined, /** @type {Set} */ referencedHashes: undefined, hashes }; assetsWithInfo.push(data); for (const hash of hashes) { const list = hashToAssets.get(hash); if (list === undefined) { hashToAssets.set(hash, [data]); } else { list.push(data); } } } if (hashToAssets.size === 0) return; const hashRegExp = new RegExp( Array.from(hashToAssets.keys(), quoteMeta).join("|"), "g" ); await Promise.all( assetsWithInfo.map(async asset => { const { name, source, content, hashes } = asset; if (Buffer.isBuffer(content)) { asset.referencedHashes = EMPTY_SET; asset.ownHashes = EMPTY_SET; return; } const etag = cacheAnalyse.mergeEtags( 
cacheAnalyse.getLazyHashedEtag(source), Array.from(hashes).join("|") ); [asset.referencedHashes, asset.ownHashes] = await cacheAnalyse.providePromise(name, etag, () => { const referencedHashes = new Set(); let ownHashes = new Set(); const inContent = content.match(hashRegExp); if (inContent) { for (const hash of inContent) { if (hashes.has(hash)) { ownHashes.add(hash); continue; } referencedHashes.add(hash); } } return [referencedHashes, ownHashes]; }); }) ); const getDependencies = hash => { const assets = hashToAssets.get(hash); if (!assets) { const referencingAssets = assetsWithInfo.filter(asset => asset.referencedHashes.has(hash) ); const err = new WebpackError(`RealContentHashPlugin Some kind of unexpected caching problem occurred. An asset was cached with a reference to another asset (${hash}) that's not in the compilation anymore. Either the asset was incorrectly cached, or the referenced asset should also be restored from cache. Referenced by: ${referencingAssets .map(a => { const match = new RegExp(`.{0,20}${quoteMeta(hash)}.{0,20}`).exec( a.content ); return ` - ${a.name}: ...${match ? 
match[0] : "???"}...`; }) .join("\n")}`); compilation.errors.push(err); return undefined; } const hashes = new Set(); for (const { referencedHashes, ownHashes } of assets) { if (!ownHashes.has(hash)) { for (const hash of ownHashes) { hashes.add(hash); } } for (const hash of referencedHashes) { hashes.add(hash); } } return hashes; }; const hashInfo = hash => { const assets = hashToAssets.get(hash); return `${hash} (${Array.from(assets, a => a.name)})`; }; const hashesInOrder = new Set(); for (const hash of hashToAssets.keys()) { const add = (hash, stack) => { const deps = getDependencies(hash); if (!deps) return; stack.add(hash); for (const dep of deps) { if (hashesInOrder.has(dep)) continue; if (stack.has(dep)) { throw new Error( `Circular hash dependency ${Array.from( stack, hashInfo ).join(" -> ")} -> ${hashInfo(dep)}` ); } add(dep, stack); } hashesInOrder.add(hash); stack.delete(hash); }; if (hashesInOrder.has(hash)) continue; add(hash, new Set()); } const hashToNewHash = new Map(); const getEtag = asset => cacheGenerate.mergeEtags( cacheGenerate.getLazyHashedEtag(asset.source), Array.from(asset.referencedHashes, hash => hashToNewHash.get(hash) ).join("|") ); const computeNewContent = asset => { if (asset.contentComputePromise) return asset.contentComputePromise; return (asset.contentComputePromise = (async () => { if ( asset.ownHashes.size > 0 || Array.from(asset.referencedHashes).some( hash => hashToNewHash.get(hash) !== hash ) ) { const identifier = asset.name; const etag = getEtag(asset); asset.newSource = await cacheGenerate.providePromise( identifier, etag, () => { const newContent = asset.content.replace(hashRegExp, hash => hashToNewHash.get(hash) ); return new RawSource(newContent); } ); } })()); }; const computeNewContentWithoutOwn = asset => { if (asset.contentComputeWithoutOwnPromise) return asset.contentComputeWithoutOwnPromise; return (asset.contentComputeWithoutOwnPromise = (async () => { if ( asset.ownHashes.size > 0 || 
Array.from(asset.referencedHashes).some( hash => hashToNewHash.get(hash) !== hash ) ) { const identifier = asset.name + "|without-own"; const etag = getEtag(asset); asset.newSourceWithoutOwn = await cacheGenerate.providePromise( identifier, etag, () => { const newContent = asset.content.replace( hashRegExp, hash => { if (asset.ownHashes.has(hash)) { return ""; } return hashToNewHash.get(hash); } ); return new RawSource(newContent); } ); } })()); }; const comparator = compareSelect(a => a.name, compareStrings); for (const oldHash of hashesInOrder) { const assets = hashToAssets.get(oldHash); assets.sort(comparator); await Promise.all( assets.map(asset => asset.ownHashes.has(oldHash) ? computeNewContentWithoutOwn(asset) : computeNewContent(asset) ) ); const assetsContent = mapAndDeduplicateBuffers(assets, asset => { if (asset.ownHashes.has(oldHash)) { return asset.newSourceWithoutOwn ? asset.newSourceWithoutOwn.buffer() : asset.source.buffer(); } else { return asset.newSource ? asset.newSource.buffer() : asset.source.buffer(); } }); let newHash = hooks.updateHash.call(assetsContent, oldHash); if (!newHash) { const hash = createHash(this._hashFunction); if (compilation.outputOptions.hashSalt) { hash.update(compilation.outputOptions.hashSalt); } for (const content of assetsContent) { hash.update(content); } const digest = hash.digest(this._hashDigest); newHash = /** @type {string} */ (digest.slice(0, oldHash.length)); } hashToNewHash.set(oldHash, newHash); } await Promise.all( assetsWithInfo.map(async asset => { await computeNewContent(asset); const newName = asset.name.replace(hashRegExp, hash => hashToNewHash.get(hash) ); const infoUpdate = {}; const hash = asset.info.contenthash; infoUpdate.contenthash = Array.isArray(hash) ? 
hash.map(hash => hashToNewHash.get(hash)) : hashToNewHash.get(hash); if (asset.newSource !== undefined) { compilation.updateAsset( asset.name, asset.newSource, infoUpdate ); } else { compilation.updateAsset(asset.name, asset.source, infoUpdate); } if (asset.name !== newName) { compilation.renameAsset(asset.name, newName); } }) ); } ); }); } } module.exports = RealContentHashPlugin; InnerGraph.js000066600000022002150441747060007147 0ustar00/* MIT License http://www.opensource.org/licenses/mit-license.php Author Sergey Melyukov @smelukov */ "use strict"; const { UsageState } = require("../ExportsInfo"); /** @typedef {import("estree").Node} AnyNode */ /** @typedef {import("../Dependency")} Dependency */ /** @typedef {import("../ModuleGraph")} ModuleGraph */ /** @typedef {import("../ModuleGraphConnection")} ModuleGraphConnection */ /** @typedef {import("../ModuleGraphConnection").ConnectionState} ConnectionState */ /** @typedef {import("../Parser").ParserState} ParserState */ /** @typedef {import("../javascript/JavascriptParser")} JavascriptParser */ /** @typedef {import("../util/runtime").RuntimeSpec} RuntimeSpec */ /** @typedef {Map | true>} InnerGraph */ /** @typedef {function(boolean | Set | undefined): void} UsageCallback */ /** * @typedef {Object} StateObject * @property {InnerGraph} innerGraph * @property {TopLevelSymbol=} currentTopLevelSymbol * @property {Map>} usageCallbackMap */ /** @typedef {false|StateObject} State */ /** @type {WeakMap} */ const parserStateMap = new WeakMap(); const topLevelSymbolTag = Symbol("top level symbol"); /** * @param {ParserState} parserState parser state * @returns {State} state */ function getState(parserState) { return parserStateMap.get(parserState); } /** * @param {ParserState} parserState parser state * @returns {void} */ exports.bailout = parserState => { parserStateMap.set(parserState, false); }; /** * @param {ParserState} parserState parser state * @returns {void} */ exports.enable = parserState => { const state = 
parserStateMap.get(parserState); if (state === false) { return; } parserStateMap.set(parserState, { innerGraph: new Map(), currentTopLevelSymbol: undefined, usageCallbackMap: new Map() }); }; /** * @param {ParserState} parserState parser state * @returns {boolean} true, when enabled */ exports.isEnabled = parserState => { const state = parserStateMap.get(parserState); return !!state; }; /** * @param {ParserState} state parser state * @param {TopLevelSymbol | null} symbol the symbol, or null for all symbols * @param {string | TopLevelSymbol | true} usage usage data * @returns {void} */ exports.addUsage = (state, symbol, usage) => { const innerGraphState = getState(state); if (innerGraphState) { const { innerGraph } = innerGraphState; const info = innerGraph.get(symbol); if (usage === true) { innerGraph.set(symbol, true); } else if (info === undefined) { innerGraph.set(symbol, new Set([usage])); } else if (info !== true) { info.add(usage); } } }; /** * @param {JavascriptParser} parser the parser * @param {string} name name of variable * @param {string | TopLevelSymbol | true} usage usage data * @returns {void} */ exports.addVariableUsage = (parser, name, usage) => { const symbol = /** @type {TopLevelSymbol} */ ( parser.getTagData(name, topLevelSymbolTag) ) || exports.tagTopLevelSymbol(parser, name); if (symbol) { exports.addUsage(parser.state, symbol, usage); } }; /** * @param {ParserState} state parser state * @returns {void} */ exports.inferDependencyUsage = state => { const innerGraphState = getState(state); if (!innerGraphState) { return; } const { innerGraph, usageCallbackMap } = innerGraphState; const processed = new Map(); // flatten graph to terminal nodes (string, undefined or true) const nonTerminal = new Set(innerGraph.keys()); while (nonTerminal.size > 0) { for (const key of nonTerminal) { /** @type {Set | true} */ let newSet = new Set(); let isTerminal = true; const value = innerGraph.get(key); let alreadyProcessed = processed.get(key); if 
(alreadyProcessed === undefined) { alreadyProcessed = new Set(); processed.set(key, alreadyProcessed); } if (value !== true && value !== undefined) { for (const item of value) { alreadyProcessed.add(item); } for (const item of value) { if (typeof item === "string") { newSet.add(item); } else { const itemValue = innerGraph.get(item); if (itemValue === true) { newSet = true; break; } if (itemValue !== undefined) { for (const i of itemValue) { if (i === key) continue; if (alreadyProcessed.has(i)) continue; newSet.add(i); if (typeof i !== "string") { isTerminal = false; } } } } } if (newSet === true) { innerGraph.set(key, true); } else if (newSet.size === 0) { innerGraph.set(key, undefined); } else { innerGraph.set(key, newSet); } } if (isTerminal) { nonTerminal.delete(key); // For the global key, merge with all other keys if (key === null) { const globalValue = innerGraph.get(null); if (globalValue) { for (const [key, value] of innerGraph) { if (key !== null && value !== true) { if (globalValue === true) { innerGraph.set(key, true); } else { const newSet = new Set(value); for (const item of globalValue) { newSet.add(item); } innerGraph.set(key, newSet); } } } } } } } } /** @type {Map>} */ for (const [symbol, callbacks] of usageCallbackMap) { const usage = /** @type {true | Set | undefined} */ ( innerGraph.get(symbol) ); for (const callback of callbacks) { callback(usage === undefined ? 
false : usage); } } }; /** * @param {ParserState} state parser state * @param {UsageCallback} onUsageCallback on usage callback */ exports.onUsage = (state, onUsageCallback) => { const innerGraphState = getState(state); if (innerGraphState) { const { usageCallbackMap, currentTopLevelSymbol } = innerGraphState; if (currentTopLevelSymbol) { let callbacks = usageCallbackMap.get(currentTopLevelSymbol); if (callbacks === undefined) { callbacks = new Set(); usageCallbackMap.set(currentTopLevelSymbol, callbacks); } callbacks.add(onUsageCallback); } else { onUsageCallback(true); } } else { onUsageCallback(undefined); } }; /** * @param {ParserState} state parser state * @param {TopLevelSymbol} symbol the symbol */ exports.setTopLevelSymbol = (state, symbol) => { const innerGraphState = getState(state); if (innerGraphState) { innerGraphState.currentTopLevelSymbol = symbol; } }; /** * @param {ParserState} state parser state * @returns {TopLevelSymbol|void} usage data */ exports.getTopLevelSymbol = state => { const innerGraphState = getState(state); if (innerGraphState) { return innerGraphState.currentTopLevelSymbol; } }; /** * @param {JavascriptParser} parser parser * @param {string} name name of variable * @returns {TopLevelSymbol} symbol */ exports.tagTopLevelSymbol = (parser, name) => { const innerGraphState = getState(parser.state); if (!innerGraphState) return; parser.defineVariable(name); const existingTag = /** @type {TopLevelSymbol} */ ( parser.getTagData(name, topLevelSymbolTag) ); if (existingTag) { return existingTag; } const fn = new TopLevelSymbol(name); parser.tagVariable(name, topLevelSymbolTag, fn); return fn; }; /** * @param {Dependency} dependency the dependency * @param {Set | boolean} usedByExports usedByExports info * @param {ModuleGraph} moduleGraph moduleGraph * @param {RuntimeSpec} runtime runtime * @returns {boolean} false, when unused. 
Otherwise true */ exports.isDependencyUsedByExports = ( dependency, usedByExports, moduleGraph, runtime ) => { if (usedByExports === false) return false; if (usedByExports !== true && usedByExports !== undefined) { const selfModule = moduleGraph.getParentModule(dependency); const exportsInfo = moduleGraph.getExportsInfo(selfModule); let used = false; for (const exportName of usedByExports) { if (exportsInfo.getUsed(exportName, runtime) !== UsageState.Unused) used = true; } if (!used) return false; } return true; }; /** * @param {Dependency} dependency the dependency * @param {Set | boolean} usedByExports usedByExports info * @param {ModuleGraph} moduleGraph moduleGraph * @returns {null | false | function(ModuleGraphConnection, RuntimeSpec): ConnectionState} function to determine if the connection is active */ exports.getDependencyUsedByExportsCondition = ( dependency, usedByExports, moduleGraph ) => { if (usedByExports === false) return false; if (usedByExports !== true && usedByExports !== undefined) { const selfModule = moduleGraph.getParentModule(dependency); const exportsInfo = moduleGraph.getExportsInfo(selfModule); return (connections, runtime) => { for (const exportName of usedByExports) { if (exportsInfo.getUsed(exportName, runtime) !== UsageState.Unused) return true; } return false; }; } return null; }; class TopLevelSymbol { /** * @param {string} name name of the variable */ constructor(name) { this.name = name; } } exports.TopLevelSymbol = TopLevelSymbol; exports.topLevelSymbolTag = topLevelSymbolTag; SideEffectsFlagPlugin.js000066600000024076150441747060011264 0ustar00/* MIT License http://www.opensource.org/licenses/mit-license.php Author Tobias Koppers @sokra */ "use strict"; const glob2regexp = require("glob-to-regexp"); const { STAGE_DEFAULT } = require("../OptimizationStages"); const HarmonyExportImportedSpecifierDependency = require("../dependencies/HarmonyExportImportedSpecifierDependency"); const HarmonyImportSpecifierDependency = 
require("../dependencies/HarmonyImportSpecifierDependency"); const formatLocation = require("../formatLocation"); /** @typedef {import("../Compiler")} Compiler */ /** @typedef {import("../Dependency")} Dependency */ /** @typedef {import("../Module")} Module */ /** @typedef {import("../javascript/JavascriptParser")} JavascriptParser */ /** * @typedef {Object} ExportInModule * @property {Module} module the module * @property {string} exportName the name of the export * @property {boolean} checked if the export is conditional */ /** * @typedef {Object} ReexportInfo * @property {Map} static * @property {Map>} dynamic */ /** @type {WeakMap>} */ const globToRegexpCache = new WeakMap(); /** * @param {string} glob the pattern * @param {Map} cache the glob to RegExp cache * @returns {RegExp} a regular expression */ const globToRegexp = (glob, cache) => { const cacheEntry = cache.get(glob); if (cacheEntry !== undefined) return cacheEntry; if (!glob.includes("/")) { glob = `**/${glob}`; } const baseRegexp = glob2regexp(glob, { globstar: true, extended: true }); const regexpSource = baseRegexp.source; const regexp = new RegExp("^(\\./)?" 
+ regexpSource.slice(1)); cache.set(glob, regexp); return regexp; }; class SideEffectsFlagPlugin { /** * @param {boolean} analyseSource analyse source code for side effects */ constructor(analyseSource = true) { this._analyseSource = analyseSource; } /** * Apply the plugin * @param {Compiler} compiler the compiler instance * @returns {void} */ apply(compiler) { let cache = globToRegexpCache.get(compiler.root); if (cache === undefined) { cache = new Map(); globToRegexpCache.set(compiler.root, cache); } compiler.hooks.compilation.tap( "SideEffectsFlagPlugin", (compilation, { normalModuleFactory }) => { const moduleGraph = compilation.moduleGraph; normalModuleFactory.hooks.module.tap( "SideEffectsFlagPlugin", (module, data) => { const resolveData = data.resourceResolveData; if ( resolveData && resolveData.descriptionFileData && resolveData.relativePath ) { const sideEffects = resolveData.descriptionFileData.sideEffects; if (sideEffects !== undefined) { if (module.factoryMeta === undefined) { module.factoryMeta = {}; } const hasSideEffects = SideEffectsFlagPlugin.moduleHasSideEffects( resolveData.relativePath, sideEffects, cache ); module.factoryMeta.sideEffectFree = !hasSideEffects; } } return module; } ); normalModuleFactory.hooks.module.tap( "SideEffectsFlagPlugin", (module, data) => { if (typeof data.settings.sideEffects === "boolean") { if (module.factoryMeta === undefined) { module.factoryMeta = {}; } module.factoryMeta.sideEffectFree = !data.settings.sideEffects; } return module; } ); if (this._analyseSource) { /** * @param {JavascriptParser} parser the parser * @returns {void} */ const parserHandler = parser => { let sideEffectsStatement; parser.hooks.program.tap("SideEffectsFlagPlugin", () => { sideEffectsStatement = undefined; }); parser.hooks.statement.tap( { name: "SideEffectsFlagPlugin", stage: -100 }, statement => { if (sideEffectsStatement) return; if (parser.scope.topLevelScope !== true) return; switch (statement.type) { case "ExpressionStatement": if ( 
!parser.isPure(statement.expression, statement.range[0]) ) { sideEffectsStatement = statement; } break; case "IfStatement": case "WhileStatement": case "DoWhileStatement": if (!parser.isPure(statement.test, statement.range[0])) { sideEffectsStatement = statement; } // statement hook will be called for child statements too break; case "ForStatement": if ( !parser.isPure(statement.init, statement.range[0]) || !parser.isPure( statement.test, statement.init ? statement.init.range[1] : statement.range[0] ) || !parser.isPure( statement.update, statement.test ? statement.test.range[1] : statement.init ? statement.init.range[1] : statement.range[0] ) ) { sideEffectsStatement = statement; } // statement hook will be called for child statements too break; case "SwitchStatement": if ( !parser.isPure(statement.discriminant, statement.range[0]) ) { sideEffectsStatement = statement; } // statement hook will be called for child statements too break; case "VariableDeclaration": case "ClassDeclaration": case "FunctionDeclaration": if (!parser.isPure(statement, statement.range[0])) { sideEffectsStatement = statement; } break; case "ExportNamedDeclaration": case "ExportDefaultDeclaration": if ( !parser.isPure(statement.declaration, statement.range[0]) ) { sideEffectsStatement = statement; } break; case "LabeledStatement": case "BlockStatement": // statement hook will be called for child statements too break; case "EmptyStatement": break; case "ExportAllDeclaration": case "ImportDeclaration": // imports will be handled by the dependencies break; default: sideEffectsStatement = statement; break; } } ); parser.hooks.finish.tap("SideEffectsFlagPlugin", () => { if (sideEffectsStatement === undefined) { parser.state.module.buildMeta.sideEffectFree = true; } else { const { loc, type } = sideEffectsStatement; moduleGraph .getOptimizationBailout(parser.state.module) .push( () => `Statement (${type}) with side effects in source code at ${formatLocation( loc )}` ); } }); }; for (const key of [ 
"javascript/auto", "javascript/esm", "javascript/dynamic" ]) { normalModuleFactory.hooks.parser .for(key) .tap("SideEffectsFlagPlugin", parserHandler); } } compilation.hooks.optimizeDependencies.tap( { name: "SideEffectsFlagPlugin", stage: STAGE_DEFAULT }, modules => { const logger = compilation.getLogger( "webpack.SideEffectsFlagPlugin" ); logger.time("update dependencies"); for (const module of modules) { if (module.getSideEffectsConnectionState(moduleGraph) === false) { const exportsInfo = moduleGraph.getExportsInfo(module); for (const connection of moduleGraph.getIncomingConnections( module )) { const dep = connection.dependency; let isReexport; if ( (isReexport = dep instanceof HarmonyExportImportedSpecifierDependency) || (dep instanceof HarmonyImportSpecifierDependency && !dep.namespaceObjectAsContext) ) { // TODO improve for export * if (isReexport && dep.name) { const exportInfo = moduleGraph.getExportInfo( connection.originModule, dep.name ); exportInfo.moveTarget( moduleGraph, ({ module }) => module.getSideEffectsConnectionState(moduleGraph) === false, ({ module: newModule, export: exportName }) => { moduleGraph.updateModule(dep, newModule); moduleGraph.addExplanation( dep, "(skipped side-effect-free modules)" ); const ids = dep.getIds(moduleGraph); dep.setIds( moduleGraph, exportName ? [...exportName, ...ids.slice(1)] : ids.slice(1) ); return moduleGraph.getConnection(dep); } ); continue; } // TODO improve for nested imports const ids = dep.getIds(moduleGraph); if (ids.length > 0) { const exportInfo = exportsInfo.getExportInfo(ids[0]); const target = exportInfo.getTarget( moduleGraph, ({ module }) => module.getSideEffectsConnectionState(moduleGraph) === false ); if (!target) continue; moduleGraph.updateModule(dep, target.module); moduleGraph.addExplanation( dep, "(skipped side-effect-free modules)" ); dep.setIds( moduleGraph, target.export ? 
[...target.export, ...ids.slice(1)] : ids.slice(1) ); } } } } } logger.timeEnd("update dependencies"); } ); } ); } static moduleHasSideEffects(moduleName, flagValue, cache) { switch (typeof flagValue) { case "undefined": return true; case "boolean": return flagValue; case "string": return globToRegexp(flagValue, cache).test(moduleName); case "object": return flagValue.some(glob => SideEffectsFlagPlugin.moduleHasSideEffects(moduleName, glob, cache) ); } } } module.exports = SideEffectsFlagPlugin; InnerGraphPlugin.js000066600000027416150441747060010344 0ustar00/* MIT License http://www.opensource.org/licenses/mit-license.php Author Tobias Koppers @sokra */ "use strict"; const PureExpressionDependency = require("../dependencies/PureExpressionDependency"); const InnerGraph = require("./InnerGraph"); /** @typedef {import("estree").ClassDeclaration} ClassDeclarationNode */ /** @typedef {import("estree").ClassExpression} ClassExpressionNode */ /** @typedef {import("estree").Node} Node */ /** @typedef {import("estree").VariableDeclarator} VariableDeclaratorNode */ /** @typedef {import("../Compiler")} Compiler */ /** @typedef {import("../Dependency")} Dependency */ /** @typedef {import("../dependencies/HarmonyImportSpecifierDependency")} HarmonyImportSpecifierDependency */ /** @typedef {import("../javascript/JavascriptParser")} JavascriptParser */ /** @typedef {import("./InnerGraph").InnerGraph} InnerGraph */ /** @typedef {import("./InnerGraph").TopLevelSymbol} TopLevelSymbol */ const { topLevelSymbolTag } = InnerGraph; class InnerGraphPlugin { /** * Apply the plugin * @param {Compiler} compiler the compiler instance * @returns {void} */ apply(compiler) { compiler.hooks.compilation.tap( "InnerGraphPlugin", (compilation, { normalModuleFactory }) => { const logger = compilation.getLogger("webpack.InnerGraphPlugin"); compilation.dependencyTemplates.set( PureExpressionDependency, new PureExpressionDependency.Template() ); /** * @param {JavascriptParser} parser the parser * 
@param {Object} parserOptions options * @returns {void} */ const handler = (parser, parserOptions) => { const onUsageSuper = sup => { InnerGraph.onUsage(parser.state, usedByExports => { switch (usedByExports) { case undefined: case true: return; default: { const dep = new PureExpressionDependency(sup.range); dep.loc = sup.loc; dep.usedByExports = usedByExports; parser.state.module.addDependency(dep); break; } } }); }; parser.hooks.program.tap("InnerGraphPlugin", () => { InnerGraph.enable(parser.state); }); parser.hooks.finish.tap("InnerGraphPlugin", () => { if (!InnerGraph.isEnabled(parser.state)) return; logger.time("infer dependency usage"); InnerGraph.inferDependencyUsage(parser.state); logger.timeAggregate("infer dependency usage"); }); // During prewalking the following datastructures are filled with // nodes that have a TopLevelSymbol assigned and // variables are tagged with the assigned TopLevelSymbol // We differ 3 types of nodes: // 1. full statements (export default, function declaration) // 2. classes (class declaration, class expression) // 3. variable declarators (const x = ...) /** @type {WeakMap} */ const statementWithTopLevelSymbol = new WeakMap(); /** @type {WeakMap} */ const statementPurePart = new WeakMap(); /** @type {WeakMap} */ const classWithTopLevelSymbol = new WeakMap(); /** @type {WeakMap} */ const declWithTopLevelSymbol = new WeakMap(); /** @type {WeakSet} */ const pureDeclarators = new WeakSet(); // The following hooks are used during prewalking: parser.hooks.preStatement.tap("InnerGraphPlugin", statement => { if (!InnerGraph.isEnabled(parser.state)) return; if (parser.scope.topLevelScope === true) { if (statement.type === "FunctionDeclaration") { const name = statement.id ? 
statement.id.name : "*default*"; const fn = InnerGraph.tagTopLevelSymbol(parser, name); statementWithTopLevelSymbol.set(statement, fn); return true; } } }); parser.hooks.blockPreStatement.tap("InnerGraphPlugin", statement => { if (!InnerGraph.isEnabled(parser.state)) return; if (parser.scope.topLevelScope === true) { if (statement.type === "ClassDeclaration") { const name = statement.id ? statement.id.name : "*default*"; const fn = InnerGraph.tagTopLevelSymbol(parser, name); classWithTopLevelSymbol.set(statement, fn); return true; } if (statement.type === "ExportDefaultDeclaration") { const name = "*default*"; const fn = InnerGraph.tagTopLevelSymbol(parser, name); const decl = statement.declaration; if ( decl.type === "ClassExpression" || decl.type === "ClassDeclaration" ) { classWithTopLevelSymbol.set(decl, fn); } else if (parser.isPure(decl, statement.range[0])) { statementWithTopLevelSymbol.set(statement, fn); if ( !decl.type.endsWith("FunctionExpression") && !decl.type.endsWith("Declaration") && decl.type !== "Literal" ) { statementPurePart.set(statement, decl); } } } } }); parser.hooks.preDeclarator.tap( "InnerGraphPlugin", (decl, statement) => { if (!InnerGraph.isEnabled(parser.state)) return; if ( parser.scope.topLevelScope === true && decl.init && decl.id.type === "Identifier" ) { const name = decl.id.name; if (decl.init.type === "ClassExpression") { const fn = InnerGraph.tagTopLevelSymbol(parser, name); classWithTopLevelSymbol.set(decl.init, fn); } else if (parser.isPure(decl.init, decl.id.range[1])) { const fn = InnerGraph.tagTopLevelSymbol(parser, name); declWithTopLevelSymbol.set(decl, fn); if ( !decl.init.type.endsWith("FunctionExpression") && decl.init.type !== "Literal" ) { pureDeclarators.add(decl); } return true; } } } ); // During real walking we set the TopLevelSymbol state to the assigned // TopLevelSymbol by using the fill datastructures. // In addition to tracking TopLevelSymbols, we sometimes need to // add a PureExpressionDependency. 
This is needed to skip execution // of pure expressions, even when they are not dropped due to // minimizing. Otherwise symbols used there might not exist anymore // as they are removed as unused by this optimization // When we find a reference to a TopLevelSymbol, we register a // TopLevelSymbol dependency from TopLevelSymbol in state to the // referenced TopLevelSymbol. This way we get a graph of all // TopLevelSymbols. // The following hooks are called during walking: parser.hooks.statement.tap("InnerGraphPlugin", statement => { if (!InnerGraph.isEnabled(parser.state)) return; if (parser.scope.topLevelScope === true) { InnerGraph.setTopLevelSymbol(parser.state, undefined); const fn = statementWithTopLevelSymbol.get(statement); if (fn) { InnerGraph.setTopLevelSymbol(parser.state, fn); const purePart = statementPurePart.get(statement); if (purePart) { InnerGraph.onUsage(parser.state, usedByExports => { switch (usedByExports) { case undefined: case true: return; default: { const dep = new PureExpressionDependency( purePart.range ); dep.loc = statement.loc; dep.usedByExports = usedByExports; parser.state.module.addDependency(dep); break; } } }); } } } }); parser.hooks.classExtendsExpression.tap( "InnerGraphPlugin", (expr, statement) => { if (!InnerGraph.isEnabled(parser.state)) return; if (parser.scope.topLevelScope === true) { const fn = classWithTopLevelSymbol.get(statement); if ( fn && parser.isPure( expr, statement.id ? 
statement.id.range[1] : statement.range[0] ) ) { InnerGraph.setTopLevelSymbol(parser.state, fn); onUsageSuper(expr); } } } ); parser.hooks.classBodyElement.tap( "InnerGraphPlugin", (element, classDefinition) => { if (!InnerGraph.isEnabled(parser.state)) return; if (parser.scope.topLevelScope === true) { const fn = classWithTopLevelSymbol.get(classDefinition); if (fn) { InnerGraph.setTopLevelSymbol(parser.state, undefined); } } } ); parser.hooks.classBodyValue.tap( "InnerGraphPlugin", (expression, element, classDefinition) => { if (!InnerGraph.isEnabled(parser.state)) return; if (parser.scope.topLevelScope === true) { const fn = classWithTopLevelSymbol.get(classDefinition); if (fn) { if ( !element.static || parser.isPure( expression, element.key ? element.key.range[1] : element.range[0] ) ) { InnerGraph.setTopLevelSymbol(parser.state, fn); if (element.type !== "MethodDefinition" && element.static) { InnerGraph.onUsage(parser.state, usedByExports => { switch (usedByExports) { case undefined: case true: return; default: { const dep = new PureExpressionDependency( expression.range ); dep.loc = expression.loc; dep.usedByExports = usedByExports; parser.state.module.addDependency(dep); break; } } }); } } else { InnerGraph.setTopLevelSymbol(parser.state, undefined); } } } } ); parser.hooks.declarator.tap("InnerGraphPlugin", (decl, statement) => { if (!InnerGraph.isEnabled(parser.state)) return; const fn = declWithTopLevelSymbol.get(decl); if (fn) { InnerGraph.setTopLevelSymbol(parser.state, fn); if (pureDeclarators.has(decl)) { if (decl.init.type === "ClassExpression") { if (decl.init.superClass) { onUsageSuper(decl.init.superClass); } } else { InnerGraph.onUsage(parser.state, usedByExports => { switch (usedByExports) { case undefined: case true: return; default: { const dep = new PureExpressionDependency( decl.init.range ); dep.loc = decl.loc; dep.usedByExports = usedByExports; parser.state.module.addDependency(dep); break; } } }); } } parser.walkExpression(decl.init); 
InnerGraph.setTopLevelSymbol(parser.state, undefined); return true; } }); parser.hooks.expression .for(topLevelSymbolTag) .tap("InnerGraphPlugin", () => { const topLevelSymbol = /** @type {TopLevelSymbol} */ ( parser.currentTagData ); const currentTopLevelSymbol = InnerGraph.getTopLevelSymbol( parser.state ); InnerGraph.addUsage( parser.state, topLevelSymbol, currentTopLevelSymbol || true ); }); parser.hooks.assign .for(topLevelSymbolTag) .tap("InnerGraphPlugin", expr => { if (!InnerGraph.isEnabled(parser.state)) return; if (expr.operator === "=") return true; }); }; normalModuleFactory.hooks.parser .for("javascript/auto") .tap("InnerGraphPlugin", handler); normalModuleFactory.hooks.parser .for("javascript/esm") .tap("InnerGraphPlugin", handler); compilation.hooks.finishModules.tap("InnerGraphPlugin", () => { logger.timeAggregateEnd("infer dependency usage"); }); } ); } } module.exports = InnerGraphPlugin; RuntimeChunkPlugin.js000066600000002021150441747060010704 0ustar00/* MIT License http://www.opensource.org/licenses/mit-license.php Author Tobias Koppers @sokra */ "use strict"; /** @typedef {import("../Compiler")} Compiler */ class RuntimeChunkPlugin { constructor(options) { this.options = { name: entrypoint => `runtime~${entrypoint.name}`, ...options }; } /** * Apply the plugin * @param {Compiler} compiler the compiler instance * @returns {void} */ apply(compiler) { compiler.hooks.thisCompilation.tap("RuntimeChunkPlugin", compilation => { compilation.hooks.addEntry.tap( "RuntimeChunkPlugin", (_, { name: entryName }) => { if (entryName === undefined) return; const data = compilation.entries.get(entryName); if (data.options.runtime === undefined && !data.options.dependOn) { // Determine runtime chunk name let name = this.options.name; if (typeof name === "function") { name = name({ name: entryName }); } data.options.runtime = name; } } ); }); } } module.exports = RuntimeChunkPlugin; FlagIncludedChunksPlugin.js000066600000007306150441747060012000 0ustar00/* MIT 
License http://www.opensource.org/licenses/mit-license.php Author Tobias Koppers @sokra */ "use strict"; /** @typedef {import("../Chunk")} Chunk */ /** @typedef {import("../Compiler")} Compiler */ /** @typedef {import("../Module")} Module */ class FlagIncludedChunksPlugin { /** * Apply the plugin * @param {Compiler} compiler the compiler instance * @returns {void} */ apply(compiler) { compiler.hooks.compilation.tap("FlagIncludedChunksPlugin", compilation => { compilation.hooks.optimizeChunkIds.tap( "FlagIncludedChunksPlugin", chunks => { const chunkGraph = compilation.chunkGraph; // prepare two bit integers for each module // 2^31 is the max number represented as SMI in v8 // we want the bits distributed this way: // the bit 2^31 is pretty rar and only one module should get it // so it has a probability of 1 / modulesCount // the first bit (2^0) is the easiest and every module could get it // if it doesn't get a better bit // from bit 2^n to 2^(n+1) there is a probability of p // so 1 / modulesCount == p^31 // <=> p = sqrt31(1 / modulesCount) // so we use a modulo of 1 / sqrt31(1 / modulesCount) /** @type {WeakMap} */ const moduleBits = new WeakMap(); const modulesCount = compilation.modules.size; // precalculate the modulo values for each bit const modulo = 1 / Math.pow(1 / modulesCount, 1 / 31); const modulos = Array.from( { length: 31 }, (x, i) => Math.pow(modulo, i) | 0 ); // iterate all modules to generate bit values let i = 0; for (const module of compilation.modules) { let bit = 30; while (i % modulos[bit] !== 0) { bit--; } moduleBits.set(module, 1 << bit); i++; } // iterate all chunks to generate bitmaps /** @type {WeakMap} */ const chunkModulesHash = new WeakMap(); for (const chunk of chunks) { let hash = 0; for (const module of chunkGraph.getChunkModulesIterable(chunk)) { hash |= moduleBits.get(module); } chunkModulesHash.set(chunk, hash); } for (const chunkA of chunks) { const chunkAHash = chunkModulesHash.get(chunkA); const chunkAModulesCount = 
chunkGraph.getNumberOfChunkModules(chunkA); if (chunkAModulesCount === 0) continue; let bestModule = undefined; for (const module of chunkGraph.getChunkModulesIterable(chunkA)) { if ( bestModule === undefined || chunkGraph.getNumberOfModuleChunks(bestModule) > chunkGraph.getNumberOfModuleChunks(module) ) bestModule = module; } loopB: for (const chunkB of chunkGraph.getModuleChunksIterable( bestModule )) { // as we iterate the same iterables twice // skip if we find ourselves if (chunkA === chunkB) continue; const chunkBModulesCount = chunkGraph.getNumberOfChunkModules(chunkB); // ids for empty chunks are not included if (chunkBModulesCount === 0) continue; // instead of swapping A and B just bail // as we loop twice the current A will be B and B then A if (chunkAModulesCount > chunkBModulesCount) continue; // is chunkA in chunkB? // we do a cheap check for the hash value const chunkBHash = chunkModulesHash.get(chunkB); if ((chunkBHash & chunkAHash) !== chunkAHash) continue; // compare all modules for (const m of chunkGraph.getChunkModulesIterable(chunkA)) { if (!chunkGraph.isModuleInChunk(m, chunkB)) continue loopB; } chunkB.ids.push(chunkA.id); } } } ); }); } } module.exports = FlagIncludedChunksPlugin; RemoveEmptyChunksPlugin.js000066600000002471150441747060011731 0ustar00/* MIT License http://www.opensource.org/licenses/mit-license.php Author Tobias Koppers @sokra */ "use strict"; const { STAGE_BASIC, STAGE_ADVANCED } = require("../OptimizationStages"); /** @typedef {import("../Chunk")} Chunk */ /** @typedef {import("../Compiler")} Compiler */ class RemoveEmptyChunksPlugin { /** * Apply the plugin * @param {Compiler} compiler the compiler instance * @returns {void} */ apply(compiler) { compiler.hooks.compilation.tap("RemoveEmptyChunksPlugin", compilation => { /** * @param {Iterable} chunks the chunks array * @returns {void} */ const handler = chunks => { const chunkGraph = compilation.chunkGraph; for (const chunk of chunks) { if ( 
chunkGraph.getNumberOfChunkModules(chunk) === 0 && !chunk.hasRuntime() && chunkGraph.getNumberOfEntryModules(chunk) === 0 ) { compilation.chunkGraph.disconnectChunk(chunk); compilation.chunks.delete(chunk); } } }; // TODO do it once compilation.hooks.optimizeChunks.tap( { name: "RemoveEmptyChunksPlugin", stage: STAGE_BASIC }, handler ); compilation.hooks.optimizeChunks.tap( { name: "RemoveEmptyChunksPlugin", stage: STAGE_ADVANCED }, handler ); }); } } module.exports = RemoveEmptyChunksPlugin; MangleExportsPlugin.js000066600000012035150441747060011066 0ustar00/* MIT License http://www.opensource.org/licenses/mit-license.php Author Tobias Koppers @sokra */ "use strict"; const { UsageState } = require("../ExportsInfo"); const { numberToIdentifier, NUMBER_OF_IDENTIFIER_START_CHARS, NUMBER_OF_IDENTIFIER_CONTINUATION_CHARS } = require("../Template"); const { assignDeterministicIds } = require("../ids/IdHelpers"); const { compareSelect, compareStringsNumeric } = require("../util/comparators"); /** @typedef {import("../Compiler")} Compiler */ /** @typedef {import("../ExportsInfo")} ExportsInfo */ /** @typedef {import("../ExportsInfo").ExportInfo} ExportInfo */ /** * @param {ExportsInfo} exportsInfo exports info * @returns {boolean} mangle is possible */ const canMangle = exportsInfo => { if (exportsInfo.otherExportsInfo.getUsed(undefined) !== UsageState.Unused) return false; let hasSomethingToMangle = false; for (const exportInfo of exportsInfo.exports) { if (exportInfo.canMangle === true) { hasSomethingToMangle = true; } } return hasSomethingToMangle; }; // Sort by name const comparator = compareSelect(e => e.name, compareStringsNumeric); /** * @param {boolean} deterministic use deterministic names * @param {ExportsInfo} exportsInfo exports info * @param {boolean} isNamespace is namespace object * @returns {void} */ const mangleExportsInfo = (deterministic, exportsInfo, isNamespace) => { if (!canMangle(exportsInfo)) return; const usedNames = new Set(); /** @type 
{ExportInfo[]} */ const mangleableExports = []; // Avoid to renamed exports that are not provided when // 1. it's not a namespace export: non-provided exports can be found in prototype chain // 2. there are other provided exports and deterministic mode is chosen: // non-provided exports would break the determinism let avoidMangleNonProvided = !isNamespace; if (!avoidMangleNonProvided && deterministic) { for (const exportInfo of exportsInfo.ownedExports) { if (exportInfo.provided !== false) { avoidMangleNonProvided = true; break; } } } for (const exportInfo of exportsInfo.ownedExports) { const name = exportInfo.name; if (!exportInfo.hasUsedName()) { if ( // Can the export be mangled? exportInfo.canMangle !== true || // Never rename 1 char exports (name.length === 1 && /^[a-zA-Z0-9_$]/.test(name)) || // Don't rename 2 char exports in deterministic mode (deterministic && name.length === 2 && /^[a-zA-Z_$][a-zA-Z0-9_$]|^[1-9][0-9]/.test(name)) || // Don't rename exports that are not provided (avoidMangleNonProvided && exportInfo.provided !== true) ) { exportInfo.setUsedName(name); usedNames.add(name); } else { mangleableExports.push(exportInfo); } } if (exportInfo.exportsInfoOwned) { const used = exportInfo.getUsed(undefined); if ( used === UsageState.OnlyPropertiesUsed || used === UsageState.Unused ) { mangleExportsInfo(deterministic, exportInfo.exportsInfo, false); } } } if (deterministic) { assignDeterministicIds( mangleableExports, e => e.name, comparator, (e, id) => { const name = numberToIdentifier(id); const size = usedNames.size; usedNames.add(name); if (size === usedNames.size) return false; e.setUsedName(name); return true; }, [ NUMBER_OF_IDENTIFIER_START_CHARS, NUMBER_OF_IDENTIFIER_START_CHARS * NUMBER_OF_IDENTIFIER_CONTINUATION_CHARS ], NUMBER_OF_IDENTIFIER_CONTINUATION_CHARS, usedNames.size ); } else { const usedExports = []; const unusedExports = []; for (const exportInfo of mangleableExports) { if (exportInfo.getUsed(undefined) === UsageState.Unused) { 
unusedExports.push(exportInfo); } else { usedExports.push(exportInfo); } } usedExports.sort(comparator); unusedExports.sort(comparator); let i = 0; for (const list of [usedExports, unusedExports]) { for (const exportInfo of list) { let name; do { name = numberToIdentifier(i++); } while (usedNames.has(name)); exportInfo.setUsedName(name); } } } }; class MangleExportsPlugin { /** * @param {boolean} deterministic use deterministic names */ constructor(deterministic) { this._deterministic = deterministic; } /** * Apply the plugin * @param {Compiler} compiler the compiler instance * @returns {void} */ apply(compiler) { const { _deterministic: deterministic } = this; compiler.hooks.compilation.tap("MangleExportsPlugin", compilation => { const moduleGraph = compilation.moduleGraph; compilation.hooks.optimizeCodeGeneration.tap( "MangleExportsPlugin", modules => { if (compilation.moduleMemCaches) { throw new Error( "optimization.mangleExports can't be used with cacheUnaffected as export mangling is a global effect" ); } for (const module of modules) { const isNamespace = module.buildMeta && module.buildMeta.exportsType === "namespace"; const exportsInfo = moduleGraph.getExportsInfo(module); mangleExportsInfo(deterministic, exportsInfo, isNamespace); } } ); }); } } module.exports = MangleExportsPlugin; ConcatenatedModule.js000066600000155117150441747060010666 0ustar00/* MIT License http://www.opensource.org/licenses/mit-license.php Author Tobias Koppers @sokra */ "use strict"; const eslintScope = require("eslint-scope"); const Referencer = require("eslint-scope/lib/referencer"); const { CachedSource, ConcatSource, ReplaceSource } = require("webpack-sources"); const ConcatenationScope = require("../ConcatenationScope"); const { UsageState } = require("../ExportsInfo"); const Module = require("../Module"); const RuntimeGlobals = require("../RuntimeGlobals"); const Template = require("../Template"); const HarmonyImportDependency = 
require("../dependencies/HarmonyImportDependency"); const JavascriptParser = require("../javascript/JavascriptParser"); const { equals } = require("../util/ArrayHelpers"); const LazySet = require("../util/LazySet"); const { concatComparators } = require("../util/comparators"); const createHash = require("../util/createHash"); const { makePathsRelative } = require("../util/identifier"); const makeSerializable = require("../util/makeSerializable"); const propertyAccess = require("../util/propertyAccess"); const { filterRuntime, intersectRuntime, mergeRuntimeCondition, mergeRuntimeConditionNonFalse, runtimeConditionToString, subtractRuntimeCondition } = require("../util/runtime"); /** @typedef {import("eslint-scope").Scope} Scope */ /** @typedef {import("webpack-sources").Source} Source */ /** @typedef {import("../../declarations/WebpackOptions").WebpackOptionsNormalized} WebpackOptions */ /** @typedef {import("../ChunkGraph")} ChunkGraph */ /** @typedef {import("../CodeGenerationResults")} CodeGenerationResults */ /** @typedef {import("../Compilation")} Compilation */ /** @typedef {import("../Dependency")} Dependency */ /** @typedef {import("../Dependency").UpdateHashContext} UpdateHashContext */ /** @typedef {import("../DependencyTemplate").DependencyTemplateContext} DependencyTemplateContext */ /** @typedef {import("../DependencyTemplates")} DependencyTemplates */ /** @typedef {import("../ExportsInfo").ExportInfo} ExportInfo */ /** @template T @typedef {import("../InitFragment")} InitFragment */ /** @typedef {import("../Module").CodeGenerationContext} CodeGenerationContext */ /** @typedef {import("../Module").CodeGenerationResult} CodeGenerationResult */ /** @typedef {import("../Module").LibIdentOptions} LibIdentOptions */ /** @typedef {import("../ModuleGraph")} ModuleGraph */ /** @typedef {import("../ModuleGraphConnection")} ModuleGraphConnection */ /** @typedef {import("../ModuleGraphConnection").ConnectionState} ConnectionState */ /** @typedef 
{import("../RequestShortener")} RequestShortener */ /** @typedef {import("../ResolverFactory").ResolverWithOptions} ResolverWithOptions */ /** @typedef {import("../RuntimeTemplate")} RuntimeTemplate */ /** @typedef {import("../WebpackError")} WebpackError */ /** @typedef {import("../javascript/JavascriptModulesPlugin").ChunkRenderContext} ChunkRenderContext */ /** @typedef {import("../util/Hash")} Hash */ /** @typedef {typeof import("../util/Hash")} HashConstructor */ /** @typedef {import("../util/fs").InputFileSystem} InputFileSystem */ /** @typedef {import("../util/runtime").RuntimeSpec} RuntimeSpec */ // fix eslint-scope to support class properties correctly // cspell:word Referencer const ReferencerClass = Referencer; if (!ReferencerClass.prototype.PropertyDefinition) { ReferencerClass.prototype.PropertyDefinition = ReferencerClass.prototype.Property; } /** * @typedef {Object} ReexportInfo * @property {Module} module * @property {string[]} export */ /** @typedef {RawBinding | SymbolBinding} Binding */ /** * @typedef {Object} RawBinding * @property {ModuleInfo} info * @property {string} rawName * @property {string=} comment * @property {string[]} ids * @property {string[]} exportName */ /** * @typedef {Object} SymbolBinding * @property {ConcatenatedModuleInfo} info * @property {string} name * @property {string=} comment * @property {string[]} ids * @property {string[]} exportName */ /** @typedef {ConcatenatedModuleInfo | ExternalModuleInfo } ModuleInfo */ /** @typedef {ConcatenatedModuleInfo | ExternalModuleInfo | ReferenceToModuleInfo } ModuleInfoOrReference */ /** * @typedef {Object} ConcatenatedModuleInfo * @property {"concatenated"} type * @property {Module} module * @property {number} index * @property {Object} ast * @property {Source} internalSource * @property {ReplaceSource} source * @property {InitFragment[]=} chunkInitFragments * @property {Iterable} runtimeRequirements * @property {Scope} globalScope * @property {Scope} moduleScope * @property {Map} 
internalNames * @property {Map} exportMap * @property {Map} rawExportMap * @property {string=} namespaceExportSymbol * @property {string} namespaceObjectName * @property {boolean} interopNamespaceObjectUsed * @property {string} interopNamespaceObjectName * @property {boolean} interopNamespaceObject2Used * @property {string} interopNamespaceObject2Name * @property {boolean} interopDefaultAccessUsed * @property {string} interopDefaultAccessName */ /** * @typedef {Object} ExternalModuleInfo * @property {"external"} type * @property {Module} module * @property {RuntimeSpec | boolean} runtimeCondition * @property {number} index * @property {string} name * @property {boolean} interopNamespaceObjectUsed * @property {string} interopNamespaceObjectName * @property {boolean} interopNamespaceObject2Used * @property {string} interopNamespaceObject2Name * @property {boolean} interopDefaultAccessUsed * @property {string} interopDefaultAccessName */ /** * @typedef {Object} ReferenceToModuleInfo * @property {"reference"} type * @property {RuntimeSpec | boolean} runtimeCondition * @property {ConcatenatedModuleInfo | ExternalModuleInfo} target */ const RESERVED_NAMES = new Set( [ // internal names (should always be renamed) ConcatenationScope.DEFAULT_EXPORT, ConcatenationScope.NAMESPACE_OBJECT_EXPORT, // keywords "abstract,arguments,async,await,boolean,break,byte,case,catch,char,class,const,continue", "debugger,default,delete,do,double,else,enum,eval,export,extends,false,final,finally,float", "for,function,goto,if,implements,import,in,instanceof,int,interface,let,long,native,new,null", "package,private,protected,public,return,short,static,super,switch,synchronized,this,throw", "throws,transient,true,try,typeof,var,void,volatile,while,with,yield", // commonjs/amd "module,__dirname,__filename,exports,require,define", // js globals "Array,Date,eval,function,hasOwnProperty,Infinity,isFinite,isNaN,isPrototypeOf,length,Math", 
"NaN,name,Number,Object,prototype,String,toString,undefined,valueOf", // browser globals "alert,all,anchor,anchors,area,assign,blur,button,checkbox,clearInterval,clearTimeout", "clientInformation,close,closed,confirm,constructor,crypto,decodeURI,decodeURIComponent", "defaultStatus,document,element,elements,embed,embeds,encodeURI,encodeURIComponent,escape", "event,fileUpload,focus,form,forms,frame,innerHeight,innerWidth,layer,layers,link,location", "mimeTypes,navigate,navigator,frames,frameRate,hidden,history,image,images,offscreenBuffering", "open,opener,option,outerHeight,outerWidth,packages,pageXOffset,pageYOffset,parent,parseFloat", "parseInt,password,pkcs11,plugin,prompt,propertyIsEnum,radio,reset,screenX,screenY,scroll", "secure,select,self,setInterval,setTimeout,status,submit,taint,text,textarea,top,unescape", "untaint,window", // window events "onblur,onclick,onerror,onfocus,onkeydown,onkeypress,onkeyup,onmouseover,onload,onmouseup,onmousedown,onsubmit" ] .join(",") .split(",") ); const createComparator = (property, comparator) => (a, b) => comparator(a[property], b[property]); const compareNumbers = (a, b) => { if (isNaN(a)) { if (!isNaN(b)) { return 1; } } else { if (isNaN(b)) { return -1; } if (a !== b) { return a < b ? 
-1 : 1; } } return 0; }; const bySourceOrder = createComparator("sourceOrder", compareNumbers); const byRangeStart = createComparator("rangeStart", compareNumbers); const joinIterableWithComma = iterable => { // This is more performant than Array.from().join(", ") // as it doesn't create an array let str = ""; let first = true; for (const item of iterable) { if (first) { first = false; } else { str += ", "; } str += item; } return str; }; /** * @typedef {Object} ConcatenationEntry * @property {"concatenated" | "external"} type * @property {Module} module * @property {RuntimeSpec | boolean} runtimeCondition */ /** * @param {ModuleGraph} moduleGraph the module graph * @param {ModuleInfo} info module info * @param {string[]} exportName exportName * @param {Map} moduleToInfoMap moduleToInfoMap * @param {RuntimeSpec} runtime for which runtime * @param {RequestShortener} requestShortener the request shortener * @param {RuntimeTemplate} runtimeTemplate the runtime template * @param {Set} neededNamespaceObjects modules for which a namespace object should be generated * @param {boolean} asCall asCall * @param {boolean} strictHarmonyModule strictHarmonyModule * @param {boolean | undefined} asiSafe asiSafe * @param {Set} alreadyVisited alreadyVisited * @returns {Binding} the final variable */ const getFinalBinding = ( moduleGraph, info, exportName, moduleToInfoMap, runtime, requestShortener, runtimeTemplate, neededNamespaceObjects, asCall, strictHarmonyModule, asiSafe, alreadyVisited = new Set() ) => { const exportsType = info.module.getExportsType( moduleGraph, strictHarmonyModule ); if (exportName.length === 0) { switch (exportsType) { case "default-only": info.interopNamespaceObject2Used = true; return { info, rawName: info.interopNamespaceObject2Name, ids: exportName, exportName }; case "default-with-named": info.interopNamespaceObjectUsed = true; return { info, rawName: info.interopNamespaceObjectName, ids: exportName, exportName }; case "namespace": case "dynamic": 
break; default: throw new Error(`Unexpected exportsType ${exportsType}`); } } else { switch (exportsType) { case "namespace": break; case "default-with-named": switch (exportName[0]) { case "default": exportName = exportName.slice(1); break; case "__esModule": return { info, rawName: "/* __esModule */true", ids: exportName.slice(1), exportName }; } break; case "default-only": { const exportId = exportName[0]; if (exportId === "__esModule") { return { info, rawName: "/* __esModule */true", ids: exportName.slice(1), exportName }; } exportName = exportName.slice(1); if (exportId !== "default") { return { info, rawName: "/* non-default import from default-exporting module */undefined", ids: exportName, exportName }; } break; } case "dynamic": switch (exportName[0]) { case "default": { exportName = exportName.slice(1); info.interopDefaultAccessUsed = true; const defaultExport = asCall ? `${info.interopDefaultAccessName}()` : asiSafe ? `(${info.interopDefaultAccessName}())` : asiSafe === false ? 
`;(${info.interopDefaultAccessName}())` : `${info.interopDefaultAccessName}.a`; return { info, rawName: defaultExport, ids: exportName, exportName }; } case "__esModule": return { info, rawName: "/* __esModule */true", ids: exportName.slice(1), exportName }; } break; default: throw new Error(`Unexpected exportsType ${exportsType}`); } } if (exportName.length === 0) { switch (info.type) { case "concatenated": neededNamespaceObjects.add(info); return { info, rawName: info.namespaceObjectName, ids: exportName, exportName }; case "external": return { info, rawName: info.name, ids: exportName, exportName }; } } const exportsInfo = moduleGraph.getExportsInfo(info.module); const exportInfo = exportsInfo.getExportInfo(exportName[0]); if (alreadyVisited.has(exportInfo)) { return { info, rawName: "/* circular reexport */ Object(function x() { x() }())", ids: [], exportName }; } alreadyVisited.add(exportInfo); switch (info.type) { case "concatenated": { const exportId = exportName[0]; if (exportInfo.provided === false) { // It's not provided, but it could be on the prototype neededNamespaceObjects.add(info); return { info, rawName: info.namespaceObjectName, ids: exportName, exportName }; } const directExport = info.exportMap && info.exportMap.get(exportId); if (directExport) { const usedName = /** @type {string[]} */ ( exportsInfo.getUsedName(exportName, runtime) ); if (!usedName) { return { info, rawName: "/* unused export */ undefined", ids: exportName.slice(1), exportName }; } return { info, name: directExport, ids: usedName.slice(1), exportName }; } const rawExport = info.rawExportMap && info.rawExportMap.get(exportId); if (rawExport) { return { info, rawName: rawExport, ids: exportName.slice(1), exportName }; } const reexport = exportInfo.findTarget(moduleGraph, module => moduleToInfoMap.has(module) ); if (reexport === false) { throw new Error( `Target module of reexport from '${info.module.readableIdentifier( requestShortener )}' is not part of the concatenation (export 
'${exportId}')\nModules in the concatenation:\n${Array.from( moduleToInfoMap, ([m, info]) => ` * ${info.type} ${m.readableIdentifier(requestShortener)}` ).join("\n")}` ); } if (reexport) { const refInfo = moduleToInfoMap.get(reexport.module); return getFinalBinding( moduleGraph, refInfo, reexport.export ? [...reexport.export, ...exportName.slice(1)] : exportName.slice(1), moduleToInfoMap, runtime, requestShortener, runtimeTemplate, neededNamespaceObjects, asCall, info.module.buildMeta.strictHarmonyModule, asiSafe, alreadyVisited ); } if (info.namespaceExportSymbol) { const usedName = /** @type {string[]} */ ( exportsInfo.getUsedName(exportName, runtime) ); return { info, rawName: info.namespaceObjectName, ids: usedName, exportName }; } throw new Error( `Cannot get final name for export '${exportName.join( "." )}' of ${info.module.readableIdentifier(requestShortener)}` ); } case "external": { const used = /** @type {string[]} */ ( exportsInfo.getUsedName(exportName, runtime) ); if (!used) { return { info, rawName: "/* unused export */ undefined", ids: exportName.slice(1), exportName }; } const comment = equals(used, exportName) ? 
"" : Template.toNormalComment(`${exportName.join(".")}`); return { info, rawName: info.name + comment, ids: used, exportName }; } } }; /** * @param {ModuleGraph} moduleGraph the module graph * @param {ModuleInfo} info module info * @param {string[]} exportName exportName * @param {Map} moduleToInfoMap moduleToInfoMap * @param {RuntimeSpec} runtime for which runtime * @param {RequestShortener} requestShortener the request shortener * @param {RuntimeTemplate} runtimeTemplate the runtime template * @param {Set} neededNamespaceObjects modules for which a namespace object should be generated * @param {boolean} asCall asCall * @param {boolean} callContext callContext * @param {boolean} strictHarmonyModule strictHarmonyModule * @param {boolean | undefined} asiSafe asiSafe * @returns {string} the final name */ const getFinalName = ( moduleGraph, info, exportName, moduleToInfoMap, runtime, requestShortener, runtimeTemplate, neededNamespaceObjects, asCall, callContext, strictHarmonyModule, asiSafe ) => { const binding = getFinalBinding( moduleGraph, info, exportName, moduleToInfoMap, runtime, requestShortener, runtimeTemplate, neededNamespaceObjects, asCall, strictHarmonyModule, asiSafe ); { const { ids, comment } = binding; let reference; let isPropertyAccess; if ("rawName" in binding) { reference = `${binding.rawName}${comment || ""}${propertyAccess(ids)}`; isPropertyAccess = ids.length > 0; } else { const { info, name: exportId } = binding; const name = info.internalNames.get(exportId); if (!name) { throw new Error( `The export "${exportId}" in "${info.module.readableIdentifier( requestShortener )}" has no internal name (existing names: ${ Array.from( info.internalNames, ([name, symbol]) => `${name}: ${symbol}` ).join(", ") || "none" })` ); } reference = `${name}${comment || ""}${propertyAccess(ids)}`; isPropertyAccess = ids.length > 1; } if (isPropertyAccess && asCall && callContext === false) { return asiSafe ? `(0,${reference})` : asiSafe === false ? 
`;(0,${reference})` : `/*#__PURE__*/Object(${reference})`; } return reference; } }; const addScopeSymbols = (s, nameSet, scopeSet1, scopeSet2) => { let scope = s; while (scope) { if (scopeSet1.has(scope)) break; if (scopeSet2.has(scope)) break; scopeSet1.add(scope); for (const variable of scope.variables) { nameSet.add(variable.name); } scope = scope.upper; } }; const getAllReferences = variable => { let set = variable.references; // Look for inner scope variables too (like in class Foo { t() { Foo } }) const identifiers = new Set(variable.identifiers); for (const scope of variable.scope.childScopes) { for (const innerVar of scope.variables) { if (innerVar.identifiers.some(id => identifiers.has(id))) { set = set.concat(innerVar.references); break; } } } return set; }; const getPathInAst = (ast, node) => { if (ast === node) { return []; } const nr = node.range; const enterNode = n => { if (!n) return undefined; const r = n.range; if (r) { if (r[0] <= nr[0] && r[1] >= nr[1]) { const path = getPathInAst(n, node); if (path) { path.push(n); return path; } } } return undefined; }; if (Array.isArray(ast)) { for (let i = 0; i < ast.length; i++) { const enterResult = enterNode(ast[i]); if (enterResult !== undefined) return enterResult; } } else if (ast && typeof ast === "object") { const keys = Object.keys(ast); for (let i = 0; i < keys.length; i++) { const value = ast[keys[i]]; if (Array.isArray(value)) { const pathResult = getPathInAst(value, node); if (pathResult !== undefined) return pathResult; } else if (value && typeof value === "object") { const enterResult = enterNode(value); if (enterResult !== undefined) return enterResult; } } } }; const TYPES = new Set(["javascript"]); class ConcatenatedModule extends Module { /** * @param {Module} rootModule the root module of the concatenation * @param {Set} modules all modules in the concatenation (including the root module) * @param {RuntimeSpec} runtime the runtime * @param {Object=} associatedObjectForCache object for 
caching * @param {string | HashConstructor=} hashFunction hash function to use * @returns {ConcatenatedModule} the module */ static create( rootModule, modules, runtime, associatedObjectForCache, hashFunction = "md4" ) { const identifier = ConcatenatedModule._createIdentifier( rootModule, modules, associatedObjectForCache, hashFunction ); return new ConcatenatedModule({ identifier, rootModule, modules, runtime }); } /** * @param {Object} options options * @param {string} options.identifier the identifier of the module * @param {Module=} options.rootModule the root module of the concatenation * @param {RuntimeSpec} options.runtime the selected runtime * @param {Set=} options.modules all concatenated modules */ constructor({ identifier, rootModule, modules, runtime }) { super("javascript/esm", null, rootModule && rootModule.layer); // Info from Factory /** @type {string} */ this._identifier = identifier; /** @type {Module} */ this.rootModule = rootModule; /** @type {Set} */ this._modules = modules; this._runtime = runtime; this.factoryMeta = rootModule && rootModule.factoryMeta; } /** * Assuming this module is in the cache. Update the (cached) module with * the fresh module from the factory. Usually updates internal references * and properties. 
* @param {Module} module fresh module * @returns {void} */ updateCacheModule(module) { throw new Error("Must not be called"); } /** * @returns {Set} types available (do not mutate) */ getSourceTypes() { return TYPES; } get modules() { return Array.from(this._modules); } /** * @returns {string} a unique identifier of the module */ identifier() { return this._identifier; } /** * @param {RequestShortener} requestShortener the request shortener * @returns {string} a user readable identifier of the module */ readableIdentifier(requestShortener) { return ( this.rootModule.readableIdentifier(requestShortener) + ` + ${this._modules.size - 1} modules` ); } /** * @param {LibIdentOptions} options options * @returns {string | null} an identifier for library inclusion */ libIdent(options) { return this.rootModule.libIdent(options); } /** * @returns {string | null} absolute path which should be used for condition matching (usually the resource path) */ nameForCondition() { return this.rootModule.nameForCondition(); } /** * @param {ModuleGraph} moduleGraph the module graph * @returns {ConnectionState} how this module should be connected to referencing modules when consumed for side-effects only */ getSideEffectsConnectionState(moduleGraph) { return this.rootModule.getSideEffectsConnectionState(moduleGraph); } /** * @param {WebpackOptions} options webpack options * @param {Compilation} compilation the compilation * @param {ResolverWithOptions} resolver the resolver * @param {InputFileSystem} fs the file system * @param {function(WebpackError=): void} callback callback function * @returns {void} */ build(options, compilation, resolver, fs, callback) { const { rootModule } = this; this.buildInfo = { strict: true, cacheable: true, moduleArgument: rootModule.buildInfo.moduleArgument, exportsArgument: rootModule.buildInfo.exportsArgument, fileDependencies: new LazySet(), contextDependencies: new LazySet(), missingDependencies: new LazySet(), topLevelDeclarations: new Set(), assets: 
undefined }; this.buildMeta = rootModule.buildMeta; this.clearDependenciesAndBlocks(); this.clearWarningsAndErrors(); for (const m of this._modules) { // populate cacheable if (!m.buildInfo.cacheable) { this.buildInfo.cacheable = false; } // populate dependencies for (const d of m.dependencies.filter( dep => !(dep instanceof HarmonyImportDependency) || !this._modules.has(compilation.moduleGraph.getModule(dep)) )) { this.dependencies.push(d); } // populate blocks for (const d of m.blocks) { this.blocks.push(d); } // populate warnings const warnings = m.getWarnings(); if (warnings !== undefined) { for (const warning of warnings) { this.addWarning(warning); } } // populate errors const errors = m.getErrors(); if (errors !== undefined) { for (const error of errors) { this.addError(error); } } // populate topLevelDeclarations if (m.buildInfo.topLevelDeclarations) { const topLevelDeclarations = this.buildInfo.topLevelDeclarations; if (topLevelDeclarations !== undefined) { for (const decl of m.buildInfo.topLevelDeclarations) { topLevelDeclarations.add(decl); } } } else { this.buildInfo.topLevelDeclarations = undefined; } // populate assets if (m.buildInfo.assets) { if (this.buildInfo.assets === undefined) { this.buildInfo.assets = Object.create(null); } Object.assign(this.buildInfo.assets, m.buildInfo.assets); } if (m.buildInfo.assetsInfo) { if (this.buildInfo.assetsInfo === undefined) { this.buildInfo.assetsInfo = new Map(); } for (const [key, value] of m.buildInfo.assetsInfo) { this.buildInfo.assetsInfo.set(key, value); } } } callback(); } /** * @param {string=} type the source type for which the size should be estimated * @returns {number} the estimated size of the module (must be non-zero) */ size(type) { // Guess size from embedded modules let size = 0; for (const module of this._modules) { size += module.size(type); } return size; } /** * @private * @param {Module} rootModule the root of the concatenation * @param {Set} modulesSet a set of modules which should be 
concatenated * @param {RuntimeSpec} runtime for this runtime * @param {ModuleGraph} moduleGraph the module graph * @returns {ConcatenationEntry[]} concatenation list */ _createConcatenationList(rootModule, modulesSet, runtime, moduleGraph) { /** @type {ConcatenationEntry[]} */ const list = []; /** @type {Map} */ const existingEntries = new Map(); /** * @param {Module} module a module * @returns {Iterable<{ connection: ModuleGraphConnection, runtimeCondition: RuntimeSpec | true }>} imported modules in order */ const getConcatenatedImports = module => { let connections = Array.from(moduleGraph.getOutgoingConnections(module)); if (module === rootModule) { for (const c of moduleGraph.getOutgoingConnections(this)) connections.push(c); } /** * @type {Array<{ connection: ModuleGraphConnection, sourceOrder: number, rangeStart: number }>} */ const references = connections .filter(connection => { if (!(connection.dependency instanceof HarmonyImportDependency)) return false; return ( connection && connection.resolvedOriginModule === module && connection.module && connection.isTargetActive(runtime) ); }) .map(connection => { const dep = /** @type {HarmonyImportDependency} */ ( connection.dependency ); return { connection, sourceOrder: dep.sourceOrder, rangeStart: dep.range && dep.range[0] }; }); /** * bySourceOrder * @example * import a from "a"; // sourceOrder=1 * import b from "b"; // sourceOrder=2 * * byRangeStart * @example * import {a, b} from "a"; // sourceOrder=1 * a.a(); // first range * b.b(); // second range * * If there is no reexport, we have the same source. * If there is reexport, but module has side effects, this will lead to reexport module only. * If there is side-effects-free reexport, we can get simple deterministic result with range start comparison. 
*/ references.sort(concatComparators(bySourceOrder, byRangeStart)); /** @type {Map} */ const referencesMap = new Map(); for (const { connection } of references) { const runtimeCondition = filterRuntime(runtime, r => connection.isTargetActive(r) ); if (runtimeCondition === false) continue; const module = connection.module; const entry = referencesMap.get(module); if (entry === undefined) { referencesMap.set(module, { connection, runtimeCondition }); continue; } entry.runtimeCondition = mergeRuntimeConditionNonFalse( entry.runtimeCondition, runtimeCondition, runtime ); } return referencesMap.values(); }; /** * @param {ModuleGraphConnection} connection graph connection * @param {RuntimeSpec | true} runtimeCondition runtime condition * @returns {void} */ const enterModule = (connection, runtimeCondition) => { const module = connection.module; if (!module) return; const existingEntry = existingEntries.get(module); if (existingEntry === true) { return; } if (modulesSet.has(module)) { existingEntries.set(module, true); if (runtimeCondition !== true) { throw new Error( `Cannot runtime-conditional concatenate a module (${module.identifier()} in ${this.rootModule.identifier()}, ${runtimeConditionToString( runtimeCondition )}). 
This should not happen.` ); } const imports = getConcatenatedImports(module); for (const { connection, runtimeCondition } of imports) enterModule(connection, runtimeCondition); list.push({ type: "concatenated", module: connection.module, runtimeCondition }); } else { if (existingEntry !== undefined) { const reducedRuntimeCondition = subtractRuntimeCondition( runtimeCondition, existingEntry, runtime ); if (reducedRuntimeCondition === false) return; runtimeCondition = reducedRuntimeCondition; existingEntries.set( connection.module, mergeRuntimeConditionNonFalse( existingEntry, runtimeCondition, runtime ) ); } else { existingEntries.set(connection.module, runtimeCondition); } if (list.length > 0) { const lastItem = list[list.length - 1]; if ( lastItem.type === "external" && lastItem.module === connection.module ) { lastItem.runtimeCondition = mergeRuntimeCondition( lastItem.runtimeCondition, runtimeCondition, runtime ); return; } } list.push({ type: "external", get module() { // We need to use a getter here, because the module in the dependency // could be replaced by some other process (i. e. 
also replaced with a // concatenated module) return connection.module; }, runtimeCondition }); } }; existingEntries.set(rootModule, true); const imports = getConcatenatedImports(rootModule); for (const { connection, runtimeCondition } of imports) enterModule(connection, runtimeCondition); list.push({ type: "concatenated", module: rootModule, runtimeCondition: true }); return list; } /** * @param {Module} rootModule the root module of the concatenation * @param {Set} modules all modules in the concatenation (including the root module) * @param {Object=} associatedObjectForCache object for caching * @param {string | HashConstructor=} hashFunction hash function to use * @returns {string} the identifier */ static _createIdentifier( rootModule, modules, associatedObjectForCache, hashFunction = "md4" ) { const cachedMakePathsRelative = makePathsRelative.bindContextCache( rootModule.context, associatedObjectForCache ); let identifiers = []; for (const module of modules) { identifiers.push(cachedMakePathsRelative(module.identifier())); } identifiers.sort(); const hash = createHash(hashFunction); hash.update(identifiers.join(" ")); return rootModule.identifier() + "|" + hash.digest("hex"); } /** * @param {LazySet} fileDependencies set where file dependencies are added to * @param {LazySet} contextDependencies set where context dependencies are added to * @param {LazySet} missingDependencies set where missing dependencies are added to * @param {LazySet} buildDependencies set where build dependencies are added to */ addCacheDependencies( fileDependencies, contextDependencies, missingDependencies, buildDependencies ) { for (const module of this._modules) { module.addCacheDependencies( fileDependencies, contextDependencies, missingDependencies, buildDependencies ); } } /** * @param {CodeGenerationContext} context context for code generation * @returns {CodeGenerationResult} result */ codeGeneration({ dependencyTemplates, runtimeTemplate, moduleGraph, chunkGraph, runtime: 
generationRuntime, codeGenerationResults }) { /** @type {Set} */ const runtimeRequirements = new Set(); const runtime = intersectRuntime(generationRuntime, this._runtime); const requestShortener = runtimeTemplate.requestShortener; // Meta info for each module const [modulesWithInfo, moduleToInfoMap] = this._getModulesWithInfo( moduleGraph, runtime ); // Set with modules that need a generated namespace object /** @type {Set} */ const neededNamespaceObjects = new Set(); // Generate source code and analyse scopes // Prepare a ReplaceSource for the final source for (const info of moduleToInfoMap.values()) { this._analyseModule( moduleToInfoMap, info, dependencyTemplates, runtimeTemplate, moduleGraph, chunkGraph, runtime, codeGenerationResults ); } // List of all used names to avoid conflicts const allUsedNames = new Set(RESERVED_NAMES); // Updated Top level declarations are created by renaming const topLevelDeclarations = new Set(); // List of additional names in scope for module references /** @type {Map, alreadyCheckedScopes: Set }>} */ const usedNamesInScopeInfo = new Map(); /** * @param {string} module module identifier * @param {string} id export id * @returns {{ usedNames: Set, alreadyCheckedScopes: Set }} info */ const getUsedNamesInScopeInfo = (module, id) => { const key = `${module}-${id}`; let info = usedNamesInScopeInfo.get(key); if (info === undefined) { info = { usedNames: new Set(), alreadyCheckedScopes: new Set() }; usedNamesInScopeInfo.set(key, info); } return info; }; // Set of already checked scopes const ignoredScopes = new Set(); // get all global names for (const info of modulesWithInfo) { if (info.type === "concatenated") { // ignore symbols from moduleScope if (info.moduleScope) { ignoredScopes.add(info.moduleScope); } // The super class expression in class scopes behaves weird // We get ranges of all super class expressions to make // renaming to work correctly const superClassCache = new WeakMap(); const getSuperClassExpressions = scope => { 
const cacheEntry = superClassCache.get(scope); if (cacheEntry !== undefined) return cacheEntry; const superClassExpressions = []; for (const childScope of scope.childScopes) { if (childScope.type !== "class") continue; const block = childScope.block; if ( (block.type === "ClassDeclaration" || block.type === "ClassExpression") && block.superClass ) { superClassExpressions.push({ range: block.superClass.range, variables: childScope.variables }); } } superClassCache.set(scope, superClassExpressions); return superClassExpressions; }; // add global symbols if (info.globalScope) { for (const reference of info.globalScope.through) { const name = reference.identifier.name; if (ConcatenationScope.isModuleReference(name)) { const match = ConcatenationScope.matchModuleReference(name); if (!match) continue; const referencedInfo = modulesWithInfo[match.index]; if (referencedInfo.type === "reference") throw new Error("Module reference can't point to a reference"); const binding = getFinalBinding( moduleGraph, referencedInfo, match.ids, moduleToInfoMap, runtime, requestShortener, runtimeTemplate, neededNamespaceObjects, false, info.module.buildMeta.strictHarmonyModule, true ); if (!binding.ids) continue; const { usedNames, alreadyCheckedScopes } = getUsedNamesInScopeInfo( binding.info.module.identifier(), "name" in binding ? 
binding.name : "" ); for (const expr of getSuperClassExpressions(reference.from)) { if ( expr.range[0] <= reference.identifier.range[0] && expr.range[1] >= reference.identifier.range[1] ) { for (const variable of expr.variables) { usedNames.add(variable.name); } } } addScopeSymbols( reference.from, usedNames, alreadyCheckedScopes, ignoredScopes ); } else { allUsedNames.add(name); } } } } } // generate names for symbols for (const info of moduleToInfoMap.values()) { const { usedNames: namespaceObjectUsedNames } = getUsedNamesInScopeInfo( info.module.identifier(), "" ); switch (info.type) { case "concatenated": { for (const variable of info.moduleScope.variables) { const name = variable.name; const { usedNames, alreadyCheckedScopes } = getUsedNamesInScopeInfo( info.module.identifier(), name ); if (allUsedNames.has(name) || usedNames.has(name)) { const references = getAllReferences(variable); for (const ref of references) { addScopeSymbols( ref.from, usedNames, alreadyCheckedScopes, ignoredScopes ); } const newName = this.findNewName( name, allUsedNames, usedNames, info.module.readableIdentifier(requestShortener) ); allUsedNames.add(newName); info.internalNames.set(name, newName); topLevelDeclarations.add(newName); const source = info.source; const allIdentifiers = new Set( references.map(r => r.identifier).concat(variable.identifiers) ); for (const identifier of allIdentifiers) { const r = identifier.range; const path = getPathInAst(info.ast, identifier); if (path && path.length > 1) { const maybeProperty = path[1].type === "AssignmentPattern" && path[1].left === path[0] ? 
path[2] : path[1]; if ( maybeProperty.type === "Property" && maybeProperty.shorthand ) { source.insert(r[1], `: ${newName}`); continue; } } source.replace(r[0], r[1] - 1, newName); } } else { allUsedNames.add(name); info.internalNames.set(name, name); topLevelDeclarations.add(name); } } let namespaceObjectName; if (info.namespaceExportSymbol) { namespaceObjectName = info.internalNames.get( info.namespaceExportSymbol ); } else { namespaceObjectName = this.findNewName( "namespaceObject", allUsedNames, namespaceObjectUsedNames, info.module.readableIdentifier(requestShortener) ); allUsedNames.add(namespaceObjectName); } info.namespaceObjectName = namespaceObjectName; topLevelDeclarations.add(namespaceObjectName); break; } case "external": { const externalName = this.findNewName( "", allUsedNames, namespaceObjectUsedNames, info.module.readableIdentifier(requestShortener) ); allUsedNames.add(externalName); info.name = externalName; topLevelDeclarations.add(externalName); break; } } if (info.module.buildMeta.exportsType !== "namespace") { const externalNameInterop = this.findNewName( "namespaceObject", allUsedNames, namespaceObjectUsedNames, info.module.readableIdentifier(requestShortener) ); allUsedNames.add(externalNameInterop); info.interopNamespaceObjectName = externalNameInterop; topLevelDeclarations.add(externalNameInterop); } if ( info.module.buildMeta.exportsType === "default" && info.module.buildMeta.defaultObject !== "redirect" ) { const externalNameInterop = this.findNewName( "namespaceObject2", allUsedNames, namespaceObjectUsedNames, info.module.readableIdentifier(requestShortener) ); allUsedNames.add(externalNameInterop); info.interopNamespaceObject2Name = externalNameInterop; topLevelDeclarations.add(externalNameInterop); } if ( info.module.buildMeta.exportsType === "dynamic" || !info.module.buildMeta.exportsType ) { const externalNameInterop = this.findNewName( "default", allUsedNames, namespaceObjectUsedNames, 
info.module.readableIdentifier(requestShortener) ); allUsedNames.add(externalNameInterop); info.interopDefaultAccessName = externalNameInterop; topLevelDeclarations.add(externalNameInterop); } } // Find and replace references to modules for (const info of moduleToInfoMap.values()) { if (info.type === "concatenated") { for (const reference of info.globalScope.through) { const name = reference.identifier.name; const match = ConcatenationScope.matchModuleReference(name); if (match) { const referencedInfo = modulesWithInfo[match.index]; if (referencedInfo.type === "reference") throw new Error("Module reference can't point to a reference"); const finalName = getFinalName( moduleGraph, referencedInfo, match.ids, moduleToInfoMap, runtime, requestShortener, runtimeTemplate, neededNamespaceObjects, match.call, !match.directImport, info.module.buildMeta.strictHarmonyModule, match.asiSafe ); const r = reference.identifier.range; const source = info.source; // range is extended by 2 chars to cover the appended "._" source.replace(r[0], r[1] + 1, finalName); } } } } // Map with all root exposed used exports /** @type {Map} */ const exportsMap = new Map(); // Set with all root exposed unused exports /** @type {Set} */ const unusedExports = new Set(); const rootInfo = /** @type {ConcatenatedModuleInfo} */ ( moduleToInfoMap.get(this.rootModule) ); const strictHarmonyModule = rootInfo.module.buildMeta.strictHarmonyModule; const exportsInfo = moduleGraph.getExportsInfo(rootInfo.module); for (const exportInfo of exportsInfo.orderedExports) { const name = exportInfo.name; if (exportInfo.provided === false) continue; const used = exportInfo.getUsedName(undefined, runtime); if (!used) { unusedExports.add(name); continue; } exportsMap.set(used, requestShortener => { try { const finalName = getFinalName( moduleGraph, rootInfo, [name], moduleToInfoMap, runtime, requestShortener, runtimeTemplate, neededNamespaceObjects, false, false, strictHarmonyModule, true ); return `/* ${ 
exportInfo.isReexport() ? "reexport" : "binding" } */ ${finalName}`; } catch (e) { e.message += `\nwhile generating the root export '${name}' (used name: '${used}')`; throw e; } }); } const result = new ConcatSource(); // add harmony compatibility flag (must be first because of possible circular dependencies) if ( moduleGraph.getExportsInfo(this).otherExportsInfo.getUsed(runtime) !== UsageState.Unused ) { result.add(`// ESM COMPAT FLAG\n`); result.add( runtimeTemplate.defineEsModuleFlagStatement({ exportsArgument: this.exportsArgument, runtimeRequirements }) ); } // define exports if (exportsMap.size > 0) { runtimeRequirements.add(RuntimeGlobals.exports); runtimeRequirements.add(RuntimeGlobals.definePropertyGetters); const definitions = []; for (const [key, value] of exportsMap) { definitions.push( `\n ${JSON.stringify(key)}: ${runtimeTemplate.returningFunction( value(requestShortener) )}` ); } result.add(`\n// EXPORTS\n`); result.add( `${RuntimeGlobals.definePropertyGetters}(${ this.exportsArgument }, {${definitions.join(",")}\n});\n` ); } // list unused exports if (unusedExports.size > 0) { result.add( `\n// UNUSED EXPORTS: ${joinIterableWithComma(unusedExports)}\n` ); } // generate namespace objects const namespaceObjectSources = new Map(); for (const info of neededNamespaceObjects) { if (info.namespaceExportSymbol) continue; const nsObj = []; const exportsInfo = moduleGraph.getExportsInfo(info.module); for (const exportInfo of exportsInfo.orderedExports) { if (exportInfo.provided === false) continue; const usedName = exportInfo.getUsedName(undefined, runtime); if (usedName) { const finalName = getFinalName( moduleGraph, info, [exportInfo.name], moduleToInfoMap, runtime, requestShortener, runtimeTemplate, neededNamespaceObjects, false, undefined, info.module.buildMeta.strictHarmonyModule, true ); nsObj.push( `\n ${JSON.stringify( usedName )}: ${runtimeTemplate.returningFunction(finalName)}` ); } } const name = info.namespaceObjectName; const defineGetters = 
nsObj.length > 0 ? `${RuntimeGlobals.definePropertyGetters}(${name}, {${nsObj.join( "," )}\n});\n` : ""; if (nsObj.length > 0) runtimeRequirements.add(RuntimeGlobals.definePropertyGetters); namespaceObjectSources.set( info, ` // NAMESPACE OBJECT: ${info.module.readableIdentifier(requestShortener)} var ${name} = {}; ${RuntimeGlobals.makeNamespaceObject}(${name}); ${defineGetters}` ); runtimeRequirements.add(RuntimeGlobals.makeNamespaceObject); } // define required namespace objects (must be before evaluation modules) for (const info of modulesWithInfo) { if (info.type === "concatenated") { const source = namespaceObjectSources.get(info); if (!source) continue; result.add(source); } } const chunkInitFragments = []; // evaluate modules in order for (const rawInfo of modulesWithInfo) { let name; let isConditional = false; const info = rawInfo.type === "reference" ? rawInfo.target : rawInfo; switch (info.type) { case "concatenated": { result.add( `\n;// CONCATENATED MODULE: ${info.module.readableIdentifier( requestShortener )}\n` ); result.add(info.source); if (info.chunkInitFragments) { for (const f of info.chunkInitFragments) chunkInitFragments.push(f); } if (info.runtimeRequirements) { for (const r of info.runtimeRequirements) { runtimeRequirements.add(r); } } name = info.namespaceObjectName; break; } case "external": { result.add( `\n// EXTERNAL MODULE: ${info.module.readableIdentifier( requestShortener )}\n` ); runtimeRequirements.add(RuntimeGlobals.require); const { runtimeCondition } = /** @type {ExternalModuleInfo | ReferenceToModuleInfo} */ (rawInfo); const condition = runtimeTemplate.runtimeConditionExpression({ chunkGraph, runtimeCondition, runtime, runtimeRequirements }); if (condition !== "true") { isConditional = true; result.add(`if (${condition}) {\n`); } result.add( `var ${info.name} = __webpack_require__(${JSON.stringify( chunkGraph.getModuleId(info.module) )});` ); name = info.name; break; } default: // @ts-expect-error never is expected here throw 
new Error(`Unsupported concatenation entry type ${info.type}`); } if (info.interopNamespaceObjectUsed) { runtimeRequirements.add(RuntimeGlobals.createFakeNamespaceObject); result.add( `\nvar ${info.interopNamespaceObjectName} = /*#__PURE__*/${RuntimeGlobals.createFakeNamespaceObject}(${name}, 2);` ); } if (info.interopNamespaceObject2Used) { runtimeRequirements.add(RuntimeGlobals.createFakeNamespaceObject); result.add( `\nvar ${info.interopNamespaceObject2Name} = /*#__PURE__*/${RuntimeGlobals.createFakeNamespaceObject}(${name});` ); } if (info.interopDefaultAccessUsed) { runtimeRequirements.add(RuntimeGlobals.compatGetDefaultExport); result.add( `\nvar ${info.interopDefaultAccessName} = /*#__PURE__*/${RuntimeGlobals.compatGetDefaultExport}(${name});` ); } if (isConditional) { result.add("\n}"); } } const data = new Map(); if (chunkInitFragments.length > 0) data.set("chunkInitFragments", chunkInitFragments); data.set("topLevelDeclarations", topLevelDeclarations); /** @type {CodeGenerationResult} */ const resultEntry = { sources: new Map([["javascript", new CachedSource(result)]]), data, runtimeRequirements }; return resultEntry; } /** * @param {Map} modulesMap modulesMap * @param {ModuleInfo} info info * @param {DependencyTemplates} dependencyTemplates dependencyTemplates * @param {RuntimeTemplate} runtimeTemplate runtimeTemplate * @param {ModuleGraph} moduleGraph moduleGraph * @param {ChunkGraph} chunkGraph chunkGraph * @param {RuntimeSpec} runtime runtime * @param {CodeGenerationResults} codeGenerationResults codeGenerationResults */ _analyseModule( modulesMap, info, dependencyTemplates, runtimeTemplate, moduleGraph, chunkGraph, runtime, codeGenerationResults ) { if (info.type === "concatenated") { const m = info.module; try { // Create a concatenation scope to track and capture information const concatenationScope = new ConcatenationScope(modulesMap, info); // TODO cache codeGeneration results const codeGenResult = m.codeGeneration({ dependencyTemplates, 
runtimeTemplate, moduleGraph, chunkGraph, runtime, concatenationScope, codeGenerationResults, sourceTypes: TYPES }); const source = codeGenResult.sources.get("javascript"); const data = codeGenResult.data; const chunkInitFragments = data && data.get("chunkInitFragments"); const code = source.source().toString(); let ast; try { ast = JavascriptParser._parse(code, { sourceType: "module" }); } catch (err) { if ( err.loc && typeof err.loc === "object" && typeof err.loc.line === "number" ) { const lineNumber = err.loc.line; const lines = code.split("\n"); err.message += "\n| " + lines .slice(Math.max(0, lineNumber - 3), lineNumber + 2) .join("\n| "); } throw err; } const scopeManager = eslintScope.analyze(ast, { ecmaVersion: 6, sourceType: "module", optimistic: true, ignoreEval: true, impliedStrict: true }); const globalScope = scopeManager.acquire(ast); const moduleScope = globalScope.childScopes[0]; const resultSource = new ReplaceSource(source); info.runtimeRequirements = codeGenResult.runtimeRequirements; info.ast = ast; info.internalSource = source; info.source = resultSource; info.chunkInitFragments = chunkInitFragments; info.globalScope = globalScope; info.moduleScope = moduleScope; } catch (err) { err.message += `\nwhile analyzing module ${m.identifier()} for concatenation`; throw err; } } } /** * @param {ModuleGraph} moduleGraph the module graph * @param {RuntimeSpec} runtime the runtime * @returns {[ModuleInfoOrReference[], Map]} module info items */ _getModulesWithInfo(moduleGraph, runtime) { const orderedConcatenationList = this._createConcatenationList( this.rootModule, this._modules, runtime, moduleGraph ); /** @type {Map} */ const map = new Map(); const list = orderedConcatenationList.map((info, index) => { let item = map.get(info.module); if (item === undefined) { switch (info.type) { case "concatenated": item = { type: "concatenated", module: info.module, index, ast: undefined, internalSource: undefined, runtimeRequirements: undefined, source: 
undefined, globalScope: undefined, moduleScope: undefined, internalNames: new Map(), exportMap: undefined, rawExportMap: undefined, namespaceExportSymbol: undefined, namespaceObjectName: undefined, interopNamespaceObjectUsed: false, interopNamespaceObjectName: undefined, interopNamespaceObject2Used: false, interopNamespaceObject2Name: undefined, interopDefaultAccessUsed: false, interopDefaultAccessName: undefined }; break; case "external": item = { type: "external", module: info.module, runtimeCondition: info.runtimeCondition, index, name: undefined, interopNamespaceObjectUsed: false, interopNamespaceObjectName: undefined, interopNamespaceObject2Used: false, interopNamespaceObject2Name: undefined, interopDefaultAccessUsed: false, interopDefaultAccessName: undefined }; break; default: throw new Error( `Unsupported concatenation entry type ${info.type}` ); } map.set(item.module, item); return item; } else { /** @type {ReferenceToModuleInfo} */ const ref = { type: "reference", runtimeCondition: info.runtimeCondition, target: item }; return ref; } }); return [list, map]; } findNewName(oldName, usedNamed1, usedNamed2, extraInfo) { let name = oldName; if (name === ConcatenationScope.DEFAULT_EXPORT) { name = ""; } if (name === ConcatenationScope.NAMESPACE_OBJECT_EXPORT) { name = "namespaceObject"; } // Remove uncool stuff extraInfo = extraInfo.replace( /\.+\/|(\/index)?\.([a-zA-Z0-9]{1,4})($|\s|\?)|\s*\+\s*\d+\s*modules/g, "" ); const splittedInfo = extraInfo.split("/"); while (splittedInfo.length) { name = splittedInfo.pop() + (name ? 
"_" + name : ""); const nameIdent = Template.toIdentifier(name); if ( !usedNamed1.has(nameIdent) && (!usedNamed2 || !usedNamed2.has(nameIdent)) ) return nameIdent; } let i = 0; let nameWithNumber = Template.toIdentifier(`${name}_${i}`); while ( usedNamed1.has(nameWithNumber) || (usedNamed2 && usedNamed2.has(nameWithNumber)) ) { i++; nameWithNumber = Template.toIdentifier(`${name}_${i}`); } return nameWithNumber; } /** * @param {Hash} hash the hash used to track dependencies * @param {UpdateHashContext} context context * @returns {void} */ updateHash(hash, context) { const { chunkGraph, runtime } = context; for (const info of this._createConcatenationList( this.rootModule, this._modules, intersectRuntime(runtime, this._runtime), chunkGraph.moduleGraph )) { switch (info.type) { case "concatenated": info.module.updateHash(hash, context); break; case "external": hash.update(`${chunkGraph.getModuleId(info.module)}`); // TODO runtimeCondition break; } } super.updateHash(hash, context); } static deserialize(context) { const obj = new ConcatenatedModule({ identifier: undefined, rootModule: undefined, modules: undefined, runtime: undefined }); obj.deserialize(context); return obj; } } makeSerializable(ConcatenatedModule, "webpack/lib/optimize/ConcatenatedModule"); module.exports = ConcatenatedModule; EnsureChunkConditionsPlugin.js000066600000004750150441747060012567 0ustar00/* MIT License http://www.opensource.org/licenses/mit-license.php Author Tobias Koppers @sokra */ "use strict"; const { STAGE_BASIC } = require("../OptimizationStages"); /** @typedef {import("../Chunk")} Chunk */ /** @typedef {import("../ChunkGroup")} ChunkGroup */ /** @typedef {import("../Compiler")} Compiler */ class EnsureChunkConditionsPlugin { /** * Apply the plugin * @param {Compiler} compiler the compiler instance * @returns {void} */ apply(compiler) { compiler.hooks.compilation.tap( "EnsureChunkConditionsPlugin", compilation => { const handler = chunks => { const chunkGraph = 
compilation.chunkGraph; // These sets are hoisted here to save memory // They are cleared at the end of every loop /** @type {Set} */ const sourceChunks = new Set(); /** @type {Set} */ const chunkGroups = new Set(); for (const module of compilation.modules) { if (!module.hasChunkCondition()) continue; for (const chunk of chunkGraph.getModuleChunksIterable(module)) { if (!module.chunkCondition(chunk, compilation)) { sourceChunks.add(chunk); for (const group of chunk.groupsIterable) { chunkGroups.add(group); } } } if (sourceChunks.size === 0) continue; /** @type {Set} */ const targetChunks = new Set(); chunkGroupLoop: for (const chunkGroup of chunkGroups) { // Can module be placed in a chunk of this group? for (const chunk of chunkGroup.chunks) { if (module.chunkCondition(chunk, compilation)) { targetChunks.add(chunk); continue chunkGroupLoop; } } // We reached the entrypoint: fail if (chunkGroup.isInitial()) { throw new Error( "Cannot fullfil chunk condition of " + module.identifier() ); } // Try placing in all parents for (const group of chunkGroup.parentsIterable) { chunkGroups.add(group); } } for (const sourceChunk of sourceChunks) { chunkGraph.disconnectChunkAndModule(sourceChunk, module); } for (const targetChunk of targetChunks) { chunkGraph.connectChunkAndModule(targetChunk, module); } sourceChunks.clear(); chunkGroups.clear(); } }; compilation.hooks.optimizeChunks.tap( { name: "EnsureChunkConditionsPlugin", stage: STAGE_BASIC }, handler ); } ); } } module.exports = EnsureChunkConditionsPlugin; AggressiveSplittingPlugin.js000066600000023647150441747060012306 0ustar00/* MIT License http://www.opensource.org/licenses/mit-license.php Author Tobias Koppers @sokra */ "use strict"; const { STAGE_ADVANCED } = require("../OptimizationStages"); const { intersect } = require("../util/SetHelpers"); const { compareModulesByIdentifier, compareChunks } = require("../util/comparators"); const createSchemaValidation = require("../util/create-schema-validation"); const 
identifierUtils = require("../util/identifier"); /** @typedef {import("../../declarations/plugins/optimize/AggressiveSplittingPlugin").AggressiveSplittingPluginOptions} AggressiveSplittingPluginOptions */ /** @typedef {import("../Chunk")} Chunk */ /** @typedef {import("../ChunkGraph")} ChunkGraph */ /** @typedef {import("../Compiler")} Compiler */ /** @typedef {import("../Module")} Module */ const validate = createSchemaValidation( require("../../schemas/plugins/optimize/AggressiveSplittingPlugin.check.js"), () => require("../../schemas/plugins/optimize/AggressiveSplittingPlugin.json"), { name: "Aggressive Splitting Plugin", baseDataPath: "options" } ); const moveModuleBetween = (chunkGraph, oldChunk, newChunk) => { return module => { chunkGraph.disconnectChunkAndModule(oldChunk, module); chunkGraph.connectChunkAndModule(newChunk, module); }; }; /** * @param {ChunkGraph} chunkGraph the chunk graph * @param {Chunk} chunk the chunk * @returns {function(Module): boolean} filter for entry module */ const isNotAEntryModule = (chunkGraph, chunk) => { return module => { return !chunkGraph.isEntryModuleInChunk(module, chunk); }; }; /** @type {WeakSet} */ const recordedChunks = new WeakSet(); class AggressiveSplittingPlugin { /** * @param {AggressiveSplittingPluginOptions=} options options object */ constructor(options = {}) { validate(options); this.options = options; if (typeof this.options.minSize !== "number") { this.options.minSize = 30 * 1024; } if (typeof this.options.maxSize !== "number") { this.options.maxSize = 50 * 1024; } if (typeof this.options.chunkOverhead !== "number") { this.options.chunkOverhead = 0; } if (typeof this.options.entryChunkMultiplicator !== "number") { this.options.entryChunkMultiplicator = 1; } } /** * @param {Chunk} chunk the chunk to test * @returns {boolean} true if the chunk was recorded */ static wasChunkRecorded(chunk) { return recordedChunks.has(chunk); } /** * Apply the plugin * @param {Compiler} compiler the compiler instance * 
@returns {void}
 */
apply(compiler) {
	compiler.hooks.thisCompilation.tap(
		"AggressiveSplittingPlugin",
		compilation => {
			// State shared between the optimize / optimizeChunks / recordHash
			// hooks; reset at the start of every compilation's optimize phase
			let needAdditionalSeal = false;
			let newSplits;
			let fromAggressiveSplittingSet;
			let chunkSplitDataMap;
			compilation.hooks.optimize.tap("AggressiveSplittingPlugin", () => {
				newSplits = [];
				fromAggressiveSplittingSet = new Set();
				chunkSplitDataMap = new Map();
			});
			compilation.hooks.optimizeChunks.tap(
				{
					name: "AggressiveSplittingPlugin",
					stage: STAGE_ADVANCED
				},
				chunks => {
					const chunkGraph = compilation.chunkGraph;
					// Precompute module <-> context-relative-name mappings
					const nameToModuleMap = new Map();
					const moduleToNameMap = new Map();
					const makePathsRelative =
						identifierUtils.makePathsRelative.bindContextCache(
							compiler.context,
							compiler.root
						);
					for (const m of compilation.modules) {
						const name = makePathsRelative(m.identifier());
						nameToModuleMap.set(name, m);
						moduleToNameMap.set(m, name);
					}
					// Check used chunk ids
					const usedIds = new Set();
					for (const chunk of chunks) {
						usedIds.add(chunk.id);
					}
					// Splits recorded by previous compilations, plus any splits
					// already made during this compilation
					const recordedSplits =
						(compilation.records && compilation.records.aggressiveSplits) ||
						[];
					const usedSplits = newSplits
						? recordedSplits.concat(newSplits)
						: recordedSplits;
					const minSize = this.options.minSize;
					const maxSize = this.options.maxSize;
					// Tries to (re-)apply one recorded split; returns true on success
					const applySplit = splitData => {
						// Cannot split if id is already taken
						if (splitData.id !== undefined && usedIds.has(splitData.id)) {
							return false;
						}
						// Get module objects from names
						const selectedModules = splitData.modules.map(name =>
							nameToModuleMap.get(name)
						);
						// Do the modules exist at all?
if (!selectedModules.every(Boolean)) return false; // Check if size matches (faster than waiting for hash) let size = 0; for (const m of selectedModules) size += m.size(); if (size !== splitData.size) return false; // get chunks with all modules const selectedChunks = intersect( selectedModules.map( m => new Set(chunkGraph.getModuleChunksIterable(m)) ) ); // No relevant chunks found if (selectedChunks.size === 0) return false; // The found chunk is already the split or similar if ( selectedChunks.size === 1 && chunkGraph.getNumberOfChunkModules( Array.from(selectedChunks)[0] ) === selectedModules.length ) { const chunk = Array.from(selectedChunks)[0]; if (fromAggressiveSplittingSet.has(chunk)) return false; fromAggressiveSplittingSet.add(chunk); chunkSplitDataMap.set(chunk, splitData); return true; } // split the chunk into two parts const newChunk = compilation.addChunk(); newChunk.chunkReason = "aggressive splitted"; for (const chunk of selectedChunks) { selectedModules.forEach( moveModuleBetween(chunkGraph, chunk, newChunk) ); chunk.split(newChunk); chunk.name = null; } fromAggressiveSplittingSet.add(newChunk); chunkSplitDataMap.set(newChunk, splitData); if (splitData.id !== null && splitData.id !== undefined) { newChunk.id = splitData.id; newChunk.ids = [splitData.id]; } return true; }; // try to restore to recorded splitting let changed = false; for (let j = 0; j < usedSplits.length; j++) { const splitData = usedSplits[j]; if (applySplit(splitData)) changed = true; } // for any chunk which isn't splitted yet, split it and create a new entry // start with the biggest chunk const cmpFn = compareChunks(chunkGraph); const sortedChunks = Array.from(chunks).sort((a, b) => { const diff1 = chunkGraph.getChunkModulesSize(b) - chunkGraph.getChunkModulesSize(a); if (diff1) return diff1; const diff2 = chunkGraph.getNumberOfChunkModules(a) - chunkGraph.getNumberOfChunkModules(b); if (diff2) return diff2; return cmpFn(a, b); }); for (const chunk of sortedChunks) { if 
(fromAggressiveSplittingSet.has(chunk)) continue; const size = chunkGraph.getChunkModulesSize(chunk); if ( size > maxSize && chunkGraph.getNumberOfChunkModules(chunk) > 1 ) { const modules = chunkGraph .getOrderedChunkModules(chunk, compareModulesByIdentifier) .filter(isNotAEntryModule(chunkGraph, chunk)); const selectedModules = []; let selectedModulesSize = 0; for (let k = 0; k < modules.length; k++) { const module = modules[k]; const newSize = selectedModulesSize + module.size(); if (newSize > maxSize && selectedModulesSize >= minSize) { break; } selectedModulesSize = newSize; selectedModules.push(module); } if (selectedModules.length === 0) continue; const splitData = { modules: selectedModules .map(m => moduleToNameMap.get(m)) .sort(), size: selectedModulesSize }; if (applySplit(splitData)) { newSplits = (newSplits || []).concat(splitData); changed = true; } } } if (changed) return true; } ); compilation.hooks.recordHash.tap( "AggressiveSplittingPlugin", records => { // 4. save made splittings to records const allSplits = new Set(); const invalidSplits = new Set(); // Check if some splittings are invalid // We remove invalid splittings and try again for (const chunk of compilation.chunks) { const splitData = chunkSplitDataMap.get(chunk); if (splitData !== undefined) { if (splitData.hash && chunk.hash !== splitData.hash) { // Split was successful, but hash doesn't equal // We can throw away the split since it's useless now invalidSplits.add(splitData); } } } if (invalidSplits.size > 0) { records.aggressiveSplits = records.aggressiveSplits.filter( splitData => !invalidSplits.has(splitData) ); needAdditionalSeal = true; } else { // set hash and id values on all (new) splittings for (const chunk of compilation.chunks) { const splitData = chunkSplitDataMap.get(chunk); if (splitData !== undefined) { splitData.hash = chunk.hash; splitData.id = chunk.id; allSplits.add(splitData); // set flag for stats recordedChunks.add(chunk); } } // Also add all unused historical 
splits (after the used ones) // They can still be used in some future compilation const recordedSplits = compilation.records && compilation.records.aggressiveSplits; if (recordedSplits) { for (const splitData of recordedSplits) { if (!invalidSplits.has(splitData)) allSplits.add(splitData); } } // record all splits records.aggressiveSplits = Array.from(allSplits); needAdditionalSeal = false; } } ); compilation.hooks.needAdditionalSeal.tap( "AggressiveSplittingPlugin", () => { if (needAdditionalSeal) { needAdditionalSeal = false; return true; } } ); } ); } } module.exports = AggressiveSplittingPlugin; ModuleConcatenationPlugin.js000066600000065151150441747060012240 0ustar00/* MIT License http://www.opensource.org/licenses/mit-license.php Author Tobias Koppers @sokra */ "use strict"; const asyncLib = require("neo-async"); const ChunkGraph = require("../ChunkGraph"); const ModuleGraph = require("../ModuleGraph"); const { STAGE_DEFAULT } = require("../OptimizationStages"); const HarmonyImportDependency = require("../dependencies/HarmonyImportDependency"); const { compareModulesByIdentifier } = require("../util/comparators"); const { intersectRuntime, mergeRuntimeOwned, filterRuntime, runtimeToString, mergeRuntime } = require("../util/runtime"); const ConcatenatedModule = require("./ConcatenatedModule"); /** @typedef {import("../Compilation")} Compilation */ /** @typedef {import("../Compiler")} Compiler */ /** @typedef {import("../Module")} Module */ /** @typedef {import("../RequestShortener")} RequestShortener */ /** @typedef {import("../util/runtime").RuntimeSpec} RuntimeSpec */ /** * @typedef {Object} Statistics * @property {number} cached * @property {number} alreadyInConfig * @property {number} invalidModule * @property {number} incorrectChunks * @property {number} incorrectDependency * @property {number} incorrectModuleDependency * @property {number} incorrectChunksOfImporter * @property {number} incorrectRuntimeCondition * @property {number} importerFailed * 
@property {number} added */ const formatBailoutReason = msg => { return "ModuleConcatenation bailout: " + msg; }; class ModuleConcatenationPlugin { constructor(options) { if (typeof options !== "object") options = {}; this.options = options; } /** * Apply the plugin * @param {Compiler} compiler the compiler instance * @returns {void} */ apply(compiler) { const { _backCompat: backCompat } = compiler; compiler.hooks.compilation.tap("ModuleConcatenationPlugin", compilation => { if (compilation.moduleMemCaches) { throw new Error( "optimization.concatenateModules can't be used with cacheUnaffected as module concatenation is a global effect" ); } const moduleGraph = compilation.moduleGraph; const bailoutReasonMap = new Map(); const setBailoutReason = (module, reason) => { setInnerBailoutReason(module, reason); moduleGraph .getOptimizationBailout(module) .push( typeof reason === "function" ? rs => formatBailoutReason(reason(rs)) : formatBailoutReason(reason) ); }; const setInnerBailoutReason = (module, reason) => { bailoutReasonMap.set(module, reason); }; const getInnerBailoutReason = (module, requestShortener) => { const reason = bailoutReasonMap.get(module); if (typeof reason === "function") return reason(requestShortener); return reason; }; const formatBailoutWarning = (module, problem) => requestShortener => { if (typeof problem === "function") { return formatBailoutReason( `Cannot concat with ${module.readableIdentifier( requestShortener )}: ${problem(requestShortener)}` ); } const reason = getInnerBailoutReason(module, requestShortener); const reasonWithPrefix = reason ? 
`: ${reason}` : ""; if (module === problem) { return formatBailoutReason( `Cannot concat with ${module.readableIdentifier( requestShortener )}${reasonWithPrefix}` ); } else { return formatBailoutReason( `Cannot concat with ${module.readableIdentifier( requestShortener )} because of ${problem.readableIdentifier( requestShortener )}${reasonWithPrefix}` ); } }; compilation.hooks.optimizeChunkModules.tapAsync( { name: "ModuleConcatenationPlugin", stage: STAGE_DEFAULT }, (allChunks, modules, callback) => { const logger = compilation.getLogger( "webpack.ModuleConcatenationPlugin" ); const { chunkGraph, moduleGraph } = compilation; const relevantModules = []; const possibleInners = new Set(); const context = { chunkGraph, moduleGraph }; logger.time("select relevant modules"); for (const module of modules) { let canBeRoot = true; let canBeInner = true; const bailoutReason = module.getConcatenationBailoutReason(context); if (bailoutReason) { setBailoutReason(module, bailoutReason); continue; } // Must not be an async module if (moduleGraph.isAsync(module)) { setBailoutReason(module, `Module is async`); continue; } // Must be in strict mode if (!module.buildInfo.strict) { setBailoutReason(module, `Module is not in strict mode`); continue; } // Module must be in any chunk (we don't want to do useless work) if (chunkGraph.getNumberOfModuleChunks(module) === 0) { setBailoutReason(module, "Module is not in any chunk"); continue; } // Exports must be known (and not dynamic) const exportsInfo = moduleGraph.getExportsInfo(module); const relevantExports = exportsInfo.getRelevantExports(undefined); const unknownReexports = relevantExports.filter(exportInfo => { return ( exportInfo.isReexport() && !exportInfo.getTarget(moduleGraph) ); }); if (unknownReexports.length > 0) { setBailoutReason( module, `Reexports in this module do not have a static target (${Array.from( unknownReexports, exportInfo => `${ exportInfo.name || "other exports" }: ${exportInfo.getUsedInfo()}` ).join(", ")})` 
); continue; } // Root modules must have a static list of exports const unknownProvidedExports = relevantExports.filter( exportInfo => { return exportInfo.provided !== true; } ); if (unknownProvidedExports.length > 0) { setBailoutReason( module, `List of module exports is dynamic (${Array.from( unknownProvidedExports, exportInfo => `${ exportInfo.name || "other exports" }: ${exportInfo.getProvidedInfo()} and ${exportInfo.getUsedInfo()}` ).join(", ")})` ); canBeRoot = false; } // Module must not be an entry point if (chunkGraph.isEntryModule(module)) { setInnerBailoutReason(module, "Module is an entry point"); canBeInner = false; } if (canBeRoot) relevantModules.push(module); if (canBeInner) possibleInners.add(module); } logger.timeEnd("select relevant modules"); logger.debug( `${relevantModules.length} potential root modules, ${possibleInners.size} potential inner modules` ); // sort by depth // modules with lower depth are more likely suited as roots // this improves performance, because modules already selected as inner are skipped logger.time("sort relevant modules"); relevantModules.sort((a, b) => { return moduleGraph.getDepth(a) - moduleGraph.getDepth(b); }); logger.timeEnd("sort relevant modules"); /** @type {Statistics} */ const stats = { cached: 0, alreadyInConfig: 0, invalidModule: 0, incorrectChunks: 0, incorrectDependency: 0, incorrectModuleDependency: 0, incorrectChunksOfImporter: 0, incorrectRuntimeCondition: 0, importerFailed: 0, added: 0 }; let statsCandidates = 0; let statsSizeSum = 0; let statsEmptyConfigurations = 0; logger.time("find modules to concatenate"); const concatConfigurations = []; const usedAsInner = new Set(); for (const currentRoot of relevantModules) { // when used by another configuration as inner: // the other configuration is better and we can skip this one // TODO reconsider that when it's only used in a different runtime if (usedAsInner.has(currentRoot)) continue; let chunkRuntime = undefined; for (const r of 
chunkGraph.getModuleRuntimes(currentRoot)) { chunkRuntime = mergeRuntimeOwned(chunkRuntime, r); } const exportsInfo = moduleGraph.getExportsInfo(currentRoot); const filteredRuntime = filterRuntime(chunkRuntime, r => exportsInfo.isModuleUsed(r) ); const activeRuntime = filteredRuntime === true ? chunkRuntime : filteredRuntime === false ? undefined : filteredRuntime; // create a configuration with the root const currentConfiguration = new ConcatConfiguration( currentRoot, activeRuntime ); // cache failures to add modules const failureCache = new Map(); // potential optional import candidates /** @type {Set} */ const candidates = new Set(); // try to add all imports for (const imp of this._getImports( compilation, currentRoot, activeRuntime )) { candidates.add(imp); } for (const imp of candidates) { const impCandidates = new Set(); const problem = this._tryToAdd( compilation, currentConfiguration, imp, chunkRuntime, activeRuntime, possibleInners, impCandidates, failureCache, chunkGraph, true, stats ); if (problem) { failureCache.set(imp, problem); currentConfiguration.addWarning(imp, problem); } else { for (const c of impCandidates) { candidates.add(c); } } } statsCandidates += candidates.size; if (!currentConfiguration.isEmpty()) { const modules = currentConfiguration.getModules(); statsSizeSum += modules.size; concatConfigurations.push(currentConfiguration); for (const module of modules) { if (module !== currentConfiguration.rootModule) { usedAsInner.add(module); } } } else { statsEmptyConfigurations++; const optimizationBailouts = moduleGraph.getOptimizationBailout(currentRoot); for (const warning of currentConfiguration.getWarningsSorted()) { optimizationBailouts.push( formatBailoutWarning(warning[0], warning[1]) ); } } } logger.timeEnd("find modules to concatenate"); logger.debug( `${ concatConfigurations.length } successful concat configurations (avg size: ${ statsSizeSum / concatConfigurations.length }), ${statsEmptyConfigurations} bailed out completely` ); 
logger.debug( `${statsCandidates} candidates were considered for adding (${stats.cached} cached failure, ${stats.alreadyInConfig} already in config, ${stats.invalidModule} invalid module, ${stats.incorrectChunks} incorrect chunks, ${stats.incorrectDependency} incorrect dependency, ${stats.incorrectChunksOfImporter} incorrect chunks of importer, ${stats.incorrectModuleDependency} incorrect module dependency, ${stats.incorrectRuntimeCondition} incorrect runtime condition, ${stats.importerFailed} importer failed, ${stats.added} added)` ); // HACK: Sort configurations by length and start with the longest one // to get the biggest groups possible. Used modules are marked with usedModules // TODO: Allow to reuse existing configuration while trying to add dependencies. // This would improve performance. O(n^2) -> O(n) logger.time(`sort concat configurations`); concatConfigurations.sort((a, b) => { return b.modules.size - a.modules.size; }); logger.timeEnd(`sort concat configurations`); const usedModules = new Set(); logger.time("create concatenated modules"); asyncLib.each( concatConfigurations, (concatConfiguration, callback) => { const rootModule = concatConfiguration.rootModule; // Avoid overlapping configurations // TODO: remove this when todo above is fixed if (usedModules.has(rootModule)) return callback(); const modules = concatConfiguration.getModules(); for (const m of modules) { usedModules.add(m); } // Create a new ConcatenatedModule let newModule = ConcatenatedModule.create( rootModule, modules, concatConfiguration.runtime, compiler.root, compilation.outputOptions.hashFunction ); const build = () => { newModule.build( compiler.options, compilation, null, null, err => { if (err) { if (!err.module) { err.module = newModule; } return callback(err); } integrate(); } ); }; const integrate = () => { if (backCompat) { ChunkGraph.setChunkGraphForModule(newModule, chunkGraph); ModuleGraph.setModuleGraphForModule(newModule, moduleGraph); } for (const warning of 
concatConfiguration.getWarningsSorted()) { moduleGraph .getOptimizationBailout(newModule) .push(formatBailoutWarning(warning[0], warning[1])); } moduleGraph.cloneModuleAttributes(rootModule, newModule); for (const m of modules) { // add to builtModules when one of the included modules was built if (compilation.builtModules.has(m)) { compilation.builtModules.add(newModule); } if (m !== rootModule) { // attach external references to the concatenated module too moduleGraph.copyOutgoingModuleConnections( m, newModule, c => { return ( c.originModule === m && !( c.dependency instanceof HarmonyImportDependency && modules.has(c.module) ) ); } ); // remove module from chunk for (const chunk of chunkGraph.getModuleChunksIterable( rootModule )) { const sourceTypes = chunkGraph.getChunkModuleSourceTypes( chunk, m ); if (sourceTypes.size === 1) { chunkGraph.disconnectChunkAndModule(chunk, m); } else { const newSourceTypes = new Set(sourceTypes); newSourceTypes.delete("javascript"); chunkGraph.setChunkModuleSourceTypes( chunk, m, newSourceTypes ); } } } } compilation.modules.delete(rootModule); ChunkGraph.clearChunkGraphForModule(rootModule); ModuleGraph.clearModuleGraphForModule(rootModule); // remove module from chunk chunkGraph.replaceModule(rootModule, newModule); // replace module references with the concatenated module moduleGraph.moveModuleConnections(rootModule, newModule, c => { const otherModule = c.module === rootModule ? 
c.originModule : c.module; const innerConnection = c.dependency instanceof HarmonyImportDependency && modules.has(otherModule); return !innerConnection; }); // add concatenated module to the compilation compilation.modules.add(newModule); callback(); }; build(); }, err => { logger.timeEnd("create concatenated modules"); process.nextTick(callback.bind(null, err)); } ); } ); }); } /** * @param {Compilation} compilation the compilation * @param {Module} module the module to be added * @param {RuntimeSpec} runtime the runtime scope * @returns {Set} the imported modules */ _getImports(compilation, module, runtime) { const moduleGraph = compilation.moduleGraph; const set = new Set(); for (const dep of module.dependencies) { // Get reference info only for harmony Dependencies if (!(dep instanceof HarmonyImportDependency)) continue; const connection = moduleGraph.getConnection(dep); // Reference is valid and has a module if ( !connection || !connection.module || !connection.isTargetActive(runtime) ) { continue; } const importedNames = compilation.getDependencyReferencedExports( dep, undefined ); if ( importedNames.every(i => Array.isArray(i) ? 
i.length > 0 : i.name.length > 0
				) || Array.isArray(moduleGraph.getProvidedExports(module))
			) {
				set.add(connection.module);
			}
		}
		return set;
	}

	/**
	 * Tries to add a module (and, transitively, all modules importing it) to a
	 * concat configuration; returns the problem preventing it or null on success.
	 * @param {Compilation} compilation webpack compilation
	 * @param {ConcatConfiguration} config concat configuration (will be modified when added)
	 * @param {Module} module the module to be added
	 * @param {RuntimeSpec} runtime the runtime scope of the generated code
	 * @param {RuntimeSpec} activeRuntime the runtime scope of the root module
	 * @param {Set} possibleModules modules that are candidates
	 * @param {Set} candidates list of potential candidates (will be added to)
	 * @param {Map} failureCache cache for problematic modules to be more performant
	 * @param {ChunkGraph} chunkGraph the chunk graph
	 * @param {boolean} avoidMutateOnFailure avoid mutating the config when adding fails
	 * @param {Statistics} statistics gathering metrics
	 * @returns {Module | function(RequestShortener): string} the problematic module
	 */
	_tryToAdd(
		compilation,
		config,
		module,
		runtime,
		activeRuntime,
		possibleModules,
		candidates,
		failureCache,
		chunkGraph,
		avoidMutateOnFailure,
		statistics
	) {
		// Fail fast when this module has already failed to be added before
		const cacheEntry = failureCache.get(module);
		if (cacheEntry) {
			statistics.cached++;
			return cacheEntry;
		}
		// Already added?
		if (config.has(module)) {
			statistics.alreadyInConfig++;
			return null;
		}
		// Not possible to add?
if (!possibleModules.has(module)) { statistics.invalidModule++; failureCache.set(module, module); // cache failures for performance return module; } // Module must be in the correct chunks const missingChunks = Array.from( chunkGraph.getModuleChunksIterable(config.rootModule) ).filter(chunk => !chunkGraph.isModuleInChunk(module, chunk)); if (missingChunks.length > 0) { const problem = requestShortener => { const missingChunksList = Array.from( new Set(missingChunks.map(chunk => chunk.name || "unnamed chunk(s)")) ).sort(); const chunks = Array.from( new Set( Array.from(chunkGraph.getModuleChunksIterable(module)).map( chunk => chunk.name || "unnamed chunk(s)" ) ) ).sort(); return `Module ${module.readableIdentifier( requestShortener )} is not in the same chunk(s) (expected in chunk(s) ${missingChunksList.join( ", " )}, module is in chunk(s) ${chunks.join(", ")})`; }; statistics.incorrectChunks++; failureCache.set(module, problem); // cache failures for performance return problem; } const moduleGraph = compilation.moduleGraph; const incomingConnections = moduleGraph.getIncomingConnectionsByOriginModule(module); const incomingConnectionsFromNonModules = incomingConnections.get(null) || incomingConnections.get(undefined); if (incomingConnectionsFromNonModules) { const activeNonModulesConnections = incomingConnectionsFromNonModules.filter(connection => { // We are not interested in inactive connections // or connections without dependency return connection.isActive(runtime); }); if (activeNonModulesConnections.length > 0) { const problem = requestShortener => { const importingExplanations = new Set( activeNonModulesConnections.map(c => c.explanation).filter(Boolean) ); const explanations = Array.from(importingExplanations).sort(); return `Module ${module.readableIdentifier( requestShortener )} is referenced ${ explanations.length > 0 ? 
`by: ${explanations.join(", ")}` : "in an unsupported way" }`; }; statistics.incorrectDependency++; failureCache.set(module, problem); // cache failures for performance return problem; } } /** @type {Map} */ const incomingConnectionsFromModules = new Map(); for (const [originModule, connections] of incomingConnections) { if (originModule) { // Ignore connection from orphan modules if (chunkGraph.getNumberOfModuleChunks(originModule) === 0) continue; // We don't care for connections from other runtimes let originRuntime = undefined; for (const r of chunkGraph.getModuleRuntimes(originModule)) { originRuntime = mergeRuntimeOwned(originRuntime, r); } if (!intersectRuntime(runtime, originRuntime)) continue; // We are not interested in inactive connections const activeConnections = connections.filter(connection => connection.isActive(runtime) ); if (activeConnections.length > 0) incomingConnectionsFromModules.set(originModule, activeConnections); } } const incomingModules = Array.from(incomingConnectionsFromModules.keys()); // Module must be in the same chunks like the referencing module const otherChunkModules = incomingModules.filter(originModule => { for (const chunk of chunkGraph.getModuleChunksIterable( config.rootModule )) { if (!chunkGraph.isModuleInChunk(originModule, chunk)) { return true; } } return false; }); if (otherChunkModules.length > 0) { const problem = requestShortener => { const names = otherChunkModules .map(m => m.readableIdentifier(requestShortener)) .sort(); return `Module ${module.readableIdentifier( requestShortener )} is referenced from different chunks by these modules: ${names.join( ", " )}`; }; statistics.incorrectChunksOfImporter++; failureCache.set(module, problem); // cache failures for performance return problem; } /** @type {Map} */ const nonHarmonyConnections = new Map(); for (const [originModule, connections] of incomingConnectionsFromModules) { const selected = connections.filter( connection => !connection.dependency || 
!(connection.dependency instanceof HarmonyImportDependency) ); if (selected.length > 0) nonHarmonyConnections.set(originModule, connections); } if (nonHarmonyConnections.size > 0) { const problem = requestShortener => { const names = Array.from(nonHarmonyConnections) .map(([originModule, connections]) => { return `${originModule.readableIdentifier( requestShortener )} (referenced with ${Array.from( new Set( connections .map(c => c.dependency && c.dependency.type) .filter(Boolean) ) ) .sort() .join(", ")})`; }) .sort(); return `Module ${module.readableIdentifier( requestShortener )} is referenced from these modules with unsupported syntax: ${names.join( ", " )}`; }; statistics.incorrectModuleDependency++; failureCache.set(module, problem); // cache failures for performance return problem; } if (runtime !== undefined && typeof runtime !== "string") { // Module must be consistently referenced in the same runtimes /** @type {{ originModule: Module, runtimeCondition: RuntimeSpec }[]} */ const otherRuntimeConnections = []; outer: for (const [ originModule, connections ] of incomingConnectionsFromModules) { /** @type {false | RuntimeSpec} */ let currentRuntimeCondition = false; for (const connection of connections) { const runtimeCondition = filterRuntime(runtime, runtime => { return connection.isTargetActive(runtime); }); if (runtimeCondition === false) continue; if (runtimeCondition === true) continue outer; if (currentRuntimeCondition !== false) { currentRuntimeCondition = mergeRuntime( currentRuntimeCondition, runtimeCondition ); } else { currentRuntimeCondition = runtimeCondition; } } if (currentRuntimeCondition !== false) { otherRuntimeConnections.push({ originModule, runtimeCondition: currentRuntimeCondition }); } } if (otherRuntimeConnections.length > 0) { const problem = requestShortener => { return `Module ${module.readableIdentifier( requestShortener )} is runtime-dependent referenced by these modules: ${Array.from( otherRuntimeConnections, ({ originModule, 
runtimeCondition }) => `${originModule.readableIdentifier( requestShortener )} (expected runtime ${runtimeToString( runtime )}, module is only referenced in ${runtimeToString( /** @type {RuntimeSpec} */ (runtimeCondition) )})` ).join(", ")}`; }; statistics.incorrectRuntimeCondition++; failureCache.set(module, problem); // cache failures for performance return problem; } } let backup; if (avoidMutateOnFailure) { backup = config.snapshot(); } // Add the module config.add(module); incomingModules.sort(compareModulesByIdentifier); // Every module which depends on the added module must be in the configuration too. for (const originModule of incomingModules) { const problem = this._tryToAdd( compilation, config, originModule, runtime, activeRuntime, possibleModules, candidates, failureCache, chunkGraph, false, statistics ); if (problem) { if (backup !== undefined) config.rollback(backup); statistics.importerFailed++; failureCache.set(module, problem); // cache failures for performance return problem; } } // Add imports to possible candidates list for (const imp of this._getImports(compilation, module, runtime)) { candidates.add(imp); } statistics.added++; return null; } } class ConcatConfiguration { /** * @param {Module} rootModule the root module * @param {RuntimeSpec} runtime the runtime */ constructor(rootModule, runtime) { this.rootModule = rootModule; this.runtime = runtime; /** @type {Set} */ this.modules = new Set(); this.modules.add(rootModule); /** @type {Map} */ this.warnings = new Map(); } add(module) { this.modules.add(module); } has(module) { return this.modules.has(module); } isEmpty() { return this.modules.size === 1; } addWarning(module, problem) { this.warnings.set(module, problem); } getWarningsSorted() { return new Map( Array.from(this.warnings).sort((a, b) => { const ai = a[0].identifier(); const bi = b[0].identifier(); if (ai < bi) return -1; if (ai > bi) return 1; return 0; }) ); } /** * @returns {Set} modules as set */ getModules() { return 
this.modules; } snapshot() { return this.modules.size; } rollback(snapshot) { const modules = this.modules; for (const m of modules) { if (snapshot === 0) { modules.delete(m); } else { snapshot--; } } } } module.exports = ModuleConcatenationPlugin; AggressiveMergingPlugin.js000066600000004374150441747060011715 0ustar00/* MIT License http://www.opensource.org/licenses/mit-license.php Author Tobias Koppers @sokra */ "use strict"; const { STAGE_ADVANCED } = require("../OptimizationStages"); /** @typedef {import("../Chunk")} Chunk */ /** @typedef {import("../Compiler")} Compiler */ class AggressiveMergingPlugin { constructor(options) { if ( (options !== undefined && typeof options !== "object") || Array.isArray(options) ) { throw new Error( "Argument should be an options object. To use defaults, pass in nothing.\nFor more info on options, see https://webpack.js.org/plugins/" ); } this.options = options || {}; } /** * Apply the plugin * @param {Compiler} compiler the compiler instance * @returns {void} */ apply(compiler) { const options = this.options; const minSizeReduce = options.minSizeReduce || 1.5; compiler.hooks.thisCompilation.tap( "AggressiveMergingPlugin", compilation => { compilation.hooks.optimizeChunks.tap( { name: "AggressiveMergingPlugin", stage: STAGE_ADVANCED }, chunks => { const chunkGraph = compilation.chunkGraph; /** @type {{a: Chunk, b: Chunk, improvement: number}[]} */ let combinations = []; for (const a of chunks) { if (a.canBeInitial()) continue; for (const b of chunks) { if (b.canBeInitial()) continue; if (b === a) break; if (!chunkGraph.canChunksBeIntegrated(a, b)) { continue; } const aSize = chunkGraph.getChunkSize(b, { chunkOverhead: 0 }); const bSize = chunkGraph.getChunkSize(a, { chunkOverhead: 0 }); const abSize = chunkGraph.getIntegratedChunksSize(b, a, { chunkOverhead: 0 }); const improvement = (aSize + bSize) / abSize; combinations.push({ a, b, improvement }); } } combinations.sort((a, b) => { return b.improvement - a.improvement; }); 
const pair = combinations[0]; if (!pair) return; if (pair.improvement < minSizeReduce) return; chunkGraph.integrateChunks(pair.b, pair.a); compilation.chunks.delete(pair.a); return true; } ); } ); } } module.exports = AggressiveMergingPlugin; SplitChunksPlugin.js000066600000152506150441747060010555 0ustar00/* MIT License http://www.opensource.org/licenses/mit-license.php Author Tobias Koppers @sokra */ "use strict"; const Chunk = require("../Chunk"); const { STAGE_ADVANCED } = require("../OptimizationStages"); const WebpackError = require("../WebpackError"); const { requestToId } = require("../ids/IdHelpers"); const { isSubset } = require("../util/SetHelpers"); const SortableSet = require("../util/SortableSet"); const { compareModulesByIdentifier, compareIterables } = require("../util/comparators"); const createHash = require("../util/createHash"); const deterministicGrouping = require("../util/deterministicGrouping"); const { makePathsRelative } = require("../util/identifier"); const memoize = require("../util/memoize"); const MinMaxSizeWarning = require("./MinMaxSizeWarning"); /** @typedef {import("../../declarations/WebpackOptions").OptimizationSplitChunksCacheGroup} OptimizationSplitChunksCacheGroup */ /** @typedef {import("../../declarations/WebpackOptions").OptimizationSplitChunksGetCacheGroups} OptimizationSplitChunksGetCacheGroups */ /** @typedef {import("../../declarations/WebpackOptions").OptimizationSplitChunksOptions} OptimizationSplitChunksOptions */ /** @typedef {import("../../declarations/WebpackOptions").OptimizationSplitChunksSizes} OptimizationSplitChunksSizes */ /** @typedef {import("../../declarations/WebpackOptions").Output} OutputOptions */ /** @typedef {import("../ChunkGraph")} ChunkGraph */ /** @typedef {import("../ChunkGroup")} ChunkGroup */ /** @typedef {import("../Compilation").AssetInfo} AssetInfo */ /** @typedef {import("../Compilation").PathData} PathData */ /** @typedef {import("../Compiler")} Compiler */ /** @typedef 
{import("../Module")} Module */ /** @typedef {import("../ModuleGraph")} ModuleGraph */ /** @typedef {import("../util/deterministicGrouping").GroupedItems} DeterministicGroupingGroupedItemsForModule */ /** @typedef {import("../util/deterministicGrouping").Options} DeterministicGroupingOptionsForModule */ /** @typedef {Record} SplitChunksSizes */ /** * @callback ChunkFilterFunction * @param {Chunk} chunk * @returns {boolean} */ /** * @callback CombineSizeFunction * @param {number} a * @param {number} b * @returns {number} */ /** * @typedef {Object} CacheGroupSource * @property {string=} key * @property {number=} priority * @property {GetName=} getName * @property {ChunkFilterFunction=} chunksFilter * @property {boolean=} enforce * @property {SplitChunksSizes} minSize * @property {SplitChunksSizes} minSizeReduction * @property {SplitChunksSizes} minRemainingSize * @property {SplitChunksSizes} enforceSizeThreshold * @property {SplitChunksSizes} maxAsyncSize * @property {SplitChunksSizes} maxInitialSize * @property {number=} minChunks * @property {number=} maxAsyncRequests * @property {number=} maxInitialRequests * @property {(string | function(PathData, AssetInfo=): string)=} filename * @property {string=} idHint * @property {string} automaticNameDelimiter * @property {boolean=} reuseExistingChunk * @property {boolean=} usedExports */ /** * @typedef {Object} CacheGroup * @property {string} key * @property {number=} priority * @property {GetName=} getName * @property {ChunkFilterFunction=} chunksFilter * @property {SplitChunksSizes} minSize * @property {SplitChunksSizes} minSizeReduction * @property {SplitChunksSizes} minRemainingSize * @property {SplitChunksSizes} enforceSizeThreshold * @property {SplitChunksSizes} maxAsyncSize * @property {SplitChunksSizes} maxInitialSize * @property {number=} minChunks * @property {number=} maxAsyncRequests * @property {number=} maxInitialRequests * @property {(string | function(PathData, AssetInfo=): string)=} filename * @property 
{string=} idHint * @property {string} automaticNameDelimiter * @property {boolean} reuseExistingChunk * @property {boolean} usedExports * @property {boolean} _validateSize * @property {boolean} _validateRemainingSize * @property {SplitChunksSizes} _minSizeForMaxSize * @property {boolean} _conditionalEnforce */ /** * @typedef {Object} FallbackCacheGroup * @property {ChunkFilterFunction} chunksFilter * @property {SplitChunksSizes} minSize * @property {SplitChunksSizes} maxAsyncSize * @property {SplitChunksSizes} maxInitialSize * @property {string} automaticNameDelimiter */ /** * @typedef {Object} CacheGroupsContext * @property {ModuleGraph} moduleGraph * @property {ChunkGraph} chunkGraph */ /** * @callback GetCacheGroups * @param {Module} module * @param {CacheGroupsContext} context * @returns {CacheGroupSource[]} */ /** * @callback GetName * @param {Module=} module * @param {Chunk[]=} chunks * @param {string=} key * @returns {string=} */ /** * @typedef {Object} SplitChunksOptions * @property {ChunkFilterFunction} chunksFilter * @property {string[]} defaultSizeTypes * @property {SplitChunksSizes} minSize * @property {SplitChunksSizes} minSizeReduction * @property {SplitChunksSizes} minRemainingSize * @property {SplitChunksSizes} enforceSizeThreshold * @property {SplitChunksSizes} maxInitialSize * @property {SplitChunksSizes} maxAsyncSize * @property {number} minChunks * @property {number} maxAsyncRequests * @property {number} maxInitialRequests * @property {boolean} hidePathInfo * @property {string | function(PathData, AssetInfo=): string} filename * @property {string} automaticNameDelimiter * @property {GetCacheGroups} getCacheGroups * @property {GetName} getName * @property {boolean} usedExports * @property {FallbackCacheGroup} fallbackCacheGroup */ /** * @typedef {Object} ChunksInfoItem * @property {SortableSet} modules * @property {CacheGroup} cacheGroup * @property {number} cacheGroupIndex * @property {string} name * @property {Record} sizes * @property {Set} 
chunks * @property {Set} reuseableChunks * @property {Set} chunksKeys */ const defaultGetName = /** @type {GetName} */ (() => {}); const deterministicGroupingForModules = /** @type {function(DeterministicGroupingOptionsForModule): DeterministicGroupingGroupedItemsForModule[]} */ ( deterministicGrouping ); /** @type {WeakMap} */ const getKeyCache = new WeakMap(); /** * @param {string} name a filename to hash * @param {OutputOptions} outputOptions hash function used * @returns {string} hashed filename */ const hashFilename = (name, outputOptions) => { const digest = /** @type {string} */ ( createHash(outputOptions.hashFunction) .update(name) .digest(outputOptions.hashDigest) ); return digest.slice(0, 8); }; /** * @param {Chunk} chunk the chunk * @returns {number} the number of requests */ const getRequests = chunk => { let requests = 0; for (const chunkGroup of chunk.groupsIterable) { requests = Math.max(requests, chunkGroup.chunks.length); } return requests; }; const mapObject = (obj, fn) => { const newObj = Object.create(null); for (const key of Object.keys(obj)) { newObj[key] = fn(obj[key], key); } return newObj; }; /** * @template T * @param {Set} a set * @param {Set} b other set * @returns {boolean} true if at least one item of a is in b */ const isOverlap = (a, b) => { for (const item of a) { if (b.has(item)) return true; } return false; }; const compareModuleIterables = compareIterables(compareModulesByIdentifier); /** * @param {ChunksInfoItem} a item * @param {ChunksInfoItem} b item * @returns {number} compare result */ const compareEntries = (a, b) => { // 1. by priority const diffPriority = a.cacheGroup.priority - b.cacheGroup.priority; if (diffPriority) return diffPriority; // 2. by number of chunks const diffCount = a.chunks.size - b.chunks.size; if (diffCount) return diffCount; // 3. 
by size reduction const aSizeReduce = totalSize(a.sizes) * (a.chunks.size - 1); const bSizeReduce = totalSize(b.sizes) * (b.chunks.size - 1); const diffSizeReduce = aSizeReduce - bSizeReduce; if (diffSizeReduce) return diffSizeReduce; // 4. by cache group index const indexDiff = b.cacheGroupIndex - a.cacheGroupIndex; if (indexDiff) return indexDiff; // 5. by number of modules (to be able to compare by identifier) const modulesA = a.modules; const modulesB = b.modules; const diff = modulesA.size - modulesB.size; if (diff) return diff; // 6. by module identifiers modulesA.sort(); modulesB.sort(); return compareModuleIterables(modulesA, modulesB); }; const INITIAL_CHUNK_FILTER = chunk => chunk.canBeInitial(); const ASYNC_CHUNK_FILTER = chunk => !chunk.canBeInitial(); const ALL_CHUNK_FILTER = chunk => true; /** * @param {OptimizationSplitChunksSizes} value the sizes * @param {string[]} defaultSizeTypes the default size types * @returns {SplitChunksSizes} normalized representation */ const normalizeSizes = (value, defaultSizeTypes) => { if (typeof value === "number") { /** @type {Record} */ const o = {}; for (const sizeType of defaultSizeTypes) o[sizeType] = value; return o; } else if (typeof value === "object" && value !== null) { return { ...value }; } else { return {}; } }; /** * @param {...SplitChunksSizes} sizes the sizes * @returns {SplitChunksSizes} the merged sizes */ const mergeSizes = (...sizes) => { /** @type {SplitChunksSizes} */ let merged = {}; for (let i = sizes.length - 1; i >= 0; i--) { merged = Object.assign(merged, sizes[i]); } return merged; }; /** * @param {SplitChunksSizes} sizes the sizes * @returns {boolean} true, if there are sizes > 0 */ const hasNonZeroSizes = sizes => { for (const key of Object.keys(sizes)) { if (sizes[key] > 0) return true; } return false; }; /** * @param {SplitChunksSizes} a first sizes * @param {SplitChunksSizes} b second sizes * @param {CombineSizeFunction} combine a function to combine sizes * @returns {SplitChunksSizes} 
the combine sizes */ const combineSizes = (a, b, combine) => { const aKeys = new Set(Object.keys(a)); const bKeys = new Set(Object.keys(b)); /** @type {SplitChunksSizes} */ const result = {}; for (const key of aKeys) { if (bKeys.has(key)) { result[key] = combine(a[key], b[key]); } else { result[key] = a[key]; } } for (const key of bKeys) { if (!aKeys.has(key)) { result[key] = b[key]; } } return result; }; /** * @param {SplitChunksSizes} sizes the sizes * @param {SplitChunksSizes} minSize the min sizes * @returns {boolean} true if there are sizes and all existing sizes are at least `minSize` */ const checkMinSize = (sizes, minSize) => { for (const key of Object.keys(minSize)) { const size = sizes[key]; if (size === undefined || size === 0) continue; if (size < minSize[key]) return false; } return true; }; /** * @param {SplitChunksSizes} sizes the sizes * @param {SplitChunksSizes} minSizeReduction the min sizes * @param {number} chunkCount number of chunks * @returns {boolean} true if there are sizes and all existing sizes are at least `minSizeReduction` */ const checkMinSizeReduction = (sizes, minSizeReduction, chunkCount) => { for (const key of Object.keys(minSizeReduction)) { const size = sizes[key]; if (size === undefined || size === 0) continue; if (size * chunkCount < minSizeReduction[key]) return false; } return true; }; /** * @param {SplitChunksSizes} sizes the sizes * @param {SplitChunksSizes} minSize the min sizes * @returns {undefined | string[]} list of size types that are below min size */ const getViolatingMinSizes = (sizes, minSize) => { let list; for (const key of Object.keys(minSize)) { const size = sizes[key]; if (size === undefined || size === 0) continue; if (size < minSize[key]) { if (list === undefined) list = [key]; else list.push(key); } } return list; }; /** * @param {SplitChunksSizes} sizes the sizes * @returns {number} the total size */ const totalSize = sizes => { let size = 0; for (const key of Object.keys(sizes)) { size += sizes[key]; } 
return size; }; /** * @param {false|string|Function} name the chunk name * @returns {GetName} a function to get the name of the chunk */ const normalizeName = name => { if (typeof name === "string") { return () => name; } if (typeof name === "function") { return /** @type {GetName} */ (name); } }; /** * @param {OptimizationSplitChunksCacheGroup["chunks"]} chunks the chunk filter option * @returns {ChunkFilterFunction} the chunk filter function */ const normalizeChunksFilter = chunks => { if (chunks === "initial") { return INITIAL_CHUNK_FILTER; } if (chunks === "async") { return ASYNC_CHUNK_FILTER; } if (chunks === "all") { return ALL_CHUNK_FILTER; } if (typeof chunks === "function") { return chunks; } }; /** * @param {GetCacheGroups | Record} cacheGroups the cache group options * @param {string[]} defaultSizeTypes the default size types * @returns {GetCacheGroups} a function to get the cache groups */ const normalizeCacheGroups = (cacheGroups, defaultSizeTypes) => { if (typeof cacheGroups === "function") { return cacheGroups; } if (typeof cacheGroups === "object" && cacheGroups !== null) { /** @type {(function(Module, CacheGroupsContext, CacheGroupSource[]): void)[]} */ const handlers = []; for (const key of Object.keys(cacheGroups)) { const option = cacheGroups[key]; if (option === false) { continue; } if (typeof option === "string" || option instanceof RegExp) { const source = createCacheGroupSource({}, key, defaultSizeTypes); handlers.push((module, context, results) => { if (checkTest(option, module, context)) { results.push(source); } }); } else if (typeof option === "function") { const cache = new WeakMap(); handlers.push((module, context, results) => { const result = option(module); if (result) { const groups = Array.isArray(result) ? 
result : [result]; for (const group of groups) { const cachedSource = cache.get(group); if (cachedSource !== undefined) { results.push(cachedSource); } else { const source = createCacheGroupSource( group, key, defaultSizeTypes ); cache.set(group, source); results.push(source); } } } }); } else { const source = createCacheGroupSource(option, key, defaultSizeTypes); handlers.push((module, context, results) => { if ( checkTest(option.test, module, context) && checkModuleType(option.type, module) && checkModuleLayer(option.layer, module) ) { results.push(source); } }); } } /** * @param {Module} module the current module * @param {CacheGroupsContext} context the current context * @returns {CacheGroupSource[]} the matching cache groups */ const fn = (module, context) => { /** @type {CacheGroupSource[]} */ let results = []; for (const fn of handlers) { fn(module, context, results); } return results; }; return fn; } return () => null; }; /** * @param {undefined|boolean|string|RegExp|Function} test test option * @param {Module} module the module * @param {CacheGroupsContext} context context object * @returns {boolean} true, if the module should be selected */ const checkTest = (test, module, context) => { if (test === undefined) return true; if (typeof test === "function") { return test(module, context); } if (typeof test === "boolean") return test; if (typeof test === "string") { const name = module.nameForCondition(); return name && name.startsWith(test); } if (test instanceof RegExp) { const name = module.nameForCondition(); return name && test.test(name); } return false; }; /** * @param {undefined|string|RegExp|Function} test type option * @param {Module} module the module * @returns {boolean} true, if the module should be selected */ const checkModuleType = (test, module) => { if (test === undefined) return true; if (typeof test === "function") { return test(module.type); } if (typeof test === "string") { const type = module.type; return test === type; } if (test 
instanceof RegExp) { const type = module.type; return test.test(type); } return false; }; /** * @param {undefined|string|RegExp|Function} test type option * @param {Module} module the module * @returns {boolean} true, if the module should be selected */ const checkModuleLayer = (test, module) => { if (test === undefined) return true; if (typeof test === "function") { return test(module.layer); } if (typeof test === "string") { const layer = module.layer; return test === "" ? !layer : layer && layer.startsWith(test); } if (test instanceof RegExp) { const layer = module.layer; return test.test(layer); } return false; }; /** * @param {OptimizationSplitChunksCacheGroup} options the group options * @param {string} key key of cache group * @param {string[]} defaultSizeTypes the default size types * @returns {CacheGroupSource} the normalized cached group */ const createCacheGroupSource = (options, key, defaultSizeTypes) => { const minSize = normalizeSizes(options.minSize, defaultSizeTypes); const minSizeReduction = normalizeSizes( options.minSizeReduction, defaultSizeTypes ); const maxSize = normalizeSizes(options.maxSize, defaultSizeTypes); return { key, priority: options.priority, getName: normalizeName(options.name), chunksFilter: normalizeChunksFilter(options.chunks), enforce: options.enforce, minSize, minSizeReduction, minRemainingSize: mergeSizes( normalizeSizes(options.minRemainingSize, defaultSizeTypes), minSize ), enforceSizeThreshold: normalizeSizes( options.enforceSizeThreshold, defaultSizeTypes ), maxAsyncSize: mergeSizes( normalizeSizes(options.maxAsyncSize, defaultSizeTypes), maxSize ), maxInitialSize: mergeSizes( normalizeSizes(options.maxInitialSize, defaultSizeTypes), maxSize ), minChunks: options.minChunks, maxAsyncRequests: options.maxAsyncRequests, maxInitialRequests: options.maxInitialRequests, filename: options.filename, idHint: options.idHint, automaticNameDelimiter: options.automaticNameDelimiter, reuseExistingChunk: options.reuseExistingChunk, 
usedExports: options.usedExports }; }; module.exports = class SplitChunksPlugin { /** * @param {OptimizationSplitChunksOptions=} options plugin options */ constructor(options = {}) { const defaultSizeTypes = options.defaultSizeTypes || [ "javascript", "unknown" ]; const fallbackCacheGroup = options.fallbackCacheGroup || {}; const minSize = normalizeSizes(options.minSize, defaultSizeTypes); const minSizeReduction = normalizeSizes( options.minSizeReduction, defaultSizeTypes ); const maxSize = normalizeSizes(options.maxSize, defaultSizeTypes); /** @type {SplitChunksOptions} */ this.options = { chunksFilter: normalizeChunksFilter(options.chunks || "all"), defaultSizeTypes, minSize, minSizeReduction, minRemainingSize: mergeSizes( normalizeSizes(options.minRemainingSize, defaultSizeTypes), minSize ), enforceSizeThreshold: normalizeSizes( options.enforceSizeThreshold, defaultSizeTypes ), maxAsyncSize: mergeSizes( normalizeSizes(options.maxAsyncSize, defaultSizeTypes), maxSize ), maxInitialSize: mergeSizes( normalizeSizes(options.maxInitialSize, defaultSizeTypes), maxSize ), minChunks: options.minChunks || 1, maxAsyncRequests: options.maxAsyncRequests || 1, maxInitialRequests: options.maxInitialRequests || 1, hidePathInfo: options.hidePathInfo || false, filename: options.filename || undefined, getCacheGroups: normalizeCacheGroups( options.cacheGroups, defaultSizeTypes ), getName: options.name ? 
normalizeName(options.name) : defaultGetName, automaticNameDelimiter: options.automaticNameDelimiter, usedExports: options.usedExports, fallbackCacheGroup: { chunksFilter: normalizeChunksFilter( fallbackCacheGroup.chunks || options.chunks || "all" ), minSize: mergeSizes( normalizeSizes(fallbackCacheGroup.minSize, defaultSizeTypes), minSize ), maxAsyncSize: mergeSizes( normalizeSizes(fallbackCacheGroup.maxAsyncSize, defaultSizeTypes), normalizeSizes(fallbackCacheGroup.maxSize, defaultSizeTypes), normalizeSizes(options.maxAsyncSize, defaultSizeTypes), normalizeSizes(options.maxSize, defaultSizeTypes) ), maxInitialSize: mergeSizes( normalizeSizes(fallbackCacheGroup.maxInitialSize, defaultSizeTypes), normalizeSizes(fallbackCacheGroup.maxSize, defaultSizeTypes), normalizeSizes(options.maxInitialSize, defaultSizeTypes), normalizeSizes(options.maxSize, defaultSizeTypes) ), automaticNameDelimiter: fallbackCacheGroup.automaticNameDelimiter || options.automaticNameDelimiter || "~" } }; /** @type {WeakMap} */ this._cacheGroupCache = new WeakMap(); } /** * @param {CacheGroupSource} cacheGroupSource source * @returns {CacheGroup} the cache group (cached) */ _getCacheGroup(cacheGroupSource) { const cacheEntry = this._cacheGroupCache.get(cacheGroupSource); if (cacheEntry !== undefined) return cacheEntry; const minSize = mergeSizes( cacheGroupSource.minSize, cacheGroupSource.enforce ? undefined : this.options.minSize ); const minSizeReduction = mergeSizes( cacheGroupSource.minSizeReduction, cacheGroupSource.enforce ? undefined : this.options.minSizeReduction ); const minRemainingSize = mergeSizes( cacheGroupSource.minRemainingSize, cacheGroupSource.enforce ? undefined : this.options.minRemainingSize ); const enforceSizeThreshold = mergeSizes( cacheGroupSource.enforceSizeThreshold, cacheGroupSource.enforce ? 
undefined : this.options.enforceSizeThreshold ); const cacheGroup = { key: cacheGroupSource.key, priority: cacheGroupSource.priority || 0, chunksFilter: cacheGroupSource.chunksFilter || this.options.chunksFilter, minSize, minSizeReduction, minRemainingSize, enforceSizeThreshold, maxAsyncSize: mergeSizes( cacheGroupSource.maxAsyncSize, cacheGroupSource.enforce ? undefined : this.options.maxAsyncSize ), maxInitialSize: mergeSizes( cacheGroupSource.maxInitialSize, cacheGroupSource.enforce ? undefined : this.options.maxInitialSize ), minChunks: cacheGroupSource.minChunks !== undefined ? cacheGroupSource.minChunks : cacheGroupSource.enforce ? 1 : this.options.minChunks, maxAsyncRequests: cacheGroupSource.maxAsyncRequests !== undefined ? cacheGroupSource.maxAsyncRequests : cacheGroupSource.enforce ? Infinity : this.options.maxAsyncRequests, maxInitialRequests: cacheGroupSource.maxInitialRequests !== undefined ? cacheGroupSource.maxInitialRequests : cacheGroupSource.enforce ? Infinity : this.options.maxInitialRequests, getName: cacheGroupSource.getName !== undefined ? cacheGroupSource.getName : this.options.getName, usedExports: cacheGroupSource.usedExports !== undefined ? cacheGroupSource.usedExports : this.options.usedExports, filename: cacheGroupSource.filename !== undefined ? cacheGroupSource.filename : this.options.filename, automaticNameDelimiter: cacheGroupSource.automaticNameDelimiter !== undefined ? cacheGroupSource.automaticNameDelimiter : this.options.automaticNameDelimiter, idHint: cacheGroupSource.idHint !== undefined ? 
cacheGroupSource.idHint : cacheGroupSource.key, reuseExistingChunk: cacheGroupSource.reuseExistingChunk || false, _validateSize: hasNonZeroSizes(minSize), _validateRemainingSize: hasNonZeroSizes(minRemainingSize), _minSizeForMaxSize: mergeSizes( cacheGroupSource.minSize, this.options.minSize ), _conditionalEnforce: hasNonZeroSizes(enforceSizeThreshold) }; this._cacheGroupCache.set(cacheGroupSource, cacheGroup); return cacheGroup; } /** * Apply the plugin * @param {Compiler} compiler the compiler instance * @returns {void} */ apply(compiler) { const cachedMakePathsRelative = makePathsRelative.bindContextCache( compiler.context, compiler.root ); compiler.hooks.thisCompilation.tap("SplitChunksPlugin", compilation => { const logger = compilation.getLogger("webpack.SplitChunksPlugin"); let alreadyOptimized = false; compilation.hooks.unseal.tap("SplitChunksPlugin", () => { alreadyOptimized = false; }); compilation.hooks.optimizeChunks.tap( { name: "SplitChunksPlugin", stage: STAGE_ADVANCED }, chunks => { if (alreadyOptimized) return; alreadyOptimized = true; logger.time("prepare"); const chunkGraph = compilation.chunkGraph; const moduleGraph = compilation.moduleGraph; // Give each selected chunk an index (to create strings from chunks) /** @type {Map} */ const chunkIndexMap = new Map(); const ZERO = BigInt("0"); const ONE = BigInt("1"); const START = ONE << BigInt("31"); let index = START; for (const chunk of chunks) { chunkIndexMap.set( chunk, index | BigInt((Math.random() * 0x7fffffff) | 0) ); index = index << ONE; } /** * @param {Iterable} chunks list of chunks * @returns {bigint | Chunk} key of the chunks */ const getKey = chunks => { const iterator = chunks[Symbol.iterator](); let result = iterator.next(); if (result.done) return ZERO; const first = result.value; result = iterator.next(); if (result.done) return first; let key = chunkIndexMap.get(first) | chunkIndexMap.get(result.value); while (!(result = iterator.next()).done) { const raw = 
chunkIndexMap.get(result.value); key = key ^ raw; } return key; }; const keyToString = key => { if (typeof key === "bigint") return key.toString(16); return chunkIndexMap.get(key).toString(16); }; const getChunkSetsInGraph = memoize(() => { /** @type {Map>} */ const chunkSetsInGraph = new Map(); /** @type {Set} */ const singleChunkSets = new Set(); for (const module of compilation.modules) { const chunks = chunkGraph.getModuleChunksIterable(module); const chunksKey = getKey(chunks); if (typeof chunksKey === "bigint") { if (!chunkSetsInGraph.has(chunksKey)) { chunkSetsInGraph.set(chunksKey, new Set(chunks)); } } else { singleChunkSets.add(chunksKey); } } return { chunkSetsInGraph, singleChunkSets }; }); /** * @param {Module} module the module * @returns {Iterable} groups of chunks with equal exports */ const groupChunksByExports = module => { const exportsInfo = moduleGraph.getExportsInfo(module); const groupedByUsedExports = new Map(); for (const chunk of chunkGraph.getModuleChunksIterable(module)) { const key = exportsInfo.getUsageKey(chunk.runtime); const list = groupedByUsedExports.get(key); if (list !== undefined) { list.push(chunk); } else { groupedByUsedExports.set(key, [chunk]); } } return groupedByUsedExports.values(); }; /** @type {Map>} */ const groupedByExportsMap = new Map(); const getExportsChunkSetsInGraph = memoize(() => { /** @type {Map>} */ const chunkSetsInGraph = new Map(); /** @type {Set} */ const singleChunkSets = new Set(); for (const module of compilation.modules) { const groupedChunks = Array.from(groupChunksByExports(module)); groupedByExportsMap.set(module, groupedChunks); for (const chunks of groupedChunks) { if (chunks.length === 1) { singleChunkSets.add(chunks[0]); } else { const chunksKey = /** @type {bigint} */ (getKey(chunks)); if (!chunkSetsInGraph.has(chunksKey)) { chunkSetsInGraph.set(chunksKey, new Set(chunks)); } } } } return { chunkSetsInGraph, singleChunkSets }; }); // group these set of chunks by count // to allow to check 
less sets via isSubset // (only smaller sets can be subset) const groupChunkSetsByCount = chunkSets => { /** @type {Map>>} */ const chunkSetsByCount = new Map(); for (const chunksSet of chunkSets) { const count = chunksSet.size; let array = chunkSetsByCount.get(count); if (array === undefined) { array = []; chunkSetsByCount.set(count, array); } array.push(chunksSet); } return chunkSetsByCount; }; const getChunkSetsByCount = memoize(() => groupChunkSetsByCount( getChunkSetsInGraph().chunkSetsInGraph.values() ) ); const getExportsChunkSetsByCount = memoize(() => groupChunkSetsByCount( getExportsChunkSetsInGraph().chunkSetsInGraph.values() ) ); // Create a list of possible combinations const createGetCombinations = ( chunkSets, singleChunkSets, chunkSetsByCount ) => { /** @type {Map | Chunk)[]>} */ const combinationsCache = new Map(); return key => { const cacheEntry = combinationsCache.get(key); if (cacheEntry !== undefined) return cacheEntry; if (key instanceof Chunk) { const result = [key]; combinationsCache.set(key, result); return result; } const chunksSet = chunkSets.get(key); /** @type {(Set | Chunk)[]} */ const array = [chunksSet]; for (const [count, setArray] of chunkSetsByCount) { // "equal" is not needed because they would have been merge in the first step if (count < chunksSet.size) { for (const set of setArray) { if (isSubset(chunksSet, set)) { array.push(set); } } } } for (const chunk of singleChunkSets) { if (chunksSet.has(chunk)) { array.push(chunk); } } combinationsCache.set(key, array); return array; }; }; const getCombinationsFactory = memoize(() => { const { chunkSetsInGraph, singleChunkSets } = getChunkSetsInGraph(); return createGetCombinations( chunkSetsInGraph, singleChunkSets, getChunkSetsByCount() ); }); const getCombinations = key => getCombinationsFactory()(key); const getExportsCombinationsFactory = memoize(() => { const { chunkSetsInGraph, singleChunkSets } = getExportsChunkSetsInGraph(); return createGetCombinations( chunkSetsInGraph, 
singleChunkSets, getExportsChunkSetsByCount() ); }); const getExportsCombinations = key => getExportsCombinationsFactory()(key); /** * @typedef {Object} SelectedChunksResult * @property {Chunk[]} chunks the list of chunks * @property {bigint | Chunk} key a key of the list */ /** @type {WeakMap | Chunk, WeakMap>} */ const selectedChunksCacheByChunksSet = new WeakMap(); /** * get list and key by applying the filter function to the list * It is cached for performance reasons * @param {Set | Chunk} chunks list of chunks * @param {ChunkFilterFunction} chunkFilter filter function for chunks * @returns {SelectedChunksResult} list and key */ const getSelectedChunks = (chunks, chunkFilter) => { let entry = selectedChunksCacheByChunksSet.get(chunks); if (entry === undefined) { entry = new WeakMap(); selectedChunksCacheByChunksSet.set(chunks, entry); } /** @type {SelectedChunksResult} */ let entry2 = entry.get(chunkFilter); if (entry2 === undefined) { /** @type {Chunk[]} */ const selectedChunks = []; if (chunks instanceof Chunk) { if (chunkFilter(chunks)) selectedChunks.push(chunks); } else { for (const chunk of chunks) { if (chunkFilter(chunk)) selectedChunks.push(chunk); } } entry2 = { chunks: selectedChunks, key: getKey(selectedChunks) }; entry.set(chunkFilter, entry2); } return entry2; }; /** @type {Map} */ const alreadyValidatedParents = new Map(); /** @type {Set} */ const alreadyReportedErrors = new Set(); // Map a list of chunks to a list of modules // For the key the chunk "index" is used, the value is a SortableSet of modules /** @type {Map} */ const chunksInfoMap = new Map(); /** * @param {CacheGroup} cacheGroup the current cache group * @param {number} cacheGroupIndex the index of the cache group of ordering * @param {Chunk[]} selectedChunks chunks selected for this module * @param {bigint | Chunk} selectedChunksKey a key of selectedChunks * @param {Module} module the current module * @returns {void} */ const addModuleToChunksInfoMap = ( cacheGroup, 
cacheGroupIndex, selectedChunks, selectedChunksKey, module ) => { // Break if minimum number of chunks is not reached if (selectedChunks.length < cacheGroup.minChunks) return; // Determine name for split chunk const name = cacheGroup.getName( module, selectedChunks, cacheGroup.key ); // Check if the name is ok const existingChunk = compilation.namedChunks.get(name); if (existingChunk) { const parentValidationKey = `${name}|${ typeof selectedChunksKey === "bigint" ? selectedChunksKey : selectedChunksKey.debugId }`; const valid = alreadyValidatedParents.get(parentValidationKey); if (valid === false) return; if (valid === undefined) { // Module can only be moved into the existing chunk if the existing chunk // is a parent of all selected chunks let isInAllParents = true; /** @type {Set} */ const queue = new Set(); for (const chunk of selectedChunks) { for (const group of chunk.groupsIterable) { queue.add(group); } } for (const group of queue) { if (existingChunk.isInGroup(group)) continue; let hasParent = false; for (const parent of group.parentsIterable) { hasParent = true; queue.add(parent); } if (!hasParent) { isInAllParents = false; } } const valid = isInAllParents; alreadyValidatedParents.set(parentValidationKey, valid); if (!valid) { if (!alreadyReportedErrors.has(name)) { alreadyReportedErrors.add(name); compilation.errors.push( new WebpackError( "SplitChunksPlugin\n" + `Cache group "${cacheGroup.key}" conflicts with existing chunk.\n` + `Both have the same name "${name}" and existing chunk is not a parent of the selected modules.\n` + "Use a different name for the cache group or make sure that the existing chunk is a parent (e. g. via dependOn).\n" + 'HINT: You can omit "name" to automatically create a name.\n' + "BREAKING CHANGE: webpack < 5 used to allow to use an entrypoint as splitChunk. " + "This is no longer allowed when the entrypoint is not a parent of the selected modules.\n" + "Remove this entrypoint and add modules to cache group's 'test' instead. 
" + "If you need modules to be evaluated on startup, add them to the existing entrypoints (make them arrays). " + "See migration guide of more info." ) ); } return; } } } // Create key for maps // When it has a name we use the name as key // Otherwise we create the key from chunks and cache group key // This automatically merges equal names const key = cacheGroup.key + (name ? ` name:${name}` : ` chunks:${keyToString(selectedChunksKey)}`); // Add module to maps let info = chunksInfoMap.get(key); if (info === undefined) { chunksInfoMap.set( key, (info = { modules: new SortableSet( undefined, compareModulesByIdentifier ), cacheGroup, cacheGroupIndex, name, sizes: {}, chunks: new Set(), reuseableChunks: new Set(), chunksKeys: new Set() }) ); } const oldSize = info.modules.size; info.modules.add(module); if (info.modules.size !== oldSize) { for (const type of module.getSourceTypes()) { info.sizes[type] = (info.sizes[type] || 0) + module.size(type); } } const oldChunksKeysSize = info.chunksKeys.size; info.chunksKeys.add(selectedChunksKey); if (oldChunksKeysSize !== info.chunksKeys.size) { for (const chunk of selectedChunks) { info.chunks.add(chunk); } } }; const context = { moduleGraph, chunkGraph }; logger.timeEnd("prepare"); logger.time("modules"); // Walk through all modules for (const module of compilation.modules) { // Get cache group let cacheGroups = this.options.getCacheGroups(module, context); if (!Array.isArray(cacheGroups) || cacheGroups.length === 0) { continue; } // Prepare some values (usedExports = false) const getCombs = memoize(() => { const chunks = chunkGraph.getModuleChunksIterable(module); const chunksKey = getKey(chunks); return getCombinations(chunksKey); }); // Prepare some values (usedExports = true) const getCombsByUsedExports = memoize(() => { // fill the groupedByExportsMap getExportsChunkSetsInGraph(); /** @type {Set | Chunk>} */ const set = new Set(); const groupedByUsedExports = groupedByExportsMap.get(module); for (const chunks of 
groupedByUsedExports) { const chunksKey = getKey(chunks); for (const comb of getExportsCombinations(chunksKey)) set.add(comb); } return set; }); let cacheGroupIndex = 0; for (const cacheGroupSource of cacheGroups) { const cacheGroup = this._getCacheGroup(cacheGroupSource); const combs = cacheGroup.usedExports ? getCombsByUsedExports() : getCombs(); // For all combination of chunk selection for (const chunkCombination of combs) { // Break if minimum number of chunks is not reached const count = chunkCombination instanceof Chunk ? 1 : chunkCombination.size; if (count < cacheGroup.minChunks) continue; // Select chunks by configuration const { chunks: selectedChunks, key: selectedChunksKey } = getSelectedChunks(chunkCombination, cacheGroup.chunksFilter); addModuleToChunksInfoMap( cacheGroup, cacheGroupIndex, selectedChunks, selectedChunksKey, module ); } cacheGroupIndex++; } } logger.timeEnd("modules"); logger.time("queue"); /** * @param {ChunksInfoItem} info entry * @param {string[]} sourceTypes source types to be removed */ const removeModulesWithSourceType = (info, sourceTypes) => { for (const module of info.modules) { const types = module.getSourceTypes(); if (sourceTypes.some(type => types.has(type))) { info.modules.delete(module); for (const type of types) { info.sizes[type] -= module.size(type); } } } }; /** * @param {ChunksInfoItem} info entry * @returns {boolean} true, if entry become empty */ const removeMinSizeViolatingModules = info => { if (!info.cacheGroup._validateSize) return false; const violatingSizes = getViolatingMinSizes( info.sizes, info.cacheGroup.minSize ); if (violatingSizes === undefined) return false; removeModulesWithSourceType(info, violatingSizes); return info.modules.size === 0; }; // Filter items were size < minSize for (const [key, info] of chunksInfoMap) { if (removeMinSizeViolatingModules(info)) { chunksInfoMap.delete(key); } else if ( !checkMinSizeReduction( info.sizes, info.cacheGroup.minSizeReduction, info.chunks.size ) ) { 
chunksInfoMap.delete(key); } } /** * @typedef {Object} MaxSizeQueueItem * @property {SplitChunksSizes} minSize * @property {SplitChunksSizes} maxAsyncSize * @property {SplitChunksSizes} maxInitialSize * @property {string} automaticNameDelimiter * @property {string[]} keys */ /** @type {Map} */ const maxSizeQueueMap = new Map(); while (chunksInfoMap.size > 0) { // Find best matching entry let bestEntryKey; let bestEntry; for (const pair of chunksInfoMap) { const key = pair[0]; const info = pair[1]; if ( bestEntry === undefined || compareEntries(bestEntry, info) < 0 ) { bestEntry = info; bestEntryKey = key; } } const item = bestEntry; chunksInfoMap.delete(bestEntryKey); let chunkName = item.name; // Variable for the new chunk (lazy created) /** @type {Chunk} */ let newChunk; // When no chunk name, check if we can reuse a chunk instead of creating a new one let isExistingChunk = false; let isReusedWithAllModules = false; if (chunkName) { const chunkByName = compilation.namedChunks.get(chunkName); if (chunkByName !== undefined) { newChunk = chunkByName; const oldSize = item.chunks.size; item.chunks.delete(newChunk); isExistingChunk = item.chunks.size !== oldSize; } } else if (item.cacheGroup.reuseExistingChunk) { outer: for (const chunk of item.chunks) { if ( chunkGraph.getNumberOfChunkModules(chunk) !== item.modules.size ) { continue; } if ( item.chunks.size > 1 && chunkGraph.getNumberOfEntryModules(chunk) > 0 ) { continue; } for (const module of item.modules) { if (!chunkGraph.isModuleInChunk(module, chunk)) { continue outer; } } if (!newChunk || !newChunk.name) { newChunk = chunk; } else if ( chunk.name && chunk.name.length < newChunk.name.length ) { newChunk = chunk; } else if ( chunk.name && chunk.name.length === newChunk.name.length && chunk.name < newChunk.name ) { newChunk = chunk; } } if (newChunk) { item.chunks.delete(newChunk); chunkName = undefined; isExistingChunk = true; isReusedWithAllModules = true; } } const enforced = 
item.cacheGroup._conditionalEnforce && checkMinSize(item.sizes, item.cacheGroup.enforceSizeThreshold); const usedChunks = new Set(item.chunks); // Check if maxRequests condition can be fulfilled if ( !enforced && (Number.isFinite(item.cacheGroup.maxInitialRequests) || Number.isFinite(item.cacheGroup.maxAsyncRequests)) ) { for (const chunk of usedChunks) { // respect max requests const maxRequests = chunk.isOnlyInitial() ? item.cacheGroup.maxInitialRequests : chunk.canBeInitial() ? Math.min( item.cacheGroup.maxInitialRequests, item.cacheGroup.maxAsyncRequests ) : item.cacheGroup.maxAsyncRequests; if ( isFinite(maxRequests) && getRequests(chunk) >= maxRequests ) { usedChunks.delete(chunk); } } } outer: for (const chunk of usedChunks) { for (const module of item.modules) { if (chunkGraph.isModuleInChunk(module, chunk)) continue outer; } usedChunks.delete(chunk); } // Were some (invalid) chunks removed from usedChunks? // => readd all modules to the queue, as things could have been changed if (usedChunks.size < item.chunks.size) { if (isExistingChunk) usedChunks.add(newChunk); if (usedChunks.size >= item.cacheGroup.minChunks) { const chunksArr = Array.from(usedChunks); for (const module of item.modules) { addModuleToChunksInfoMap( item.cacheGroup, item.cacheGroupIndex, chunksArr, getKey(usedChunks), module ); } } continue; } // Validate minRemainingSize constraint when a single chunk is left over if ( !enforced && item.cacheGroup._validateRemainingSize && usedChunks.size === 1 ) { const [chunk] = usedChunks; let chunkSizes = Object.create(null); for (const module of chunkGraph.getChunkModulesIterable(chunk)) { if (!item.modules.has(module)) { for (const type of module.getSourceTypes()) { chunkSizes[type] = (chunkSizes[type] || 0) + module.size(type); } } } const violatingSizes = getViolatingMinSizes( chunkSizes, item.cacheGroup.minRemainingSize ); if (violatingSizes !== undefined) { const oldModulesSize = item.modules.size; removeModulesWithSourceType(item, 
violatingSizes); if ( item.modules.size > 0 && item.modules.size !== oldModulesSize ) { // queue this item again to be processed again // without violating modules chunksInfoMap.set(bestEntryKey, item); } continue; } } // Create the new chunk if not reusing one if (newChunk === undefined) { newChunk = compilation.addChunk(chunkName); } // Walk through all chunks for (const chunk of usedChunks) { // Add graph connections for splitted chunk chunk.split(newChunk); } // Add a note to the chunk newChunk.chunkReason = (newChunk.chunkReason ? newChunk.chunkReason + ", " : "") + (isReusedWithAllModules ? "reused as split chunk" : "split chunk"); if (item.cacheGroup.key) { newChunk.chunkReason += ` (cache group: ${item.cacheGroup.key})`; } if (chunkName) { newChunk.chunkReason += ` (name: ${chunkName})`; } if (item.cacheGroup.filename) { newChunk.filenameTemplate = item.cacheGroup.filename; } if (item.cacheGroup.idHint) { newChunk.idNameHints.add(item.cacheGroup.idHint); } if (!isReusedWithAllModules) { // Add all modules to the new chunk for (const module of item.modules) { if (!module.chunkCondition(newChunk, compilation)) continue; // Add module to new chunk chunkGraph.connectChunkAndModule(newChunk, module); // Remove module from used chunks for (const chunk of usedChunks) { chunkGraph.disconnectChunkAndModule(chunk, module); } } } else { // Remove all modules from used chunks for (const module of item.modules) { for (const chunk of usedChunks) { chunkGraph.disconnectChunkAndModule(chunk, module); } } } if ( Object.keys(item.cacheGroup.maxAsyncSize).length > 0 || Object.keys(item.cacheGroup.maxInitialSize).length > 0 ) { const oldMaxSizeSettings = maxSizeQueueMap.get(newChunk); maxSizeQueueMap.set(newChunk, { minSize: oldMaxSizeSettings ? combineSizes( oldMaxSizeSettings.minSize, item.cacheGroup._minSizeForMaxSize, Math.max ) : item.cacheGroup.minSize, maxAsyncSize: oldMaxSizeSettings ? 
combineSizes( oldMaxSizeSettings.maxAsyncSize, item.cacheGroup.maxAsyncSize, Math.min ) : item.cacheGroup.maxAsyncSize, maxInitialSize: oldMaxSizeSettings ? combineSizes( oldMaxSizeSettings.maxInitialSize, item.cacheGroup.maxInitialSize, Math.min ) : item.cacheGroup.maxInitialSize, automaticNameDelimiter: item.cacheGroup.automaticNameDelimiter, keys: oldMaxSizeSettings ? oldMaxSizeSettings.keys.concat(item.cacheGroup.key) : [item.cacheGroup.key] }); } // remove all modules from other entries and update size for (const [key, info] of chunksInfoMap) { if (isOverlap(info.chunks, usedChunks)) { // update modules and total size // may remove it from the map when < minSize let updated = false; for (const module of item.modules) { if (info.modules.has(module)) { // remove module info.modules.delete(module); // update size for (const key of module.getSourceTypes()) { info.sizes[key] -= module.size(key); } updated = true; } } if (updated) { if (info.modules.size === 0) { chunksInfoMap.delete(key); continue; } if ( removeMinSizeViolatingModules(info) || !checkMinSizeReduction( info.sizes, info.cacheGroup.minSizeReduction, info.chunks.size ) ) { chunksInfoMap.delete(key); continue; } } } } } logger.timeEnd("queue"); logger.time("maxSize"); /** @type {Set} */ const incorrectMinMaxSizeSet = new Set(); const { outputOptions } = compilation; // Make sure that maxSize is fulfilled const { fallbackCacheGroup } = this.options; for (const chunk of Array.from(compilation.chunks)) { const chunkConfig = maxSizeQueueMap.get(chunk); const { minSize, maxAsyncSize, maxInitialSize, automaticNameDelimiter } = chunkConfig || fallbackCacheGroup; if (!chunkConfig && !fallbackCacheGroup.chunksFilter(chunk)) continue; /** @type {SplitChunksSizes} */ let maxSize; if (chunk.isOnlyInitial()) { maxSize = maxInitialSize; } else if (chunk.canBeInitial()) { maxSize = combineSizes(maxAsyncSize, maxInitialSize, Math.min); } else { maxSize = maxAsyncSize; } if (Object.keys(maxSize).length === 0) { continue; 
} for (const key of Object.keys(maxSize)) { const maxSizeValue = maxSize[key]; const minSizeValue = minSize[key]; if ( typeof minSizeValue === "number" && minSizeValue > maxSizeValue ) { const keys = chunkConfig && chunkConfig.keys; const warningKey = `${ keys && keys.join() } ${minSizeValue} ${maxSizeValue}`; if (!incorrectMinMaxSizeSet.has(warningKey)) { incorrectMinMaxSizeSet.add(warningKey); compilation.warnings.push( new MinMaxSizeWarning(keys, minSizeValue, maxSizeValue) ); } } } const results = deterministicGroupingForModules({ minSize, maxSize: mapObject(maxSize, (value, key) => { const minSizeValue = minSize[key]; return typeof minSizeValue === "number" ? Math.max(value, minSizeValue) : value; }), items: chunkGraph.getChunkModulesIterable(chunk), getKey(module) { const cache = getKeyCache.get(module); if (cache !== undefined) return cache; const ident = cachedMakePathsRelative(module.identifier()); const nameForCondition = module.nameForCondition && module.nameForCondition(); const name = nameForCondition ? cachedMakePathsRelative(nameForCondition) : ident.replace(/^.*!|\?[^?!]*$/g, ""); const fullKey = name + automaticNameDelimiter + hashFilename(ident, outputOptions); const key = requestToId(fullKey); getKeyCache.set(module, key); return key; }, getSize(module) { const size = Object.create(null); for (const key of module.getSourceTypes()) { size[key] = module.size(key); } return size; } }); if (results.length <= 1) { continue; } for (let i = 0; i < results.length; i++) { const group = results[i]; const key = this.options.hidePathInfo ? hashFilename(group.key, outputOptions) : group.key; let name = chunk.name ? 
chunk.name + automaticNameDelimiter + key : null; if (name && name.length > 100) { name = name.slice(0, 100) + automaticNameDelimiter + hashFilename(name, outputOptions); } if (i !== results.length - 1) { const newPart = compilation.addChunk(name); chunk.split(newPart); newPart.chunkReason = chunk.chunkReason; // Add all modules to the new chunk for (const module of group.items) { if (!module.chunkCondition(newPart, compilation)) { continue; } // Add module to new chunk chunkGraph.connectChunkAndModule(newPart, module); // Remove module from used chunks chunkGraph.disconnectChunkAndModule(chunk, module); } } else { // change the chunk to be a part chunk.name = name; } } } logger.timeEnd("maxSize"); } ); }); } }; MinChunkSizePlugin.js000066600000006407150441747060010653 0ustar00/* MIT License http://www.opensource.org/licenses/mit-license.php Author Tobias Koppers @sokra */ "use strict"; const { STAGE_ADVANCED } = require("../OptimizationStages"); const createSchemaValidation = require("../util/create-schema-validation"); /** @typedef {import("../../declarations/plugins/optimize/MinChunkSizePlugin").MinChunkSizePluginOptions} MinChunkSizePluginOptions */ /** @typedef {import("../Chunk")} Chunk */ /** @typedef {import("../Compiler")} Compiler */ const validate = createSchemaValidation( require("../../schemas/plugins/optimize/MinChunkSizePlugin.check.js"), () => require("../../schemas/plugins/optimize/MinChunkSizePlugin.json"), { name: "Min Chunk Size Plugin", baseDataPath: "options" } ); class MinChunkSizePlugin { /** * @param {MinChunkSizePluginOptions} options options object */ constructor(options) { validate(options); this.options = options; } /** * Apply the plugin * @param {Compiler} compiler the compiler instance * @returns {void} */ apply(compiler) { const options = this.options; const minChunkSize = options.minChunkSize; compiler.hooks.compilation.tap("MinChunkSizePlugin", compilation => { compilation.hooks.optimizeChunks.tap( { name: "MinChunkSizePlugin", 
stage: STAGE_ADVANCED }, chunks => { const chunkGraph = compilation.chunkGraph; const equalOptions = { chunkOverhead: 1, entryChunkMultiplicator: 1 }; const chunkSizesMap = new Map(); /** @type {[Chunk, Chunk][]} */ const combinations = []; /** @type {Chunk[]} */ const smallChunks = []; const visitedChunks = []; for (const a of chunks) { // check if one of the chunks sizes is smaller than the minChunkSize // and filter pairs that can NOT be integrated! if (chunkGraph.getChunkSize(a, equalOptions) < minChunkSize) { smallChunks.push(a); for (const b of visitedChunks) { if (chunkGraph.canChunksBeIntegrated(b, a)) combinations.push([b, a]); } } else { for (const b of smallChunks) { if (chunkGraph.canChunksBeIntegrated(b, a)) combinations.push([b, a]); } } chunkSizesMap.set(a, chunkGraph.getChunkSize(a, options)); visitedChunks.push(a); } const sortedSizeFilteredExtendedPairCombinations = combinations .map(pair => { // extend combination pairs with size and integrated size const a = chunkSizesMap.get(pair[0]); const b = chunkSizesMap.get(pair[1]); const ab = chunkGraph.getIntegratedChunksSize( pair[0], pair[1], options ); /** @type {[number, number, Chunk, Chunk]} */ const extendedPair = [a + b - ab, ab, pair[0], pair[1]]; return extendedPair; }) .sort((a, b) => { // sadly javascript does an in place sort here // sort by size const diff = b[0] - a[0]; if (diff !== 0) return diff; return a[1] - b[1]; }); if (sortedSizeFilteredExtendedPairCombinations.length === 0) return; const pair = sortedSizeFilteredExtendedPairCombinations[0]; chunkGraph.integrateChunks(pair[2], pair[3]); compilation.chunks.delete(pair[3]); return true; } ); }); } } module.exports = MinChunkSizePlugin; MergeDuplicateChunksPlugin.js000066600000007020150441747060012342 0ustar00/* MIT License http://www.opensource.org/licenses/mit-license.php Author Tobias Koppers @sokra */ "use strict"; const { STAGE_BASIC } = require("../OptimizationStages"); const { runtimeEqual } = require("../util/runtime"); /** 
@typedef {import("../Compiler")} Compiler */ class MergeDuplicateChunksPlugin { /** * @param {Compiler} compiler the compiler * @returns {void} */ apply(compiler) { compiler.hooks.compilation.tap( "MergeDuplicateChunksPlugin", compilation => { compilation.hooks.optimizeChunks.tap( { name: "MergeDuplicateChunksPlugin", stage: STAGE_BASIC }, chunks => { const { chunkGraph, moduleGraph } = compilation; // remember already tested chunks for performance const notDuplicates = new Set(); // for each chunk for (const chunk of chunks) { // track a Set of all chunk that could be duplicates let possibleDuplicates; for (const module of chunkGraph.getChunkModulesIterable(chunk)) { if (possibleDuplicates === undefined) { // when possibleDuplicates is not yet set, // create a new Set from chunks of the current module // including only chunks with the same number of modules for (const dup of chunkGraph.getModuleChunksIterable( module )) { if ( dup !== chunk && chunkGraph.getNumberOfChunkModules(chunk) === chunkGraph.getNumberOfChunkModules(dup) && !notDuplicates.has(dup) ) { // delay allocating the new Set until here, reduce memory pressure if (possibleDuplicates === undefined) { possibleDuplicates = new Set(); } possibleDuplicates.add(dup); } } // when no chunk is possible we can break here if (possibleDuplicates === undefined) break; } else { // validate existing possible duplicates for (const dup of possibleDuplicates) { // remove possible duplicate when module is not contained if (!chunkGraph.isModuleInChunk(module, dup)) { possibleDuplicates.delete(dup); } } // when all chunks has been removed we can break here if (possibleDuplicates.size === 0) break; } } // when we found duplicates if ( possibleDuplicates !== undefined && possibleDuplicates.size > 0 ) { outer: for (const otherChunk of possibleDuplicates) { if (otherChunk.hasRuntime() !== chunk.hasRuntime()) continue; if (chunkGraph.getNumberOfEntryModules(chunk) > 0) continue; if 
(chunkGraph.getNumberOfEntryModules(otherChunk) > 0) continue; if (!runtimeEqual(chunk.runtime, otherChunk.runtime)) { for (const module of chunkGraph.getChunkModulesIterable( chunk )) { const exportsInfo = moduleGraph.getExportsInfo(module); if ( !exportsInfo.isEquallyUsed( chunk.runtime, otherChunk.runtime ) ) { continue outer; } } } // merge them if (chunkGraph.canChunksBeIntegrated(chunk, otherChunk)) { chunkGraph.integrateChunks(chunk, otherChunk); compilation.chunks.delete(otherChunk); } } } // don't check already processed chunks twice notDuplicates.add(chunk); } } ); } ); } } module.exports = MergeDuplicateChunksPlugin; LimitChunkCountPlugin.js000066600000020310150441747060011351 0ustar00/* MIT License http://www.opensource.org/licenses/mit-license.php Author Tobias Koppers @sokra */ "use strict"; const { STAGE_ADVANCED } = require("../OptimizationStages"); const LazyBucketSortedSet = require("../util/LazyBucketSortedSet"); const { compareChunks } = require("../util/comparators"); const createSchemaValidation = require("../util/create-schema-validation"); /** @typedef {import("../../declarations/plugins/optimize/LimitChunkCountPlugin").LimitChunkCountPluginOptions} LimitChunkCountPluginOptions */ /** @typedef {import("../Chunk")} Chunk */ /** @typedef {import("../Compiler")} Compiler */ const validate = createSchemaValidation( require("../../schemas/plugins/optimize/LimitChunkCountPlugin.check.js"), () => require("../../schemas/plugins/optimize/LimitChunkCountPlugin.json"), { name: "Limit Chunk Count Plugin", baseDataPath: "options" } ); /** * @typedef {Object} ChunkCombination * @property {boolean} deleted this is set to true when combination was removed * @property {number} sizeDiff * @property {number} integratedSize * @property {Chunk} a * @property {Chunk} b * @property {number} aIdx * @property {number} bIdx * @property {number} aSize * @property {number} bSize */ const addToSetMap = (map, key, value) => { const set = map.get(key); if (set === 
undefined) { map.set(key, new Set([value])); } else { set.add(value); } }; class LimitChunkCountPlugin { /** * @param {LimitChunkCountPluginOptions=} options options object */ constructor(options) { validate(options); this.options = options; } /** * @param {Compiler} compiler the webpack compiler * @returns {void} */ apply(compiler) { const options = this.options; compiler.hooks.compilation.tap("LimitChunkCountPlugin", compilation => { compilation.hooks.optimizeChunks.tap( { name: "LimitChunkCountPlugin", stage: STAGE_ADVANCED }, chunks => { const chunkGraph = compilation.chunkGraph; const maxChunks = options.maxChunks; if (!maxChunks) return; if (maxChunks < 1) return; if (compilation.chunks.size <= maxChunks) return; let remainingChunksToMerge = compilation.chunks.size - maxChunks; // order chunks in a deterministic way const compareChunksWithGraph = compareChunks(chunkGraph); const orderedChunks = Array.from(chunks).sort(compareChunksWithGraph); // create a lazy sorted data structure to keep all combinations // this is large. 
Size = chunks * (chunks - 1) / 2 // It uses a multi layer bucket sort plus normal sort in the last layer // It's also lazy so only accessed buckets are sorted const combinations = new LazyBucketSortedSet( // Layer 1: ordered by largest size benefit c => c.sizeDiff, (a, b) => b - a, // Layer 2: ordered by smallest combined size c => c.integratedSize, (a, b) => a - b, // Layer 3: ordered by position difference in orderedChunk (-> to be deterministic) c => c.bIdx - c.aIdx, (a, b) => a - b, // Layer 4: ordered by position in orderedChunk (-> to be deterministic) (a, b) => a.bIdx - b.bIdx ); // we keep a mapping from chunk to all combinations // but this mapping is not kept up-to-date with deletions // so `deleted` flag need to be considered when iterating this /** @type {Map>} */ const combinationsByChunk = new Map(); orderedChunks.forEach((b, bIdx) => { // create combination pairs with size and integrated size for (let aIdx = 0; aIdx < bIdx; aIdx++) { const a = orderedChunks[aIdx]; // filter pairs that can not be integrated! 
if (!chunkGraph.canChunksBeIntegrated(a, b)) continue; const integratedSize = chunkGraph.getIntegratedChunksSize( a, b, options ); const aSize = chunkGraph.getChunkSize(a, options); const bSize = chunkGraph.getChunkSize(b, options); const c = { deleted: false, sizeDiff: aSize + bSize - integratedSize, integratedSize, a, b, aIdx, bIdx, aSize, bSize }; combinations.add(c); addToSetMap(combinationsByChunk, a, c); addToSetMap(combinationsByChunk, b, c); } return combinations; }); // list of modified chunks during this run // combinations affected by this change are skipped to allow // further optimizations /** @type {Set} */ const modifiedChunks = new Set(); let changed = false; // eslint-disable-next-line no-constant-condition loop: while (true) { const combination = combinations.popFirst(); if (combination === undefined) break; combination.deleted = true; const { a, b, integratedSize } = combination; // skip over pair when // one of the already merged chunks is a parent of one of the chunks if (modifiedChunks.size > 0) { const queue = new Set(a.groupsIterable); for (const group of b.groupsIterable) { queue.add(group); } for (const group of queue) { for (const mChunk of modifiedChunks) { if (mChunk !== a && mChunk !== b && mChunk.isInGroup(group)) { // This is a potential pair which needs recalculation // We can't do that now, but it merge before following pairs // so we leave space for it, and consider chunks as modified // just for the worse case remainingChunksToMerge--; if (remainingChunksToMerge <= 0) break loop; modifiedChunks.add(a); modifiedChunks.add(b); continue loop; } } for (const parent of group.parentsIterable) { queue.add(parent); } } } // merge the chunks if (chunkGraph.canChunksBeIntegrated(a, b)) { chunkGraph.integrateChunks(a, b); compilation.chunks.delete(b); // flag chunk a as modified as further optimization are possible for all children here modifiedChunks.add(a); changed = true; remainingChunksToMerge--; if (remainingChunksToMerge <= 0) break; 
// Update all affected combinations // delete all combination with the removed chunk // we will use combinations with the kept chunk instead for (const combination of combinationsByChunk.get(a)) { if (combination.deleted) continue; combination.deleted = true; combinations.delete(combination); } // Update combinations with the kept chunk with new sizes for (const combination of combinationsByChunk.get(b)) { if (combination.deleted) continue; if (combination.a === b) { if (!chunkGraph.canChunksBeIntegrated(a, combination.b)) { combination.deleted = true; combinations.delete(combination); continue; } // Update size const newIntegratedSize = chunkGraph.getIntegratedChunksSize( a, combination.b, options ); const finishUpdate = combinations.startUpdate(combination); combination.a = a; combination.integratedSize = newIntegratedSize; combination.aSize = integratedSize; combination.sizeDiff = combination.bSize + integratedSize - newIntegratedSize; finishUpdate(); } else if (combination.b === b) { if (!chunkGraph.canChunksBeIntegrated(combination.a, a)) { combination.deleted = true; combinations.delete(combination); continue; } // Update size const newIntegratedSize = chunkGraph.getIntegratedChunksSize( combination.a, a, options ); const finishUpdate = combinations.startUpdate(combination); combination.b = a; combination.integratedSize = newIntegratedSize; combination.bSize = integratedSize; combination.sizeDiff = integratedSize + combination.aSize - newIntegratedSize; finishUpdate(); } } combinationsByChunk.set(a, combinationsByChunk.get(b)); combinationsByChunk.delete(b); } } if (changed) return true; } ); }); } } module.exports = LimitChunkCountPlugin; RemoveParentModulesPlugin.js000066600000007326150441747060012245 0ustar00/* MIT License http://www.opensource.org/licenses/mit-license.php Author Tobias Koppers @sokra */ "use strict"; const { STAGE_BASIC } = require("../OptimizationStages"); const Queue = require("../util/Queue"); const { intersect } = 
require("../util/SetHelpers");

/** @typedef {import("../Compiler")} Compiler */

/**
 * Removes modules from a chunk when those modules are available from every
 * chunk group the chunk belongs to (i.e. they were already provided by
 * ancestor chunk groups), so the chunk need not carry its own copy.
 * Runs as an optimizeChunks handler at STAGE_BASIC.
 */
class RemoveParentModulesPlugin {
	/**
	 * @param {Compiler} compiler the compiler
	 * @returns {void}
	 */
	apply(compiler) {
		compiler.hooks.compilation.tap("RemoveParentModulesPlugin", compilation => {
			const handler = (chunks, chunkGroups) => {
				const chunkGraph = compilation.chunkGraph;
				const queue = new Queue();
				// ChunkGroup -> Set of modules known to be available when the
				// group is reached; undefined means "not computed yet".
				const availableModulesMap = new WeakMap();

				for (const chunkGroup of compilation.entrypoints.values()) {
					// initialize available modules for chunks without parents
					availableModulesMap.set(chunkGroup, new Set());
					for (const child of chunkGroup.childrenIterable) {
						queue.enqueue(child);
					}
				}
				for (const chunkGroup of compilation.asyncEntrypoints) {
					// initialize available modules for chunks without parents
					availableModulesMap.set(chunkGroup, new Set());
					for (const child of chunkGroup.childrenIterable) {
						queue.enqueue(child);
					}
				}

				// Fixed-point iteration: propagate "available modules" from
				// parents to children; a group is re-enqueued via its children
				// whenever its set changes, until no set changes any more.
				while (queue.length > 0) {
					const chunkGroup = queue.dequeue();
					let availableModules = availableModulesMap.get(chunkGroup);
					let changed = false;
					for (const parent of chunkGroup.parentsIterable) {
						const availableModulesInParent = availableModulesMap.get(parent);
						if (availableModulesInParent !== undefined) {
							// If we know the available modules in parent: process these
							if (availableModules === undefined) {
								// if we have not own info yet: create new entry
								// (modules available in the parent plus the modules
								// contained in the parent's own chunks)
								availableModules = new Set(availableModulesInParent);
								for (const chunk of parent.chunks) {
									for (const m of chunkGraph.getChunkModulesIterable(chunk)) {
										availableModules.add(m);
									}
								}
								availableModulesMap.set(chunkGroup, availableModules);
								changed = true;
							} else {
								// otherwise intersect with this parent: a module stays
								// only if isModuleInChunkGroup reports it for the parent
								// or it is in the parent's own available set
								for (const m of availableModules) {
									if (
										!chunkGraph.isModuleInChunkGroup(m, parent) &&
										!availableModulesInParent.has(m)
									) {
										availableModules.delete(m);
										changed = true;
									}
								}
							}
						}
					}
					if (changed) {
						// if something changed: enqueue our children
						for (const child of chunkGroup.childrenIterable) {
							queue.enqueue(child);
						}
					}
				}

				// now we have available modules for every chunk
				for (const chunk of chunks) {
					const availableModulesSets = Array.from(
						chunk.groupsIterable,
						chunkGroup => availableModulesMap.get(chunkGroup)
					);
					if (availableModulesSets.some(s => s === undefined)) continue; // No info about this chunk group
					// A module is only removable when it is available via EVERY
					// group containing the chunk, hence the intersection.
					const availableModules =
						availableModulesSets.length === 1
							? availableModulesSets[0]
							: intersect(availableModulesSets);
					const numberOfModules = chunkGraph.getNumberOfChunkModules(chunk);
					const toRemove = new Set();
					// Iterate over whichever collection is smaller to find the
					// modules present both in the chunk and in availableModules.
					if (numberOfModules < availableModules.size) {
						for (const m of chunkGraph.getChunkModulesIterable(chunk)) {
							if (availableModules.has(m)) {
								toRemove.add(m);
							}
						}
					} else {
						for (const m of availableModules) {
							if (chunkGraph.isModuleInChunk(m, chunk)) {
								toRemove.add(m);
							}
						}
					}
					for (const module of toRemove) {
						chunkGraph.disconnectChunkAndModule(chunk, module);
					}
				}
			};
			compilation.hooks.optimizeChunks.tap(
				{
					name: "RemoveParentModulesPlugin",
					stage: STAGE_BASIC
				},
				handler
			);
		});
	}
}

module.exports = RemoveParentModulesPlugin;