12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767 |
- /*
- MIT License http://www.opensource.org/licenses/mit-license.php
- Author Tobias Koppers @sokra
- */
- "use strict";
- const Chunk = require("../Chunk");
- const { STAGE_ADVANCED } = require("../OptimizationStages");
- const WebpackError = require("../WebpackError");
- const { requestToId } = require("../ids/IdHelpers");
- const { isSubset } = require("../util/SetHelpers");
- const SortableSet = require("../util/SortableSet");
- const {
- compareModulesByIdentifier,
- compareIterables
- } = require("../util/comparators");
- const createHash = require("../util/createHash");
- const deterministicGrouping = require("../util/deterministicGrouping");
- const { makePathsRelative } = require("../util/identifier");
- const memoize = require("../util/memoize");
- const MinMaxSizeWarning = require("./MinMaxSizeWarning");
- /** @typedef {import("../../declarations/WebpackOptions").OptimizationSplitChunksCacheGroup} OptimizationSplitChunksCacheGroup */
- /** @typedef {import("../../declarations/WebpackOptions").OptimizationSplitChunksGetCacheGroups} OptimizationSplitChunksGetCacheGroups */
- /** @typedef {import("../../declarations/WebpackOptions").OptimizationSplitChunksOptions} OptimizationSplitChunksOptions */
- /** @typedef {import("../../declarations/WebpackOptions").OptimizationSplitChunksSizes} OptimizationSplitChunksSizes */
- /** @typedef {import("../../declarations/WebpackOptions").Output} OutputOptions */
- /** @typedef {import("../ChunkGraph")} ChunkGraph */
- /** @typedef {import("../ChunkGroup")} ChunkGroup */
- /** @typedef {import("../Compiler")} Compiler */
- /** @typedef {import("../Module")} Module */
- /** @typedef {import("../ModuleGraph")} ModuleGraph */
- /** @typedef {import("../TemplatedPathPlugin").TemplatePath} TemplatePath */
- /** @typedef {import("../util/deterministicGrouping").GroupedItems<Module>} DeterministicGroupingGroupedItemsForModule */
- /** @typedef {import("../util/deterministicGrouping").Options<Module>} DeterministicGroupingOptionsForModule */
- /** @typedef {Record<string, number>} SplitChunksSizes */
- /**
- * @callback ChunkFilterFunction
- * @param {Chunk} chunk
- * @returns {boolean | undefined}
- */
- /**
- * @callback CombineSizeFunction
- * @param {number} a
- * @param {number} b
- * @returns {number}
- */
- /**
- * @typedef {object} CacheGroupSource
- * @property {string=} key
- * @property {number=} priority
- * @property {GetName=} getName
- * @property {ChunkFilterFunction=} chunksFilter
- * @property {boolean=} enforce
- * @property {SplitChunksSizes} minSize
- * @property {SplitChunksSizes} minSizeReduction
- * @property {SplitChunksSizes} minRemainingSize
- * @property {SplitChunksSizes} enforceSizeThreshold
- * @property {SplitChunksSizes} maxAsyncSize
- * @property {SplitChunksSizes} maxInitialSize
- * @property {number=} minChunks
- * @property {number=} maxAsyncRequests
- * @property {number=} maxInitialRequests
- * @property {TemplatePath=} filename
- * @property {string=} idHint
- * @property {string=} automaticNameDelimiter
- * @property {boolean=} reuseExistingChunk
- * @property {boolean=} usedExports
- */
- /**
- * @typedef {object} CacheGroup
- * @property {string} key
- * @property {number=} priority
- * @property {GetName=} getName
- * @property {ChunkFilterFunction=} chunksFilter
- * @property {SplitChunksSizes} minSize
- * @property {SplitChunksSizes} minSizeReduction
- * @property {SplitChunksSizes} minRemainingSize
- * @property {SplitChunksSizes} enforceSizeThreshold
- * @property {SplitChunksSizes} maxAsyncSize
- * @property {SplitChunksSizes} maxInitialSize
- * @property {number=} minChunks
- * @property {number=} maxAsyncRequests
- * @property {number=} maxInitialRequests
- * @property {TemplatePath=} filename
- * @property {string=} idHint
- * @property {string} automaticNameDelimiter
- * @property {boolean} reuseExistingChunk
- * @property {boolean} usedExports
- * @property {boolean} _validateSize
- * @property {boolean} _validateRemainingSize
- * @property {SplitChunksSizes} _minSizeForMaxSize
- * @property {boolean} _conditionalEnforce
- */
- /**
- * @typedef {object} FallbackCacheGroup
- * @property {ChunkFilterFunction} chunksFilter
- * @property {SplitChunksSizes} minSize
- * @property {SplitChunksSizes} maxAsyncSize
- * @property {SplitChunksSizes} maxInitialSize
- * @property {string} automaticNameDelimiter
- */
- /**
- * @typedef {object} CacheGroupsContext
- * @property {ModuleGraph} moduleGraph
- * @property {ChunkGraph} chunkGraph
- */
- /**
- * @callback GetCacheGroups
- * @param {Module} module
- * @param {CacheGroupsContext} context
- * @returns {CacheGroupSource[]}
- */
- /**
- * @callback GetName
- * @param {Module=} module
- * @param {Chunk[]=} chunks
- * @param {string=} key
- * @returns {string=}
- */
- /**
- * @typedef {object} SplitChunksOptions
- * @property {ChunkFilterFunction} chunksFilter
- * @property {string[]} defaultSizeTypes
- * @property {SplitChunksSizes} minSize
- * @property {SplitChunksSizes} minSizeReduction
- * @property {SplitChunksSizes} minRemainingSize
- * @property {SplitChunksSizes} enforceSizeThreshold
- * @property {SplitChunksSizes} maxInitialSize
- * @property {SplitChunksSizes} maxAsyncSize
- * @property {number} minChunks
- * @property {number} maxAsyncRequests
- * @property {number} maxInitialRequests
- * @property {boolean} hidePathInfo
- * @property {TemplatePath} filename
- * @property {string} automaticNameDelimiter
- * @property {GetCacheGroups} getCacheGroups
- * @property {GetName} getName
- * @property {boolean} usedExports
- * @property {FallbackCacheGroup} fallbackCacheGroup
- */
- /**
- * @typedef {object} ChunksInfoItem
- * @property {SortableSet<Module>} modules
- * @property {CacheGroup} cacheGroup
- * @property {number} cacheGroupIndex
- * @property {string} name
- * @property {Record<string, number>} sizes
- * @property {Set<Chunk>} chunks
- * @property {Set<Chunk>} reusableChunks
- * @property {Set<bigint | Chunk>} chunksKeys
- */
// Fallback GetName used when no `name` option is configured: returns
// undefined for every module, i.e. never assigns an explicit chunk name.
const defaultGetName = /** @type {GetName} */ (() => {});

// Type-narrowed alias of the generic deterministicGrouping helper,
// specialized to Module items (cast only; no runtime difference).
const deterministicGroupingForModules =
	/** @type {function(DeterministicGroupingOptionsForModule): DeterministicGroupingGroupedItemsForModule[]} */
	(deterministicGrouping);

// Memoizes per-module keys; WeakMap so entries die with their Module.
/** @type {WeakMap<Module, string>} */
const getKeyCache = new WeakMap();
/**
 * Hashes a filename into a short, stable 8-character token using the
 * configured output hash function and digest encoding.
 * @param {string} name a filename to hash
 * @param {OutputOptions} outputOptions output options providing hashFunction and hashDigest
 * @returns {string} hashed filename (first 8 characters of the digest)
 */
const hashFilename = (name, outputOptions) => {
	const hash = createHash(outputOptions.hashFunction);
	hash.update(name);
	const digest = /** @type {string} */ (hash.digest(outputOptions.hashDigest));
	return digest.slice(0, 8);
};
/**
 * Computes the number of requests a chunk causes: the largest chunk count
 * among all chunk groups this chunk belongs to.
 * @param {Chunk} chunk the chunk
 * @returns {number} the number of requests
 */
const getRequests = chunk => {
	let max = 0;
	for (const group of chunk.groupsIterable) {
		const count = group.chunks.length;
		if (count > max) max = count;
	}
	return max;
};
/**
 * Maps every own enumerable value of an object through `fn`, producing a
 * new prototype-less object with the same keys.
 * @template {object} T
 * @param {T} obj an object
 * @param {function(T[keyof T], keyof T): T[keyof T]} fn mapper applied to each (value, key)
 * @returns {T} a fresh null-prototype object with mapped values
 */
const mapObject = (obj, fn) => {
	// Object.create(null) avoids prototype-key collisions for arbitrary keys.
	const result = Object.create(null);
	for (const [key, value] of Object.entries(obj)) {
		result[key] = fn(value, /** @type {keyof T} */ (key));
	}
	return result;
};
/**
 * Tests whether two sets share at least one element.
 * @template T
 * @param {Set<T>} a set
 * @param {Set<T>} b other set
 * @returns {boolean} true if at least one item of a is in b
 */
const isOverlap = (a, b) => {
	// Intersection is symmetric, so scanning either set is equivalent.
	for (const element of b) {
		if (a.has(element)) return true;
	}
	return false;
};
// Comparator over iterables of modules, element-wise by module identifier.
const compareModuleIterables = compareIterables(compareModulesByIdentifier);

/**
 * Deterministically orders two candidate split-chunk entries. Ties cascade
 * through a fixed sequence of criteria so the result is stable across
 * compilations regardless of insertion order.
 * @param {ChunksInfoItem} a item
 * @param {ChunksInfoItem} b item
 * @returns {number} compare result
 */
const compareEntries = (a, b) => {
	// 1. by priority
	const diffPriority = a.cacheGroup.priority - b.cacheGroup.priority;
	if (diffPriority) return diffPriority;
	// 2. by number of chunks
	const diffCount = a.chunks.size - b.chunks.size;
	if (diffCount) return diffCount;
	// 3. by size reduction: total module size times the number of chunks
	// the modules would no longer be duplicated in (chunks - 1)
	const aSizeReduce = totalSize(a.sizes) * (a.chunks.size - 1);
	const bSizeReduce = totalSize(b.sizes) * (b.chunks.size - 1);
	const diffSizeReduce = aSizeReduce - bSizeReduce;
	if (diffSizeReduce) return diffSizeReduce;
	// 4. by cache group index — note the operands are inverted relative to
	// the other criteria, so a lower index ranks higher
	const indexDiff = b.cacheGroupIndex - a.cacheGroupIndex;
	if (indexDiff) return indexDiff;
	// 5. by number of modules (to be able to compare by identifier)
	const modulesA = a.modules;
	const modulesB = b.modules;
	const diff = modulesA.size - modulesB.size;
	if (diff) return diff;
	// 6. by module identifiers; sorts both SortableSets in place so the
	// element-wise comparison below sees a canonical order
	modulesA.sort();
	modulesB.sort();
	return compareModuleIterables(modulesA, modulesB);
};
/**
 * Chunk filter for `chunks: "initial"`.
 * @param {Chunk} chunk the chunk
 * @returns {boolean} true, if the chunk is an entry chunk
 */
const INITIAL_CHUNK_FILTER = chunk => chunk.canBeInitial();
/**
 * Chunk filter for `chunks: "async"`.
 * @param {Chunk} chunk the chunk
 * @returns {boolean} true, if the chunk is an async chunk
 */
const ASYNC_CHUNK_FILTER = chunk => !chunk.canBeInitial();
/**
 * Chunk filter for `chunks: "all"`.
 * @param {Chunk} chunk the chunk (unused; kept for the ChunkFilterFunction signature)
 * @returns {boolean} always true
 */
const ALL_CHUNK_FILTER = chunk => true;
/**
 * Normalizes a size option to a per-size-type record. A plain number is
 * expanded to every default size type; an object is shallow-copied;
 * anything else becomes an empty record.
 * @param {OptimizationSplitChunksSizes | undefined} value the sizes
 * @param {string[]} defaultSizeTypes the default size types
 * @returns {SplitChunksSizes} normalized representation
 */
const normalizeSizes = (value, defaultSizeTypes) => {
	if (typeof value === "number") {
		return Object.fromEntries(
			defaultSizeTypes.map(sizeType => [sizeType, value])
		);
	}
	if (typeof value === "object" && value !== null) {
		return { ...value };
	}
	return {};
};
/**
 * Merges size records with earlier arguments taking precedence: when a
 * size type occurs in several arguments, the leftmost one wins.
 * `undefined` arguments are ignored.
 * @param {...(SplitChunksSizes | undefined)} sizes the sizes, highest precedence first
 * @returns {SplitChunksSizes} the merged sizes
 */
const mergeSizes = (...sizes) =>
	// Assigning in reverse lets later (lower-precedence) records be
	// overwritten by earlier ones; Object.assign skips undefined sources.
	Object.assign({}, ...sizes.slice().reverse());
/**
 * Tests whether any size type carries a positive size.
 * @param {SplitChunksSizes} sizes the sizes
 * @returns {boolean} true, if there are sizes > 0
 */
const hasNonZeroSizes = sizes => Object.values(sizes).some(size => size > 0);
/**
 * Combines two size records key-wise: size types present in both are
 * merged with `combine`, all others are copied through unchanged. Key
 * insertion order follows `a`'s keys, then `b`-only keys.
 * @param {SplitChunksSizes} a first sizes
 * @param {SplitChunksSizes} b second sizes
 * @param {CombineSizeFunction} combine a function to combine sizes
 * @returns {SplitChunksSizes} the combined sizes
 */
const combineSizes = (a, b, combine) => {
	const hasOwn = Object.prototype.hasOwnProperty;
	/** @type {SplitChunksSizes} */
	const combined = {};
	for (const key of Object.keys(a)) {
		combined[key] = hasOwn.call(b, key) ? combine(a[key], b[key]) : a[key];
	}
	for (const key of Object.keys(b)) {
		if (!hasOwn.call(a, key)) {
			combined[key] = b[key];
		}
	}
	return combined;
};
/**
 * Checks that every size type present and non-zero in `sizes` meets the
 * corresponding minimum. Missing or zero sizes are skipped; a record with
 * no applicable sizes therefore passes.
 * @param {SplitChunksSizes} sizes the sizes
 * @param {SplitChunksSizes} minSize the min sizes
 * @returns {boolean} true if all existing sizes are at least `minSize`
 */
const checkMinSize = (sizes, minSize) =>
	Object.keys(minSize).every(key => {
		const size = sizes[key];
		return size === undefined || size === 0 || size >= minSize[key];
	});
/**
 * Checks that extracting the modules would save at least
 * `minSizeReduction` per size type: each present, non-zero size times the
 * number of affected chunks must reach the threshold. Missing or zero
 * sizes are skipped.
 * @param {SplitChunksSizes} sizes the sizes
 * @param {SplitChunksSizes} minSizeReduction the min size reduction thresholds
 * @param {number} chunkCount number of chunks
 * @returns {boolean} true if all existing sizes reach `minSizeReduction`
 */
const checkMinSizeReduction = (sizes, minSizeReduction, chunkCount) =>
	Object.keys(minSizeReduction).every(key => {
		const size = sizes[key];
		return (
			size === undefined ||
			size === 0 ||
			size * chunkCount >= minSizeReduction[key]
		);
	});
/**
 * Collects the size types whose present, non-zero size falls below the
 * corresponding minimum.
 * @param {SplitChunksSizes} sizes the sizes
 * @param {SplitChunksSizes} minSize the min sizes
 * @returns {undefined | string[]} list of size types below min size, or undefined when none
 */
const getViolatingMinSizes = (sizes, minSize) => {
	const violations = Object.keys(minSize).filter(key => {
		const size = sizes[key];
		return size !== undefined && size !== 0 && size < minSize[key];
	});
	// Callers distinguish "no violations" via undefined, not an empty array.
	return violations.length > 0 ? violations : undefined;
};
/**
 * Sums all per-size-type sizes of a record.
 * @param {SplitChunksSizes} sizes the sizes
 * @returns {number} the total size
 */
const totalSize = sizes =>
	Object.values(sizes).reduce((sum, size) => sum + size, 0);
/**
 * Normalizes the `name` option into a GetName function. A string becomes
 * a constant getter, a function is passed through, and any other value
 * (false/undefined) yields undefined.
 * @param {false|string|Function|undefined} name the chunk name
 * @returns {GetName | undefined} a function to get the name of the chunk
 */
const normalizeName = name => {
	switch (typeof name) {
		case "string":
			return () => name;
		case "function":
			return /** @type {GetName} */ (name);
		default:
			return undefined;
	}
};
/**
 * Normalizes the `chunks` option into a chunk filter function. The three
 * string shorthands map to shared constant filters; a RegExp matches
 * against the chunk name (unnamed chunks never match); a function is
 * passed through; any other value yields undefined.
 * @param {OptimizationSplitChunksCacheGroup["chunks"]} chunks the chunk filter option
 * @returns {ChunkFilterFunction} the chunk filter function
 */
const normalizeChunksFilter = chunks => {
	switch (chunks) {
		case "initial":
			return INITIAL_CHUNK_FILTER;
		case "async":
			return ASYNC_CHUNK_FILTER;
		case "all":
			return ALL_CHUNK_FILTER;
		default:
			break;
	}
	if (chunks instanceof RegExp) {
		return chunk => (chunk.name ? chunks.test(chunk.name) : false);
	}
	if (typeof chunks === "function") {
		return chunks;
	}
	return undefined;
};
/**
 * Normalizes the `cacheGroups` option into a single matcher function that
 * returns, for a module, the list of CacheGroupSources it belongs to. A
 * function option is used as-is; an object becomes one handler per entry.
 * @param {GetCacheGroups | Record<string, false|string|RegExp|OptimizationSplitChunksGetCacheGroups|OptimizationSplitChunksCacheGroup>} cacheGroups the cache group options
 * @param {string[]} defaultSizeTypes the default size types
 * @returns {GetCacheGroups} a function to get the cache groups
 */
const normalizeCacheGroups = (cacheGroups, defaultSizeTypes) => {
	if (typeof cacheGroups === "function") {
		return cacheGroups;
	}
	if (typeof cacheGroups === "object" && cacheGroups !== null) {
		/** @type {(function(Module, CacheGroupsContext, CacheGroupSource[]): void)[]} */
		const handlers = [];
		for (const key of Object.keys(cacheGroups)) {
			const option = cacheGroups[key];
			if (option === false) {
				// `false` disables this cache group entry entirely
				continue;
			}
			if (typeof option === "string" || option instanceof RegExp) {
				// shorthand: the string/regex is used as the `test` condition,
				// all other cache group options are defaulted
				const source = createCacheGroupSource({}, key, defaultSizeTypes);
				handlers.push((module, context, results) => {
					if (checkTest(option, module, context)) {
						results.push(source);
					}
				});
			} else if (typeof option === "function") {
				// dynamic cache group: the function may return a single group
				// object, an array of groups, or a falsy value per module
				const cache = new WeakMap();
				handlers.push((module, context, results) => {
					const result = option(module);
					if (result) {
						const groups = Array.isArray(result) ? result : [result];
						for (const group of groups) {
							// Cache by group object identity so repeatedly returning
							// the same object yields one shared CacheGroupSource.
							const cachedSource = cache.get(group);
							if (cachedSource !== undefined) {
								results.push(cachedSource);
							} else {
								const source = createCacheGroupSource(
									group,
									key,
									defaultSizeTypes
								);
								cache.set(group, source);
								results.push(source);
							}
						}
					}
				});
			} else {
				// plain options object: module must match test, type and layer
				const source = createCacheGroupSource(option, key, defaultSizeTypes);
				handlers.push((module, context, results) => {
					if (
						checkTest(option.test, module, context) &&
						checkModuleType(option.type, module) &&
						checkModuleLayer(option.layer, module)
					) {
						results.push(source);
					}
				});
			}
		}
		/**
		 * Runs every handler against the module, collecting matches in order.
		 * @param {Module} module the current module
		 * @param {CacheGroupsContext} context the current context
		 * @returns {CacheGroupSource[]} the matching cache groups
		 */
		const fn = (module, context) => {
			/** @type {CacheGroupSource[]} */
			const results = [];
			for (const fn of handlers) {
				fn(module, context, results);
			}
			return results;
		};
		return fn;
	}
	// No cache groups configured: matcher yields null for every module.
	return () => null;
};
/**
 * Checks a cache group `test` condition against a module.
 * @param {undefined|boolean|string|RegExp|Function} test test option
 * @param {Module} module the module
 * @param {CacheGroupsContext} context context object
 * @returns {boolean} true, if the module should be selected
 */
const checkTest = (test, module, context) => {
	// No test configured means every module matches.
	if (test === undefined) return true;
	if (typeof test === "function") {
		return test(module, context);
	}
	if (typeof test === "boolean") return test;
	if (typeof test === "string") {
		const name = module.nameForCondition();
		// Boolean() so a nullish nameForCondition() yields false, not null,
		// honoring the documented boolean return (truthiness unchanged).
		return Boolean(name && name.startsWith(test));
	}
	if (test instanceof RegExp) {
		const name = module.nameForCondition();
		return Boolean(name && test.test(name));
	}
	// Unsupported test value (e.g. a number): never matches.
	return false;
};
/**
 * Checks a cache group `type` condition against a module's type.
 * @param {undefined|string|RegExp|Function} test type option
 * @param {Module} module the module
 * @returns {boolean} true, if the module should be selected
 */
const checkModuleType = (test, module) => {
	// No type condition configured means every module matches.
	if (test === undefined) return true;
	if (typeof test === "function") return test(module.type);
	if (typeof test === "string") return module.type === test;
	if (test instanceof RegExp) return test.test(module.type);
	// Unsupported test value: never matches.
	return false;
};
/**
 * Checks a cache group `layer` condition against a module's layer.
 * @param {undefined|string|RegExp|Function} test layer option
 * @param {Module} module the module
 * @returns {boolean} true, if the module should be selected
 */
const checkModuleLayer = (test, module) => {
	// No layer condition configured means every module matches.
	if (test === undefined) return true;
	if (typeof test === "function") return test(module.layer);
	if (typeof test === "string") {
		const layer = module.layer;
		// "" is a special case matching modules WITHOUT a layer.
		if (test === "") return !layer;
		return layer && layer.startsWith(test);
	}
	if (test instanceof RegExp) {
		// NOTE(review): when module.layer is unset, RegExp#test coerces it to
		// the string "undefined"/"null" — presumably a rare edge; confirm
		// before tightening.
		return test.test(module.layer);
	}
	// Unsupported test value: never matches.
	return false;
};
/**
 * Normalizes raw cache group options into a CacheGroupSource. Size options
 * are expanded per size type; `maxSize` serves as the fallback for both
 * `maxAsyncSize` and `maxInitialSize`, and `minSize` as the fallback for
 * `minRemainingSize`.
 * @param {OptimizationSplitChunksCacheGroup} options the group options
 * @param {string} key key of cache group
 * @param {string[]} defaultSizeTypes the default size types
 * @returns {CacheGroupSource} the normalized cache group
 */
const createCacheGroupSource = (options, key, defaultSizeTypes) => {
	const normalizedMinSize = normalizeSizes(options.minSize, defaultSizeTypes);
	const normalizedMaxSize = normalizeSizes(options.maxSize, defaultSizeTypes);
	return {
		key,
		priority: options.priority,
		getName: normalizeName(options.name),
		chunksFilter: normalizeChunksFilter(options.chunks),
		enforce: options.enforce,
		minSize: normalizedMinSize,
		minSizeReduction: normalizeSizes(
			options.minSizeReduction,
			defaultSizeTypes
		),
		// minSize backs minRemainingSize where the latter is unset
		minRemainingSize: mergeSizes(
			normalizeSizes(options.minRemainingSize, defaultSizeTypes),
			normalizedMinSize
		),
		enforceSizeThreshold: normalizeSizes(
			options.enforceSizeThreshold,
			defaultSizeTypes
		),
		// maxSize backs both directional max sizes where they are unset
		maxAsyncSize: mergeSizes(
			normalizeSizes(options.maxAsyncSize, defaultSizeTypes),
			normalizedMaxSize
		),
		maxInitialSize: mergeSizes(
			normalizeSizes(options.maxInitialSize, defaultSizeTypes),
			normalizedMaxSize
		),
		minChunks: options.minChunks,
		maxAsyncRequests: options.maxAsyncRequests,
		maxInitialRequests: options.maxInitialRequests,
		filename: options.filename,
		idHint: options.idHint,
		automaticNameDelimiter: options.automaticNameDelimiter,
		reuseExistingChunk: options.reuseExistingChunk,
		usedExports: options.usedExports
	};
};
- module.exports = class SplitChunksPlugin {
	/**
	 * Normalizes all splitChunks options into a SplitChunksOptions record.
	 * @param {OptimizationSplitChunksOptions=} options plugin options
	 */
	constructor(options = {}) {
		const defaultSizeTypes = options.defaultSizeTypes || [
			"javascript",
			"unknown"
		];
		const fallbackCacheGroup = options.fallbackCacheGroup || {};
		const minSize = normalizeSizes(options.minSize, defaultSizeTypes);
		const minSizeReduction = normalizeSizes(
			options.minSizeReduction,
			defaultSizeTypes
		);
		const maxSize = normalizeSizes(options.maxSize, defaultSizeTypes);
		/** @type {SplitChunksOptions} */
		this.options = {
			chunksFilter: normalizeChunksFilter(options.chunks || "all"),
			defaultSizeTypes,
			minSize,
			minSizeReduction,
			// minSize backs minRemainingSize where the latter is unset
			minRemainingSize: mergeSizes(
				normalizeSizes(options.minRemainingSize, defaultSizeTypes),
				minSize
			),
			enforceSizeThreshold: normalizeSizes(
				options.enforceSizeThreshold,
				defaultSizeTypes
			),
			// maxSize backs both directional max sizes where they are unset
			maxAsyncSize: mergeSizes(
				normalizeSizes(options.maxAsyncSize, defaultSizeTypes),
				maxSize
			),
			maxInitialSize: mergeSizes(
				normalizeSizes(options.maxInitialSize, defaultSizeTypes),
				maxSize
			),
			// NOTE(review): `||` treats an explicit 0 the same as unset for the
			// numeric defaults below — confirm that is intended before changing.
			minChunks: options.minChunks || 1,
			maxAsyncRequests: options.maxAsyncRequests || 1,
			maxInitialRequests: options.maxInitialRequests || 1,
			hidePathInfo: options.hidePathInfo || false,
			filename: options.filename || undefined,
			getCacheGroups: normalizeCacheGroups(
				options.cacheGroups,
				defaultSizeTypes
			),
			getName: options.name ? normalizeName(options.name) : defaultGetName,
			automaticNameDelimiter: options.automaticNameDelimiter,
			usedExports: options.usedExports,
			// Fallback group settings cascade: fallbackCacheGroup option first,
			// then the top-level option, then the plugin default.
			fallbackCacheGroup: {
				chunksFilter: normalizeChunksFilter(
					fallbackCacheGroup.chunks || options.chunks || "all"
				),
				minSize: mergeSizes(
					normalizeSizes(fallbackCacheGroup.minSize, defaultSizeTypes),
					minSize
				),
				maxAsyncSize: mergeSizes(
					normalizeSizes(fallbackCacheGroup.maxAsyncSize, defaultSizeTypes),
					normalizeSizes(fallbackCacheGroup.maxSize, defaultSizeTypes),
					normalizeSizes(options.maxAsyncSize, defaultSizeTypes),
					normalizeSizes(options.maxSize, defaultSizeTypes)
				),
				maxInitialSize: mergeSizes(
					normalizeSizes(fallbackCacheGroup.maxInitialSize, defaultSizeTypes),
					normalizeSizes(fallbackCacheGroup.maxSize, defaultSizeTypes),
					normalizeSizes(options.maxInitialSize, defaultSizeTypes),
					normalizeSizes(options.maxSize, defaultSizeTypes)
				),
				automaticNameDelimiter:
					fallbackCacheGroup.automaticNameDelimiter ||
					options.automaticNameDelimiter ||
					"~"
			}
		};
		// Memoizes _getCacheGroup results per source object.
		/** @type {WeakMap<CacheGroupSource, CacheGroup>} */
		this._cacheGroupCache = new WeakMap();
	}
	/**
	 * Resolves a CacheGroupSource into a fully-defaulted CacheGroup, merging
	 * in the plugin-level options. When the source has `enforce` set, the
	 * global size/count constraints are NOT merged in (the `undefined`
	 * second argument drops them), minChunks defaults to 1 and the request
	 * limits to Infinity. Results are memoized per source.
	 * @param {CacheGroupSource} cacheGroupSource source
	 * @returns {CacheGroup} the cache group (cached)
	 */
	_getCacheGroup(cacheGroupSource) {
		const cacheEntry = this._cacheGroupCache.get(cacheGroupSource);
		if (cacheEntry !== undefined) return cacheEntry;
		const minSize = mergeSizes(
			cacheGroupSource.minSize,
			cacheGroupSource.enforce ? undefined : this.options.minSize
		);
		const minSizeReduction = mergeSizes(
			cacheGroupSource.minSizeReduction,
			cacheGroupSource.enforce ? undefined : this.options.minSizeReduction
		);
		const minRemainingSize = mergeSizes(
			cacheGroupSource.minRemainingSize,
			cacheGroupSource.enforce ? undefined : this.options.minRemainingSize
		);
		const enforceSizeThreshold = mergeSizes(
			cacheGroupSource.enforceSizeThreshold,
			cacheGroupSource.enforce ? undefined : this.options.enforceSizeThreshold
		);
		const cacheGroup = {
			key: cacheGroupSource.key,
			priority: cacheGroupSource.priority || 0,
			chunksFilter: cacheGroupSource.chunksFilter || this.options.chunksFilter,
			minSize,
			minSizeReduction,
			minRemainingSize,
			enforceSizeThreshold,
			maxAsyncSize: mergeSizes(
				cacheGroupSource.maxAsyncSize,
				cacheGroupSource.enforce ? undefined : this.options.maxAsyncSize
			),
			maxInitialSize: mergeSizes(
				cacheGroupSource.maxInitialSize,
				cacheGroupSource.enforce ? undefined : this.options.maxInitialSize
			),
			// The count options below use explicit !== undefined checks, so an
			// explicit 0 on the source is respected (unlike `||` defaults).
			minChunks:
				cacheGroupSource.minChunks !== undefined
					? cacheGroupSource.minChunks
					: cacheGroupSource.enforce
					? 1
					: this.options.minChunks,
			maxAsyncRequests:
				cacheGroupSource.maxAsyncRequests !== undefined
					? cacheGroupSource.maxAsyncRequests
					: cacheGroupSource.enforce
					? Infinity
					: this.options.maxAsyncRequests,
			maxInitialRequests:
				cacheGroupSource.maxInitialRequests !== undefined
					? cacheGroupSource.maxInitialRequests
					: cacheGroupSource.enforce
					? Infinity
					: this.options.maxInitialRequests,
			getName:
				cacheGroupSource.getName !== undefined
					? cacheGroupSource.getName
					: this.options.getName,
			usedExports:
				cacheGroupSource.usedExports !== undefined
					? cacheGroupSource.usedExports
					: this.options.usedExports,
			filename:
				cacheGroupSource.filename !== undefined
					? cacheGroupSource.filename
					: this.options.filename,
			automaticNameDelimiter:
				cacheGroupSource.automaticNameDelimiter !== undefined
					? cacheGroupSource.automaticNameDelimiter
					: this.options.automaticNameDelimiter,
			// The cache group key doubles as the id hint when none is given.
			idHint:
				cacheGroupSource.idHint !== undefined
					? cacheGroupSource.idHint
					: cacheGroupSource.key,
			reuseExistingChunk: cacheGroupSource.reuseExistingChunk || false,
			// Precomputed flags: whether size validation applies at all.
			_validateSize: hasNonZeroSizes(minSize),
			_validateRemainingSize: hasNonZeroSizes(minRemainingSize),
			// For maxSize splitting, the global minSize always applies —
			// even for enforced groups (unlike `minSize` above).
			_minSizeForMaxSize: mergeSizes(
				cacheGroupSource.minSize,
				this.options.minSize
			),
			_conditionalEnforce: hasNonZeroSizes(enforceSizeThreshold)
		};
		this._cacheGroupCache.set(cacheGroupSource, cacheGroup);
		return cacheGroup;
	}
- /**
- * Apply the plugin
- * @param {Compiler} compiler the compiler instance
- * @returns {void}
- */
apply(compiler) {
	// Four phases run inside the optimizeChunks hook, each timed by the logger:
	// "prepare" (build chunk-set indexes), "modules" (assign modules to cache
	// groups), "queue" (create/reuse split chunks), "maxSize" (re-split
	// oversized chunks).
	// Path-relativizer cached per (context, root) pair.
	const cachedMakePathsRelative = makePathsRelative.bindContextCache(
		compiler.context,
		compiler.root
	);
	compiler.hooks.thisCompilation.tap("SplitChunksPlugin", compilation => {
		const logger = compilation.getLogger("webpack.SplitChunksPlugin");
		let alreadyOptimized = false;
		// Re-run optimization after a compilation is unsealed.
		compilation.hooks.unseal.tap("SplitChunksPlugin", () => {
			alreadyOptimized = false;
		});
		compilation.hooks.optimizeChunks.tap(
			{
				name: "SplitChunksPlugin",
				stage: STAGE_ADVANCED
			},
			chunks => {
				if (alreadyOptimized) return;
				alreadyOptimized = true;
				logger.time("prepare");
				const chunkGraph = compilation.chunkGraph;
				const moduleGraph = compilation.moduleGraph;
				// Give each selected chunk an index (to create strings from chunks)
				// Each chunk gets its own high bit (>= 2^31) plus 31 random low
				// bits, so a set of chunks can be folded into a single bigint key.
				/** @type {Map<Chunk, bigint>} */
				const chunkIndexMap = new Map();
				const ZERO = BigInt("0");
				const ONE = BigInt("1");
				const START = ONE << BigInt("31");
				let index = START;
				for (const chunk of chunks) {
					chunkIndexMap.set(
						chunk,
						index | BigInt((Math.random() * 0x7fffffff) | 0)
					);
					index = index << ONE;
				}
				/**
				 * Folds an iterable of chunks into a compact key. An empty
				 * iterable yields ZERO; a single-element iterable returns the
				 * Chunk itself (avoids a bigint lookup); otherwise the chunks'
				 * bigint indexes are combined.
				 * @param {Iterable<Chunk>} chunks list of chunks
				 * @returns {bigint | Chunk} key of the chunks
				 */
				const getKey = chunks => {
					const iterator = chunks[Symbol.iterator]();
					let result = iterator.next();
					if (result.done) return ZERO;
					const first = result.value;
					result = iterator.next();
					if (result.done) return first;
					// NOTE(review): the first two entries are combined with |
					// while the rest use ^ — verify this asymmetry is intentional.
					let key =
						chunkIndexMap.get(first) | chunkIndexMap.get(result.value);
					while (!(result = iterator.next()).done) {
						const raw = chunkIndexMap.get(result.value);
						key = key ^ raw;
					}
					return key;
				};
				/**
				 * @param {bigint | Chunk} key key of the chunks
				 * @returns {string} stringified key
				 */
				const keyToString = key => {
					if (typeof key === "bigint") return key.toString(16);
					return chunkIndexMap.get(key).toString(16);
				};
				// Collects every distinct set of chunks that some module lives in.
				// Modules contained in exactly one chunk go into singleChunkSets.
				const getChunkSetsInGraph = memoize(() => {
					/** @type {Map<bigint, Set<Chunk>>} */
					const chunkSetsInGraph = new Map();
					/** @type {Set<Chunk>} */
					const singleChunkSets = new Set();
					for (const module of compilation.modules) {
						const chunks = chunkGraph.getModuleChunksIterable(module);
						const chunksKey = getKey(chunks);
						if (typeof chunksKey === "bigint") {
							if (!chunkSetsInGraph.has(chunksKey)) {
								chunkSetsInGraph.set(chunksKey, new Set(chunks));
							}
						} else {
							singleChunkSets.add(chunksKey);
						}
					}
					return { chunkSetsInGraph, singleChunkSets };
				});
				/**
				 * Partitions a module's chunks by the usage key of the module's
				 * exports in each chunk's runtime (used for usedExports mode).
				 * @param {Module} module the module
				 * @returns {Iterable<Chunk[]>} groups of chunks with equal exports
				 */
				const groupChunksByExports = module => {
					const exportsInfo = moduleGraph.getExportsInfo(module);
					const groupedByUsedExports = new Map();
					for (const chunk of chunkGraph.getModuleChunksIterable(module)) {
						const key = exportsInfo.getUsageKey(chunk.runtime);
						const list = groupedByUsedExports.get(key);
						if (list !== undefined) {
							list.push(chunk);
						} else {
							groupedByUsedExports.set(key, [chunk]);
						}
					}
					return groupedByUsedExports.values();
				};
				/** @type {Map<Module, Iterable<Chunk[]>>} */
				const groupedByExportsMap = new Map();
				// Same as getChunkSetsInGraph but with chunks grouped by equal
				// export usage first; also fills groupedByExportsMap as a side
				// effect.
				const getExportsChunkSetsInGraph = memoize(() => {
					/** @type {Map<bigint, Set<Chunk>>} */
					const chunkSetsInGraph = new Map();
					/** @type {Set<Chunk>} */
					const singleChunkSets = new Set();
					for (const module of compilation.modules) {
						const groupedChunks = Array.from(groupChunksByExports(module));
						groupedByExportsMap.set(module, groupedChunks);
						for (const chunks of groupedChunks) {
							if (chunks.length === 1) {
								singleChunkSets.add(chunks[0]);
							} else {
								const chunksKey = /** @type {bigint} */ (getKey(chunks));
								if (!chunkSetsInGraph.has(chunksKey)) {
									chunkSetsInGraph.set(chunksKey, new Set(chunks));
								}
							}
						}
					}
					return { chunkSetsInGraph, singleChunkSets };
				});
				// group these set of chunks by count
				// to allow to check less sets via isSubset
				// (only smaller sets can be subset)
				/**
				 * @param {IterableIterator<Set<Chunk>>} chunkSets set of sets of chunks
				 * @returns {Map<number, Array<Set<Chunk>>>} map of sets of chunks by count
				 */
				const groupChunkSetsByCount = chunkSets => {
					/** @type {Map<number, Array<Set<Chunk>>>} */
					const chunkSetsByCount = new Map();
					for (const chunksSet of chunkSets) {
						const count = chunksSet.size;
						let array = chunkSetsByCount.get(count);
						if (array === undefined) {
							array = [];
							chunkSetsByCount.set(count, array);
						}
						array.push(chunksSet);
					}
					return chunkSetsByCount;
				};
				const getChunkSetsByCount = memoize(() =>
					groupChunkSetsByCount(
						getChunkSetsInGraph().chunkSetsInGraph.values()
					)
				);
				const getExportsChunkSetsByCount = memoize(() =>
					groupChunkSetsByCount(
						getExportsChunkSetsInGraph().chunkSetsInGraph.values()
					)
				);
				// Create a list of possible combinations
				// For a key, the combinations are: the chunk set itself, every
				// strictly smaller set in the graph that is a subset of it, and
				// each contained single chunk. Results are cached per key.
				const createGetCombinations = (
					chunkSets,
					singleChunkSets,
					chunkSetsByCount
				) => {
					/** @type {Map<bigint | Chunk, (Set<Chunk> | Chunk)[]>} */
					const combinationsCache = new Map();
					return key => {
						const cacheEntry = combinationsCache.get(key);
						if (cacheEntry !== undefined) return cacheEntry;
						// A single Chunk key has only itself as combination
						if (key instanceof Chunk) {
							const result = [key];
							combinationsCache.set(key, result);
							return result;
						}
						const chunksSet = chunkSets.get(key);
						/** @type {(Set<Chunk> | Chunk)[]} */
						const array = [chunksSet];
						for (const [count, setArray] of chunkSetsByCount) {
							// "equal" is not needed because they would have been merged in the first step
							if (count < chunksSet.size) {
								for (const set of setArray) {
									if (isSubset(chunksSet, set)) {
										array.push(set);
									}
								}
							}
						}
						for (const chunk of singleChunkSets) {
							if (chunksSet.has(chunk)) {
								array.push(chunk);
							}
						}
						combinationsCache.set(key, array);
						return array;
					};
				};
				const getCombinationsFactory = memoize(() => {
					const { chunkSetsInGraph, singleChunkSets } = getChunkSetsInGraph();
					return createGetCombinations(
						chunkSetsInGraph,
						singleChunkSets,
						getChunkSetsByCount()
					);
				});
				const getCombinations = key => getCombinationsFactory()(key);
				const getExportsCombinationsFactory = memoize(() => {
					const { chunkSetsInGraph, singleChunkSets } =
						getExportsChunkSetsInGraph();
					return createGetCombinations(
						chunkSetsInGraph,
						singleChunkSets,
						getExportsChunkSetsByCount()
					);
				});
				const getExportsCombinations = key =>
					getExportsCombinationsFactory()(key);
				/**
				 * @typedef {object} SelectedChunksResult
				 * @property {Chunk[]} chunks the list of chunks
				 * @property {bigint | Chunk} key a key of the list
				 */
				/** @type {WeakMap<Set<Chunk> | Chunk, WeakMap<ChunkFilterFunction, SelectedChunksResult>>} */
				const selectedChunksCacheByChunksSet = new WeakMap();
				/**
				 * get list and key by applying the filter function to the list
				 * It is cached for performance reasons
				 * @param {Set<Chunk> | Chunk} chunks list of chunks
				 * @param {ChunkFilterFunction} chunkFilter filter function for chunks
				 * @returns {SelectedChunksResult} list and key
				 */
				const getSelectedChunks = (chunks, chunkFilter) => {
					let entry = selectedChunksCacheByChunksSet.get(chunks);
					if (entry === undefined) {
						entry = new WeakMap();
						selectedChunksCacheByChunksSet.set(chunks, entry);
					}
					let entry2 =
						/** @type {SelectedChunksResult} */
						(entry.get(chunkFilter));
					if (entry2 === undefined) {
						/** @type {Chunk[]} */
						const selectedChunks = [];
						if (chunks instanceof Chunk) {
							if (chunkFilter(chunks)) selectedChunks.push(chunks);
						} else {
							for (const chunk of chunks) {
								if (chunkFilter(chunk)) selectedChunks.push(chunk);
							}
						}
						entry2 = {
							chunks: selectedChunks,
							key: getKey(selectedChunks)
						};
						entry.set(chunkFilter, entry2);
					}
					return entry2;
				};
				// Caches "is the named existing chunk a parent of this chunk
				// selection?" per `${name}|${selectionKey}`.
				/** @type {Map<string, boolean>} */
				const alreadyValidatedParents = new Map();
				// Ensures the name-conflict error is reported once per name.
				/** @type {Set<string>} */
				const alreadyReportedErrors = new Set();
				// Map a list of chunks to a list of modules
				// For the key the chunk "index" is used, the value is a SortableSet of modules
				/** @type {Map<string, ChunksInfoItem>} */
				const chunksInfoMap = new Map();
				/**
				 * Registers a module as a split candidate for a cache group and
				 * chunk selection, creating/updating the ChunksInfoItem keyed by
				 * name (or by selection key when unnamed).
				 * @param {CacheGroup} cacheGroup the current cache group
				 * @param {number} cacheGroupIndex the index of the cache group of ordering
				 * @param {Chunk[]} selectedChunks chunks selected for this module
				 * @param {bigint | Chunk} selectedChunksKey a key of selectedChunks
				 * @param {Module} module the current module
				 * @returns {void}
				 */
				const addModuleToChunksInfoMap = (
					cacheGroup,
					cacheGroupIndex,
					selectedChunks,
					selectedChunksKey,
					module
				) => {
					// Break if minimum number of chunks is not reached
					if (selectedChunks.length < cacheGroup.minChunks) return;
					// Determine name for split chunk
					const name =
						/** @type {string} */
						(cacheGroup.getName(module, selectedChunks, cacheGroup.key));
					// Check if the name is ok
					const existingChunk = compilation.namedChunks.get(name);
					if (existingChunk) {
						const parentValidationKey = `${name}|${
							typeof selectedChunksKey === "bigint"
								? selectedChunksKey
								: selectedChunksKey.debugId
						}`;
						const valid = alreadyValidatedParents.get(parentValidationKey);
						if (valid === false) return;
						if (valid === undefined) {
							// Module can only be moved into the existing chunk if the existing chunk
							// is a parent of all selected chunks
							let isInAllParents = true;
							// BFS over the group-parent graph; Set doubles as the
							// visited set since iteration sees entries added during
							// the loop.
							/** @type {Set<ChunkGroup>} */
							const queue = new Set();
							for (const chunk of selectedChunks) {
								for (const group of chunk.groupsIterable) {
									queue.add(group);
								}
							}
							for (const group of queue) {
								if (existingChunk.isInGroup(group)) continue;
								let hasParent = false;
								for (const parent of group.parentsIterable) {
									hasParent = true;
									queue.add(parent);
								}
								if (!hasParent) {
									isInAllParents = false;
								}
							}
							const valid = isInAllParents;
							alreadyValidatedParents.set(parentValidationKey, valid);
							if (!valid) {
								if (!alreadyReportedErrors.has(name)) {
									alreadyReportedErrors.add(name);
									compilation.errors.push(
										new WebpackError(
											"SplitChunksPlugin\n" +
												`Cache group "${cacheGroup.key}" conflicts with existing chunk.\n` +
												`Both have the same name "${name}" and existing chunk is not a parent of the selected modules.\n` +
												"Use a different name for the cache group or make sure that the existing chunk is a parent (e. g. via dependOn).\n" +
												'HINT: You can omit "name" to automatically create a name.\n' +
												"BREAKING CHANGE: webpack < 5 used to allow to use an entrypoint as splitChunk. " +
												"This is no longer allowed when the entrypoint is not a parent of the selected modules.\n" +
												"Remove this entrypoint and add modules to cache group's 'test' instead. " +
												"If you need modules to be evaluated on startup, add them to the existing entrypoints (make them arrays). " +
												"See migration guide of more info."
										)
									);
								}
								return;
							}
						}
					}
					// Create key for maps
					// When it has a name we use the name as key
					// Otherwise we create the key from chunks and cache group key
					// This automatically merges equal names
					const key =
						cacheGroup.key +
						(name
							? ` name:${name}`
							: ` chunks:${keyToString(selectedChunksKey)}`);
					// Add module to maps
					let info = /** @type {ChunksInfoItem} */ (chunksInfoMap.get(key));
					if (info === undefined) {
						chunksInfoMap.set(
							key,
							(info = {
								modules: new SortableSet(
									undefined,
									compareModulesByIdentifier
								),
								cacheGroup,
								cacheGroupIndex,
								name,
								sizes: {},
								chunks: new Set(),
								reusableChunks: new Set(),
								chunksKeys: new Set()
							})
						);
					}
					// Only grow sizes when the module was actually new to the set
					const oldSize = info.modules.size;
					info.modules.add(module);
					if (info.modules.size !== oldSize) {
						for (const type of module.getSourceTypes()) {
							info.sizes[type] = (info.sizes[type] || 0) + module.size(type);
						}
					}
					// Only union chunks when this selection key is new
					const oldChunksKeysSize = info.chunksKeys.size;
					info.chunksKeys.add(selectedChunksKey);
					if (oldChunksKeysSize !== info.chunksKeys.size) {
						for (const chunk of selectedChunks) {
							info.chunks.add(chunk);
						}
					}
				};
				const context = {
					moduleGraph,
					chunkGraph
				};
				logger.timeEnd("prepare");
				logger.time("modules");
				// Walk through all modules
				for (const module of compilation.modules) {
					// Get cache group
					const cacheGroups = this.options.getCacheGroups(module, context);
					if (!Array.isArray(cacheGroups) || cacheGroups.length === 0) {
						continue;
					}
					// Prepare some values (usedExports = false)
					const getCombs = memoize(() => {
						const chunks = chunkGraph.getModuleChunksIterable(module);
						const chunksKey = getKey(chunks);
						return getCombinations(chunksKey);
					});
					// Prepare some values (usedExports = true)
					const getCombsByUsedExports = memoize(() => {
						// fill the groupedByExportsMap
						getExportsChunkSetsInGraph();
						/** @type {Set<Set<Chunk> | Chunk>} */
						const set = new Set();
						const groupedByUsedExports =
							/** @type {Iterable<Chunk[]>} */
							(groupedByExportsMap.get(module));
						for (const chunks of groupedByUsedExports) {
							const chunksKey = getKey(chunks);
							for (const comb of getExportsCombinations(chunksKey))
								set.add(comb);
						}
						return set;
					});
					let cacheGroupIndex = 0;
					for (const cacheGroupSource of cacheGroups) {
						const cacheGroup = this._getCacheGroup(cacheGroupSource);
						const combs = cacheGroup.usedExports
							? getCombsByUsedExports()
							: getCombs();
						// For all combination of chunk selection
						for (const chunkCombination of combs) {
							// Break if minimum number of chunks is not reached
							const count =
								chunkCombination instanceof Chunk ? 1 : chunkCombination.size;
							if (count < cacheGroup.minChunks) continue;
							// Select chunks by configuration
							const { chunks: selectedChunks, key: selectedChunksKey } =
								getSelectedChunks(
									chunkCombination,
									/** @type {ChunkFilterFunction} */ (cacheGroup.chunksFilter)
								);
							addModuleToChunksInfoMap(
								cacheGroup,
								cacheGroupIndex,
								selectedChunks,
								selectedChunksKey,
								module
							);
						}
						cacheGroupIndex++;
					}
				}
				logger.timeEnd("modules");
				logger.time("queue");
				/**
				 * Removes from the entry every module whose source types overlap
				 * sourceTypes, subtracting the removed modules' sizes.
				 * @param {ChunksInfoItem} info entry
				 * @param {string[]} sourceTypes source types to be removed
				 */
				const removeModulesWithSourceType = (info, sourceTypes) => {
					for (const module of info.modules) {
						const types = module.getSourceTypes();
						if (sourceTypes.some(type => types.has(type))) {
							info.modules.delete(module);
							for (const type of types) {
								info.sizes[type] -= module.size(type);
							}
						}
					}
				};
				/**
				 * Drops modules of source types violating the cache group's
				 * minSize (when _validateSize is set).
				 * @param {ChunksInfoItem} info entry
				 * @returns {boolean} true, if entry become empty
				 */
				const removeMinSizeViolatingModules = info => {
					if (!info.cacheGroup._validateSize) return false;
					const violatingSizes = getViolatingMinSizes(
						info.sizes,
						info.cacheGroup.minSize
					);
					if (violatingSizes === undefined) return false;
					removeModulesWithSourceType(info, violatingSizes);
					return info.modules.size === 0;
				};
				// Filter items were size < minSize
				for (const [key, info] of chunksInfoMap) {
					if (removeMinSizeViolatingModules(info)) {
						chunksInfoMap.delete(key);
					} else if (
						!checkMinSizeReduction(
							info.sizes,
							info.cacheGroup.minSizeReduction,
							info.chunks.size
						)
					) {
						chunksInfoMap.delete(key);
					}
				}
				/**
				 * @typedef {object} MaxSizeQueueItem
				 * @property {SplitChunksSizes} minSize
				 * @property {SplitChunksSizes} maxAsyncSize
				 * @property {SplitChunksSizes} maxInitialSize
				 * @property {string} automaticNameDelimiter
				 * @property {string[]} keys
				 */
				/** @type {Map<Chunk, MaxSizeQueueItem>} */
				const maxSizeQueueMap = new Map();
				// Process entries one at a time, always taking the best one per
				// compareEntries; graph mutations may re-queue entries below.
				while (chunksInfoMap.size > 0) {
					// Find best matching entry
					let bestEntryKey;
					let bestEntry;
					for (const pair of chunksInfoMap) {
						const key = pair[0];
						const info = pair[1];
						if (
							bestEntry === undefined ||
							compareEntries(bestEntry, info) < 0
						) {
							bestEntry = info;
							bestEntryKey = key;
						}
					}
					const item = /** @type {ChunksInfoItem} */ (bestEntry);
					chunksInfoMap.delete(/** @type {string} */ (bestEntryKey));
					/** @type {Chunk["name"] | undefined} */
					let chunkName = item.name;
					// Variable for the new chunk (lazy created)
					/** @type {Chunk | undefined} */
					let newChunk;
					// When no chunk name, check if we can reuse a chunk instead of creating a new one
					let isExistingChunk = false;
					let isReusedWithAllModules = false;
					if (chunkName) {
						const chunkByName = compilation.namedChunks.get(chunkName);
						if (chunkByName !== undefined) {
							newChunk = chunkByName;
							const oldSize = item.chunks.size;
							item.chunks.delete(newChunk);
							isExistingChunk = item.chunks.size !== oldSize;
						}
					} else if (item.cacheGroup.reuseExistingChunk) {
						// A chunk is reusable when it contains exactly the item's
						// modules; among candidates prefer unnamed, then shortest,
						// then lexicographically smallest name.
						outer: for (const chunk of item.chunks) {
							if (
								chunkGraph.getNumberOfChunkModules(chunk) !==
								item.modules.size
							) {
								continue;
							}
							if (
								item.chunks.size > 1 &&
								chunkGraph.getNumberOfEntryModules(chunk) > 0
							) {
								continue;
							}
							for (const module of item.modules) {
								if (!chunkGraph.isModuleInChunk(module, chunk)) {
									continue outer;
								}
							}
							if (!newChunk || !newChunk.name) {
								newChunk = chunk;
							} else if (
								chunk.name &&
								chunk.name.length < newChunk.name.length
							) {
								newChunk = chunk;
							} else if (
								chunk.name &&
								chunk.name.length === newChunk.name.length &&
								chunk.name < newChunk.name
							) {
								newChunk = chunk;
							}
						}
						if (newChunk) {
							item.chunks.delete(newChunk);
							chunkName = undefined;
							isExistingChunk = true;
							isReusedWithAllModules = true;
						}
					}
					// enforceSizeThreshold bypasses the maxRequests and
					// minRemainingSize checks below when exceeded
					const enforced =
						item.cacheGroup._conditionalEnforce &&
						checkMinSize(item.sizes, item.cacheGroup.enforceSizeThreshold);
					const usedChunks = new Set(item.chunks);
					// Check if maxRequests condition can be fulfilled
					if (
						!enforced &&
						(Number.isFinite(item.cacheGroup.maxInitialRequests) ||
							Number.isFinite(item.cacheGroup.maxAsyncRequests))
					) {
						for (const chunk of usedChunks) {
							// respect max requests
							const maxRequests = /** @type {number} */ (
								chunk.isOnlyInitial()
									? item.cacheGroup.maxInitialRequests
									: chunk.canBeInitial()
										? Math.min(
												/** @type {number} */
												(item.cacheGroup.maxInitialRequests),
												/** @type {number} */
												(item.cacheGroup.maxAsyncRequests)
											)
										: item.cacheGroup.maxAsyncRequests
							);
							if (
								Number.isFinite(maxRequests) &&
								getRequests(chunk) >= maxRequests
							) {
								usedChunks.delete(chunk);
							}
						}
					}
					// Drop chunks that no longer contain any of the item's modules
					outer: for (const chunk of usedChunks) {
						for (const module of item.modules) {
							if (chunkGraph.isModuleInChunk(module, chunk)) continue outer;
						}
						usedChunks.delete(chunk);
					}
					// Were some (invalid) chunks removed from usedChunks?
					// => readd all modules to the queue, as things could have been changed
					if (usedChunks.size < item.chunks.size) {
						if (isExistingChunk)
							usedChunks.add(/** @type {Chunk} */ (newChunk));
						if (
							/** @type {number} */ (usedChunks.size) >=
							/** @type {number} */ (item.cacheGroup.minChunks)
						) {
							const chunksArr = Array.from(usedChunks);
							for (const module of item.modules) {
								addModuleToChunksInfoMap(
									item.cacheGroup,
									item.cacheGroupIndex,
									chunksArr,
									getKey(usedChunks),
									module
								);
							}
						}
						continue;
					}
					// Validate minRemainingSize constraint when a single chunk is left over
					if (
						!enforced &&
						item.cacheGroup._validateRemainingSize &&
						usedChunks.size === 1
					) {
						const [chunk] = usedChunks;
						// Sizes of what would remain in the chunk after the split
						const chunkSizes = Object.create(null);
						for (const module of chunkGraph.getChunkModulesIterable(chunk)) {
							if (!item.modules.has(module)) {
								for (const type of module.getSourceTypes()) {
									chunkSizes[type] =
										(chunkSizes[type] || 0) + module.size(type);
								}
							}
						}
						const violatingSizes = getViolatingMinSizes(
							chunkSizes,
							item.cacheGroup.minRemainingSize
						);
						if (violatingSizes !== undefined) {
							const oldModulesSize = item.modules.size;
							removeModulesWithSourceType(item, violatingSizes);
							if (
								item.modules.size > 0 &&
								item.modules.size !== oldModulesSize
							) {
								// queue this item again to be processed again
								// without violating modules
								chunksInfoMap.set(/** @type {string} */ (bestEntryKey), item);
							}
							continue;
						}
					}
					// Create the new chunk if not reusing one
					if (newChunk === undefined) {
						newChunk = compilation.addChunk(chunkName);
					}
					// Walk through all chunks
					for (const chunk of usedChunks) {
						// Add graph connections for splitted chunk
						chunk.split(newChunk);
					}
					// Add a note to the chunk
					newChunk.chunkReason =
						(newChunk.chunkReason ? `${newChunk.chunkReason}, ` : "") +
						(isReusedWithAllModules
							? "reused as split chunk"
							: "split chunk");
					if (item.cacheGroup.key) {
						newChunk.chunkReason += ` (cache group: ${item.cacheGroup.key})`;
					}
					if (chunkName) {
						newChunk.chunkReason += ` (name: ${chunkName})`;
					}
					if (item.cacheGroup.filename) {
						newChunk.filenameTemplate = item.cacheGroup.filename;
					}
					if (item.cacheGroup.idHint) {
						newChunk.idNameHints.add(item.cacheGroup.idHint);
					}
					if (!isReusedWithAllModules) {
						// Add all modules to the new chunk
						for (const module of item.modules) {
							if (!module.chunkCondition(newChunk, compilation)) continue;
							// Add module to new chunk
							chunkGraph.connectChunkAndModule(newChunk, module);
							// Remove module from used chunks
							for (const chunk of usedChunks) {
								chunkGraph.disconnectChunkAndModule(chunk, module);
							}
						}
					} else {
						// Remove all modules from used chunks
						for (const module of item.modules) {
							for (const chunk of usedChunks) {
								chunkGraph.disconnectChunkAndModule(chunk, module);
							}
						}
					}
					// Record max-size settings for the maxSize phase; combine with
					// any previous settings for the same chunk (max of minSize,
					// min of maxSizes)
					if (
						Object.keys(item.cacheGroup.maxAsyncSize).length > 0 ||
						Object.keys(item.cacheGroup.maxInitialSize).length > 0
					) {
						const oldMaxSizeSettings = maxSizeQueueMap.get(newChunk);
						maxSizeQueueMap.set(newChunk, {
							minSize: oldMaxSizeSettings
								? combineSizes(
										oldMaxSizeSettings.minSize,
										item.cacheGroup._minSizeForMaxSize,
										Math.max
									)
								: item.cacheGroup.minSize,
							maxAsyncSize: oldMaxSizeSettings
								? combineSizes(
										oldMaxSizeSettings.maxAsyncSize,
										item.cacheGroup.maxAsyncSize,
										Math.min
									)
								: item.cacheGroup.maxAsyncSize,
							maxInitialSize: oldMaxSizeSettings
								? combineSizes(
										oldMaxSizeSettings.maxInitialSize,
										item.cacheGroup.maxInitialSize,
										Math.min
									)
								: item.cacheGroup.maxInitialSize,
							automaticNameDelimiter: item.cacheGroup.automaticNameDelimiter,
							keys: oldMaxSizeSettings
								? oldMaxSizeSettings.keys.concat(item.cacheGroup.key)
								: [item.cacheGroup.key]
						});
					}
					// remove all modules from other entries and update size
					for (const [key, info] of chunksInfoMap) {
						if (isOverlap(info.chunks, usedChunks)) {
							// update modules and total size
							// may remove it from the map when < minSize
							let updated = false;
							for (const module of item.modules) {
								if (info.modules.has(module)) {
									// remove module
									info.modules.delete(module);
									// update size
									for (const key of module.getSourceTypes()) {
										info.sizes[key] -= module.size(key);
									}
									updated = true;
								}
							}
							if (updated) {
								if (info.modules.size === 0) {
									chunksInfoMap.delete(key);
									continue;
								}
								if (
									removeMinSizeViolatingModules(info) ||
									!checkMinSizeReduction(
										info.sizes,
										info.cacheGroup.minSizeReduction,
										info.chunks.size
									)
								) {
									chunksInfoMap.delete(key);
									continue;
								}
							}
						}
					}
				}
				logger.timeEnd("queue");
				logger.time("maxSize");
				/** @type {Set<string>} */
				const incorrectMinMaxSizeSet = new Set();
				const { outputOptions } = compilation;
				// Make sure that maxSize is fulfilled
				// Array.from snapshots chunks since addChunk mutates the set
				const { fallbackCacheGroup } = this.options;
				for (const chunk of Array.from(compilation.chunks)) {
					const chunkConfig = maxSizeQueueMap.get(chunk);
					const {
						minSize,
						maxAsyncSize,
						maxInitialSize,
						automaticNameDelimiter
					} = chunkConfig || fallbackCacheGroup;
					if (!chunkConfig && !fallbackCacheGroup.chunksFilter(chunk))
						continue;
					/** @type {SplitChunksSizes} */
					let maxSize;
					if (chunk.isOnlyInitial()) {
						maxSize = maxInitialSize;
					} else if (chunk.canBeInitial()) {
						maxSize = combineSizes(maxAsyncSize, maxInitialSize, Math.min);
					} else {
						maxSize = maxAsyncSize;
					}
					if (Object.keys(maxSize).length === 0) {
						continue;
					}
					// Warn once per (keys, minSize, maxSize) when minSize > maxSize
					for (const key of Object.keys(maxSize)) {
						const maxSizeValue = maxSize[key];
						const minSizeValue = minSize[key];
						if (
							typeof minSizeValue === "number" &&
							minSizeValue > maxSizeValue
						) {
							const keys = chunkConfig && chunkConfig.keys;
							const warningKey = `${
								keys && keys.join()
							} ${minSizeValue} ${maxSizeValue}`;
							if (!incorrectMinMaxSizeSet.has(warningKey)) {
								incorrectMinMaxSizeSet.add(warningKey);
								compilation.warnings.push(
									new MinMaxSizeWarning(keys, minSizeValue, maxSizeValue)
								);
							}
						}
					}
					// Partition the chunk's modules into groups within maxSize
					// (maxSize is clamped up to minSize per type)
					const results = deterministicGroupingForModules({
						minSize,
						maxSize: mapObject(maxSize, (value, key) => {
							const minSizeValue = minSize[key];
							return typeof minSizeValue === "number"
								? Math.max(value, minSizeValue)
								: value;
						}),
						items: chunkGraph.getChunkModulesIterable(chunk),
						getKey(module) {
							// Stable, path-based key: readable name plus a hash of
							// the full identifier; cached per module
							const cache = getKeyCache.get(module);
							if (cache !== undefined) return cache;
							const ident = cachedMakePathsRelative(module.identifier());
							const nameForCondition =
								module.nameForCondition && module.nameForCondition();
							const name = nameForCondition
								? cachedMakePathsRelative(nameForCondition)
								: ident.replace(/^.*!|\?[^?!]*$/g, "");
							const fullKey =
								name +
								automaticNameDelimiter +
								hashFilename(ident, outputOptions);
							const key = requestToId(fullKey);
							getKeyCache.set(module, key);
							return key;
						},
						getSize(module) {
							const size = Object.create(null);
							for (const key of module.getSourceTypes()) {
								size[key] = module.size(key);
							}
							return size;
						}
					});
					// A single group means the chunk already fits — nothing to split
					if (results.length <= 1) {
						continue;
					}
					for (let i = 0; i < results.length; i++) {
						const group = results[i];
						const key = this.options.hidePathInfo
							? hashFilename(group.key, outputOptions)
							: group.key;
						let name = chunk.name
							? chunk.name + automaticNameDelimiter + key
							: null;
						// Keep generated names bounded; overlong names get truncated
						// and suffixed with a hash of the full name
						if (name && name.length > 100) {
							name =
								name.slice(0, 100) +
								automaticNameDelimiter +
								hashFilename(name, outputOptions);
						}
						if (i !== results.length - 1) {
							const newPart = compilation.addChunk(
								/** @type {Chunk["name"]} */ (name)
							);
							chunk.split(newPart);
							newPart.chunkReason = chunk.chunkReason;
							// Add all modules to the new chunk
							for (const module of group.items) {
								if (!module.chunkCondition(newPart, compilation)) {
									continue;
								}
								// Add module to new chunk
								chunkGraph.connectChunkAndModule(newPart, module);
								// Remove module from used chunks
								chunkGraph.disconnectChunkAndModule(chunk, module);
							}
						} else {
							// change the chunk to be a part
							// the last group keeps the original chunk, renamed
							chunk.name = /** @type {Chunk["name"]} */ (name);
						}
					}
				}
				logger.timeEnd("maxSize");
			}
		);
	});
}
- };
|