10
0

LimitChunkCountPlugin.js 8.7 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277
  1. /*
  2. MIT License http://www.opensource.org/licenses/mit-license.php
  3. Author Tobias Koppers @sokra
  4. */
  5. "use strict";
  6. const { STAGE_ADVANCED } = require("../OptimizationStages");
  7. const LazyBucketSortedSet = require("../util/LazyBucketSortedSet");
  8. const { compareChunks } = require("../util/comparators");
  9. const createSchemaValidation = require("../util/create-schema-validation");
  10. /** @typedef {import("../../declarations/plugins/optimize/LimitChunkCountPlugin").LimitChunkCountPluginOptions} LimitChunkCountPluginOptions */
  11. /** @typedef {import("../Chunk")} Chunk */
  12. /** @typedef {import("../Compiler")} Compiler */
/**
 * Schema-backed validator for the plugin options. Throws a descriptive
 * error (named "Limit Chunk Count Plugin", data path "options") when the
 * options object passed to the constructor doesn't match the JSON schema.
 */
const validate = createSchemaValidation(
	require("../../schemas/plugins/optimize/LimitChunkCountPlugin.check.js"),
	() => require("../../schemas/plugins/optimize/LimitChunkCountPlugin.json"),
	{
		name: "Limit Chunk Count Plugin",
		baseDataPath: "options"
	}
);
  21. /**
  22. * @typedef {object} ChunkCombination
  23. * @property {boolean} deleted this is set to true when combination was removed
  24. * @property {number} sizeDiff
  25. * @property {number} integratedSize
  26. * @property {Chunk} a
  27. * @property {Chunk} b
  28. * @property {number} aIdx
  29. * @property {number} bIdx
  30. * @property {number} aSize
  31. * @property {number} bSize
  32. */
  33. /**
  34. * @template K, V
  35. * @param {Map<K, Set<V>>} map map
  36. * @param {K} key key
  37. * @param {V} value value
  38. */
  39. const addToSetMap = (map, key, value) => {
  40. const set = map.get(key);
  41. if (set === undefined) {
  42. map.set(key, new Set([value]));
  43. } else {
  44. set.add(value);
  45. }
  46. };
/**
 * Merges chunks until at most `options.maxChunks` chunks remain.
 * Candidate pairs are merged greedily, best size saving first, using a
 * lazy multi-layer bucket sort so only the buckets actually inspected
 * get sorted.
 */
class LimitChunkCountPlugin {
	/**
	 * @param {LimitChunkCountPluginOptions=} options options object
	 */
	constructor(options) {
		// throws when options don't match the plugin's JSON schema
		validate(options);
		this.options = /** @type {LimitChunkCountPluginOptions} */ (options);
	}

	/**
	 * Taps `optimizeChunks` at the advanced stage and greedily merges
	 * chunk pairs until at most `maxChunks` chunks remain.
	 * @param {Compiler} compiler the webpack compiler
	 * @returns {void}
	 */
	apply(compiler) {
		const options = this.options;
		compiler.hooks.compilation.tap("LimitChunkCountPlugin", compilation => {
			compilation.hooks.optimizeChunks.tap(
				{
					name: "LimitChunkCountPlugin",
					stage: STAGE_ADVANCED
				},
				chunks => {
					const chunkGraph = compilation.chunkGraph;
					const maxChunks = options.maxChunks;
					// bail out when no effective limit is configured
					if (!maxChunks) return;
					if (maxChunks < 1) return;
					// nothing to do when already within the limit
					if (compilation.chunks.size <= maxChunks) return;
					let remainingChunksToMerge = compilation.chunks.size - maxChunks;
					// order chunks in a deterministic way
					const compareChunksWithGraph = compareChunks(chunkGraph);
					const orderedChunks = Array.from(chunks).sort(compareChunksWithGraph);
					// create a lazy sorted data structure to keep all combinations
					// this is large. Size = chunks * (chunks - 1) / 2
					// It uses a multi layer bucket sort plus normal sort in the last layer
					// It's also lazy so only accessed buckets are sorted
					const combinations = new LazyBucketSortedSet(
						// Layer 1: ordered by largest size benefit
						c => c.sizeDiff,
						(a, b) => b - a,
						// Layer 2: ordered by smallest combined size
						/**
						 * @param {ChunkCombination} c combination
						 * @returns {number} integrated size
						 */
						c => c.integratedSize,
						(a, b) => a - b,
						// Layer 3: ordered by position difference in orderedChunk (-> to be deterministic)
						/**
						 * @param {ChunkCombination} c combination
						 * @returns {number} position difference
						 */
						c => c.bIdx - c.aIdx,
						(a, b) => a - b,
						// Layer 4: ordered by position in orderedChunk (-> to be deterministic)
						(a, b) => a.bIdx - b.bIdx
					);
					// we keep a mapping from chunk to all combinations
					// but this mapping is not kept up-to-date with deletions
					// so the `deleted` flag needs to be considered when iterating this
					/** @type {Map<Chunk, Set<ChunkCombination>>} */
					const combinationsByChunk = new Map();
					for (const [bIdx, b] of orderedChunks.entries()) {
						// create combination pairs with size and integrated size
						// (only pairs with aIdx < bIdx, so each pair appears once)
						for (let aIdx = 0; aIdx < bIdx; aIdx++) {
							const a = orderedChunks[aIdx];
							// filter pairs that can not be integrated!
							if (!chunkGraph.canChunksBeIntegrated(a, b)) continue;
							const integratedSize = chunkGraph.getIntegratedChunksSize(
								a,
								b,
								options
							);
							const aSize = chunkGraph.getChunkSize(a, options);
							const bSize = chunkGraph.getChunkSize(b, options);
							const c = {
								deleted: false,
								// size saved by merging a and b into one chunk
								sizeDiff: aSize + bSize - integratedSize,
								integratedSize,
								a,
								b,
								aIdx,
								bIdx,
								aSize,
								bSize
							};
							combinations.add(c);
							addToSetMap(combinationsByChunk, a, c);
							addToSetMap(combinationsByChunk, b, c);
						}
					}
					// list of modified chunks during this run
					// combinations affected by this change are skipped to allow
					// further optimizations
					/** @type {Set<Chunk>} */
					const modifiedChunks = new Set();
					let changed = false;
					// greedily pop the best remaining pair and try to merge it
					loop: while (true) {
						const combination = combinations.popFirst();
						if (combination === undefined) break;
						combination.deleted = true;
						const { a, b, integratedSize } = combination;
						// skip over pair when
						// one of the already merged chunks is a parent of one of the chunks
						if (modifiedChunks.size > 0) {
							// walk all ancestor groups of a and b
							const queue = new Set(a.groupsIterable);
							for (const group of b.groupsIterable) {
								queue.add(group);
							}
							for (const group of queue) {
								for (const mChunk of modifiedChunks) {
									if (mChunk !== a && mChunk !== b && mChunk.isInGroup(group)) {
										// This is a potential pair which needs recalculation
										// We can't do that now, but it might merge before following pairs
										// so we leave space for it, and consider chunks as modified
										// just for the worst case
										remainingChunksToMerge--;
										if (remainingChunksToMerge <= 0) break loop;
										modifiedChunks.add(a);
										modifiedChunks.add(b);
										continue loop;
									}
								}
								// extend the walk to parent groups
								for (const parent of group.parentsIterable) {
									queue.add(parent);
								}
							}
						}
						// merge the chunks
						if (chunkGraph.canChunksBeIntegrated(a, b)) {
							chunkGraph.integrateChunks(a, b);
							compilation.chunks.delete(b);
							// flag chunk a as modified as further optimizations are possible for all children here
							modifiedChunks.add(a);
							changed = true;
							remainingChunksToMerge--;
							if (remainingChunksToMerge <= 0) break;
							// Update all affected combinations
							// delete all combinations with the removed chunk
							// we will use combinations with the kept chunk instead
							for (const combination of /** @type {Set<ChunkCombination>} */ (
								combinationsByChunk.get(a)
							)) {
								if (combination.deleted) continue;
								combination.deleted = true;
								combinations.delete(combination);
							}
							// Update combinations with the kept chunk with new sizes
							// (b was merged into a, so pairs that referenced b now
							// reference a with recomputed sizes)
							for (const combination of /** @type {Set<ChunkCombination>} */ (
								combinationsByChunk.get(b)
							)) {
								if (combination.deleted) continue;
								if (combination.a === b) {
									if (!chunkGraph.canChunksBeIntegrated(a, combination.b)) {
										combination.deleted = true;
										combinations.delete(combination);
										continue;
									}
									// Update size
									const newIntegratedSize = chunkGraph.getIntegratedChunksSize(
										a,
										combination.b,
										options
									);
									// startUpdate/finishUpdate resorts the entry after mutation
									const finishUpdate = combinations.startUpdate(combination);
									combination.a = a;
									combination.integratedSize = newIntegratedSize;
									combination.aSize = integratedSize;
									combination.sizeDiff =
										combination.bSize + integratedSize - newIntegratedSize;
									finishUpdate();
								} else if (combination.b === b) {
									if (!chunkGraph.canChunksBeIntegrated(combination.a, a)) {
										combination.deleted = true;
										combinations.delete(combination);
										continue;
									}
									// Update size
									const newIntegratedSize = chunkGraph.getIntegratedChunksSize(
										combination.a,
										a,
										options
									);
									const finishUpdate = combinations.startUpdate(combination);
									combination.b = a;
									combination.integratedSize = newIntegratedSize;
									combination.bSize = integratedSize;
									combination.sizeDiff =
										integratedSize + combination.aSize - newIntegratedSize;
									finishUpdate();
								}
							}
							// chunk b no longer exists: its (rewritten) combination set
							// becomes the set for the kept chunk a
							combinationsByChunk.set(
								a,
								/** @type {Set<ChunkCombination>} */ (
									combinationsByChunk.get(b)
								)
							);
							combinationsByChunk.delete(b);
						}
					}
					// returning true makes webpack rerun the optimizeChunks hook
					if (changed) return true;
				}
			);
		});
	}
}

module.exports = LimitChunkCountPlugin;