LimitChunkCountPlugin.js

/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/

"use strict";

const { STAGE_ADVANCED } = require("../OptimizationStages");
const LazyBucketSortedSet = require("../util/LazyBucketSortedSet");
const { compareChunks } = require("../util/comparators");
const createSchemaValidation = require("../util/create-schema-validation");

/** @typedef {import("../../declarations/plugins/optimize/LimitChunkCountPlugin").LimitChunkCountPluginOptions} LimitChunkCountPluginOptions */
/** @typedef {import("../Chunk")} Chunk */
/** @typedef {import("../Compiler")} Compiler */

const validate = createSchemaValidation(
	require("../../schemas/plugins/optimize/LimitChunkCountPlugin.check.js"),
	() => require("../../schemas/plugins/optimize/LimitChunkCountPlugin.json"),
	{
		name: "Limit Chunk Count Plugin",
		baseDataPath: "options"
	}
);
/**
 * @typedef {object} ChunkCombination
 * @property {boolean} deleted this is set to true when combination was removed
 * @property {number} sizeDiff
 * @property {number} integratedSize
 * @property {Chunk} a
 * @property {Chunk} b
 * @property {number} aIdx
 * @property {number} bIdx
 * @property {number} aSize
 * @property {number} bSize
 */

/**
 * @template K, V
 * @param {Map<K, Set<V>>} map map
 * @param {K} key key
 * @param {V} value value
 */
const addToSetMap = (map, key, value) => {
	const set = map.get(key);
	if (set === undefined) {
		map.set(key, new Set([value]));
	} else {
		set.add(value);
	}
};
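
/**
 * Merges chunks until the number of chunks no longer exceeds the configured
 * `maxChunks` limit. Pairs of chunks are merged greedily, starting with the
 * pair whose integration yields the largest size reduction.
 */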
class LimitChunkCountPlugin {
	/**
	 * @param {LimitChunkCountPluginOptions=} options options object
	 */
	constructor(options) {
		validate(options);
		this.options = options;
	}

	/**
	 * @param {Compiler} compiler the webpack compiler
	 * @returns {void}
	 */
	apply(compiler) {
		const options = this.options;
		compiler.hooks.compilation.tap("LimitChunkCountPlugin", compilation => {
			compilation.hooks.optimizeChunks.tap(
				{
					name: "LimitChunkCountPlugin",
					stage: STAGE_ADVANCED
				},
				chunks => {
					const chunkGraph = compilation.chunkGraph;
					const maxChunks =
						/** @type {LimitChunkCountPluginOptions} */
						(options).maxChunks;
					if (!maxChunks) return;
					if (maxChunks < 1) return;
					if (compilation.chunks.size <= maxChunks) return;

					let remainingChunksToMerge = compilation.chunks.size - maxChunks;
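					// e.g. with 12 chunks and `maxChunks: 5`, up to 7 merges are attempted;
					// each successful merge removes exactly one chunk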

					// order chunks in a deterministic way
					const compareChunksWithGraph = compareChunks(chunkGraph);
					const orderedChunks = Array.from(chunks).sort(compareChunksWithGraph);

					// create a lazy sorted data structure to keep all combinations
					// this is large. Size = chunks * (chunks - 1) / 2
					// It uses a multi-layer bucket sort plus a normal sort in the last layer
					// It's also lazy, so only accessed buckets are sorted
					const combinations = new LazyBucketSortedSet(
						// Layer 1: ordered by largest size benefit
						c => c.sizeDiff,
						(a, b) => b - a,
						// Layer 2: ordered by smallest combined size
						/**
						 * @param {ChunkCombination} c combination
						 * @returns {number} integrated size
						 */
						c => c.integratedSize,
						(a, b) => a - b,
						// Layer 3: ordered by position difference in orderedChunks (-> to be deterministic)
						/**
						 * @param {ChunkCombination} c combination
						 * @returns {number} position difference
						 */
						c => c.bIdx - c.aIdx,
						(a, b) => a - b,
						// Layer 4: ordered by position in orderedChunks (-> to be deterministic)
						(a, b) => a.bIdx - b.bIdx
					);
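
					// Example of the layered ordering: a pair saving 3000 bytes is always
					// popped before a pair saving 1000 bytes; among pairs with the same
					// saving, the smaller merged size wins, and the index-based layers 3
					// and 4 break any remaining ties deterministically.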

					// we keep a mapping from chunk to all combinations
					// but this mapping is not kept up-to-date with deletions
					// so the `deleted` flag needs to be considered when iterating it
					/** @type {Map<Chunk, Set<ChunkCombination>>} */
					const combinationsByChunk = new Map();

					orderedChunks.forEach((b, bIdx) => {
						// create combination pairs with size and integrated size
						for (let aIdx = 0; aIdx < bIdx; aIdx++) {
							const a = orderedChunks[aIdx];
							// filter pairs that cannot be integrated!
							if (!chunkGraph.canChunksBeIntegrated(a, b)) continue;

							const integratedSize = chunkGraph.getIntegratedChunksSize(
								a,
								b,
								options
							);

							const aSize = chunkGraph.getChunkSize(a, options);
							const bSize = chunkGraph.getChunkSize(b, options);
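							// sizeDiff is the size saved by merging, e.g. if two 100 KiB
							// chunks integrate into a single 170 KiB chunk (because they
							// share modules), sizeDiff is roughly 30 KiB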
							const c = {
								deleted: false,
								sizeDiff: aSize + bSize - integratedSize,
								integratedSize,
								a,
								b,
								aIdx,
								bIdx,
								aSize,
								bSize
							};
							combinations.add(c);
							addToSetMap(combinationsByChunk, a, c);
							addToSetMap(combinationsByChunk, b, c);
						}
						return combinations;
					});
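					// (the `return combinations` above has no effect, as Array#forEach
					// ignores the callback's return value; pairs are collected through
					// the closure instead)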

					// list of modified chunks during this run
					// combinations affected by this change are skipped to allow
					// further optimizations
					/** @type {Set<Chunk>} */
					const modifiedChunks = new Set();

					let changed = false;
					loop: while (true) {
						const combination = combinations.popFirst();
						if (combination === undefined) break;

						combination.deleted = true;
						const { a, b, integratedSize } = combination;

						// skip over pair when
						// one of the already merged chunks is a parent of one of the chunks
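						// (the check below collects all chunk groups of `a` and `b`,
						// including their ancestors via `parentsIterable`; if a previously
						// merged chunk appears in any of them, the cached sizes of this
						// pair may be stale)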
						if (modifiedChunks.size > 0) {
							const queue = new Set(a.groupsIterable);
							for (const group of b.groupsIterable) {
								queue.add(group);
							}
							for (const group of queue) {
								for (const mChunk of modifiedChunks) {
									if (mChunk !== a && mChunk !== b && mChunk.isInGroup(group)) {
										// This is a potential pair which needs recalculation.
										// We can't do that now, but it may merge before following
										// pairs, so we leave space for it and consider the chunks
										// as modified, just for the worst case.
										remainingChunksToMerge--;
										if (remainingChunksToMerge <= 0) break loop;
										modifiedChunks.add(a);
										modifiedChunks.add(b);
										continue loop;
									}
								}
								for (const parent of group.parentsIterable) {
									queue.add(parent);
								}
							}
						}

						// merge the chunks
						if (chunkGraph.canChunksBeIntegrated(a, b)) {
							chunkGraph.integrateChunks(a, b);
							compilation.chunks.delete(b);

							// flag chunk a as modified as further optimizations are possible for all children here
							modifiedChunks.add(a);

							changed = true;
							remainingChunksToMerge--;
							if (remainingChunksToMerge <= 0) break;

							// Update all affected combinations
							// delete all combinations with the removed chunk
							// we will use combinations with the kept chunk instead
							for (const combination of /** @type {Set<ChunkCombination>} */ (
								combinationsByChunk.get(a)
							)) {
								if (combination.deleted) continue;
								combination.deleted = true;
								combinations.delete(combination);
							}

							// Update combinations with the kept chunk with new sizes
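							// (the removed chunk `b` is replaced by the kept chunk `a` in each
							// surviving pair; the side that pointed at `b` now uses the merged
							// size (`integratedSize`), the pair's benefit is recomputed, and
							// pairs that can no longer be integrated with `a` are dropped)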
							for (const combination of /** @type {Set<ChunkCombination>} */ (
								combinationsByChunk.get(b)
							)) {
								if (combination.deleted) continue;
								if (combination.a === b) {
									if (!chunkGraph.canChunksBeIntegrated(a, combination.b)) {
										combination.deleted = true;
										combinations.delete(combination);
										continue;
									}
									// Update size
									const newIntegratedSize = chunkGraph.getIntegratedChunksSize(
										a,
										combination.b,
										options
									);
									const finishUpdate = combinations.startUpdate(combination);
									combination.a = a;
									combination.integratedSize = newIntegratedSize;
									combination.aSize = integratedSize;
									combination.sizeDiff =
										combination.bSize + integratedSize - newIntegratedSize;
									finishUpdate();
								} else if (combination.b === b) {
									if (!chunkGraph.canChunksBeIntegrated(combination.a, a)) {
										combination.deleted = true;
										combinations.delete(combination);
										continue;
									}
									// Update size
									const newIntegratedSize = chunkGraph.getIntegratedChunksSize(
										combination.a,
										a,
										options
									);
									const finishUpdate = combinations.startUpdate(combination);
									combination.b = a;
									combination.integratedSize = newIntegratedSize;
									combination.bSize = integratedSize;
									combination.sizeDiff =
										integratedSize + combination.aSize - newIntegratedSize;
									finishUpdate();
								}
							}
							combinationsByChunk.set(
								a,
								/** @type {Set<ChunkCombination>} */ (
									combinationsByChunk.get(b)
								)
							);
							combinationsByChunk.delete(b);
						}
					}
					if (changed) return true;
				}
			);
		});
	}
}

module.exports = LimitChunkCountPlugin;
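
// Typical usage (a minimal sketch of a separate webpack.config.js, not part of
// this file; the plugin is exposed as `webpack.optimize.LimitChunkCountPlugin`):
//
//   const webpack = require("webpack");
//
//   module.exports = {
//     plugins: [
//       // merge generated chunks until at most 5 remain
//       new webpack.optimize.LimitChunkCountPlugin({ maxChunks: 5 })
//     ]
//   };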