/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/

"use strict";

const { STAGE_BASIC } = require("../OptimizationStages");
const { runtimeEqual } = require("../util/runtime");

/** @typedef {import("../Compiler")} Compiler */

/**
 * Collect all chunks that might contain exactly the same modules as `chunk`.
 * Returns `undefined` instead of an empty Set when no candidate exists, so
 * the caller can cheaply detect the common "nothing to merge" case.
 *
 * @param {TODO} chunkGraph the chunk graph of the compilation
 * @param {TODO} chunk the chunk to find duplicates for
 * @param {Set<TODO>} knownNonDuplicates chunks already processed; excluded up front
 * @returns {Set<TODO> | undefined} candidate duplicates, or undefined when none
 */
const getCandidates = (chunkGraph, chunk, knownNonDuplicates) => {
	let candidates;
	for (const module of chunkGraph.getChunkModulesIterable(chunk)) {
		if (candidates === undefined) {
			// Seed the candidate set from the chunks of the first module,
			// keeping only chunks with the same total number of modules.
			for (const other of chunkGraph.getModuleChunksIterable(module)) {
				const sameSize =
					chunkGraph.getNumberOfChunkModules(chunk) ===
					chunkGraph.getNumberOfChunkModules(other);
				if (other !== chunk && sameSize && !knownNonDuplicates.has(other)) {
					// Allocate the Set lazily to reduce memory pressure.
					if (candidates === undefined) candidates = new Set();
					candidates.add(other);
				}
			}
			// No chunk survived the first module: nothing can ever match.
			if (candidates === undefined) return undefined;
		} else {
			// Every remaining candidate must also contain this module.
			for (const other of candidates) {
				if (!chunkGraph.isModuleInChunk(module, other)) {
					candidates.delete(other);
				}
			}
			if (candidates.size === 0) return undefined;
		}
	}
	return candidates;
};

/**
 * Check whether every module of `chunk` has exports that are used identically
 * under both chunks' runtimes. Only relevant when the runtimes differ.
 *
 * @param {TODO} chunkGraph the chunk graph of the compilation
 * @param {TODO} moduleGraph the module graph of the compilation
 * @param {TODO} chunk the chunk being kept
 * @param {TODO} otherChunk the duplicate candidate
 * @returns {boolean} true when export usage matches for all modules
 */
const exportsAreEquallyUsed = (chunkGraph, moduleGraph, chunk, otherChunk) => {
	for (const module of chunkGraph.getChunkModulesIterable(chunk)) {
		const exportsInfo = moduleGraph.getExportsInfo(module);
		if (!exportsInfo.isEquallyUsed(chunk.runtime, otherChunk.runtime)) {
			return false;
		}
	}
	return true;
};

/**
 * Plugin that merges chunks containing exactly the same set of modules into
 * a single chunk during the basic chunk-optimization stage.
 */
class MergeDuplicateChunksPlugin {
	/**
	 * @param {Compiler} compiler the compiler
	 * @returns {void}
	 */
	apply(compiler) {
		compiler.hooks.compilation.tap(
			"MergeDuplicateChunksPlugin",
			compilation => {
				compilation.hooks.optimizeChunks.tap(
					{
						name: "MergeDuplicateChunksPlugin",
						stage: STAGE_BASIC
					},
					chunks => {
						const { chunkGraph, moduleGraph } = compilation;

						// Chunks already handled in an earlier iteration; they can
						// never be duplicates again, so skip them for performance.
						const knownNonDuplicates = new Set();

						for (const chunk of chunks) {
							const candidates = getCandidates(
								chunkGraph,
								chunk,
								knownNonDuplicates
							);
							if (candidates !== undefined) {
								for (const otherChunk of candidates) {
									// Only merge chunks that agree on having a runtime.
									if (otherChunk.hasRuntime() !== chunk.hasRuntime()) {
										continue;
									}
									// Never merge entry chunks away.
									if (chunkGraph.getNumberOfEntryModules(chunk) > 0) {
										continue;
									}
									if (chunkGraph.getNumberOfEntryModules(otherChunk) > 0) {
										continue;
									}
									// With differing runtimes, export usage must match
									// for every module, otherwise merging would change
									// tree-shaking results.
									if (
										!runtimeEqual(chunk.runtime, otherChunk.runtime) &&
										!exportsAreEquallyUsed(
											chunkGraph,
											moduleGraph,
											chunk,
											otherChunk
										)
									) {
										continue;
									}
									// merge them
									if (chunkGraph.canChunksBeIntegrated(chunk, otherChunk)) {
										chunkGraph.integrateChunks(chunk, otherChunk);
										compilation.chunks.delete(otherChunk);
									}
								}
							}
							// Don't check already processed chunks twice.
							knownNonDuplicates.add(chunk);
						}
					}
				);
			}
		);
	}
}

module.exports = MergeDuplicateChunksPlugin;