-
Notifications
You must be signed in to change notification settings - Fork 12k
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
refactor(@angular/build): add experimental chunk optimizer for production application builds
An experimental chunk optimizer is now available for initial usage. To enable the optimization, script optimization must be enabled as well as an environment variable `NG_BUILD_OPTIMIZE_CHUNKS=1`. This build step uses `rollup` internally to process the build files directly in memory. The main bundling performs all resolution, bundling, and tree-shaking of the application. The chunk optimizer step then only needs to access the in-memory built files and does not need to perform any disk access or module resolution. This allows the step to be performed fairly quickly but it does add time to the overall production build. The `NG_BUILD_DEBUG_PERF=1` environment variable can be used to view how long the step takes within a build via the `OPTIMIZE_CHUNKS` entry. In the future, this optimization step may be automatically enabled based on initial file entry count and size. There are several current known issues: 1) Bundle budgets for named lazy chunks may not work as expected. 2) The console output may not show names (files will be present) for lazy chunk files. 3) The stats file (`--stats-json` option) will not exactly reflect the final written application files. This is similar to the current behavior of the `browser` builder with Webpack's stats file.
- Loading branch information
Showing
9 changed files
with
274 additions
and
6 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
211 changes: 211 additions & 0 deletions
211
packages/angular/build/src/builders/application/chunk-optimizer.ts
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,211 @@ | ||
/** | ||
* @license | ||
* Copyright Google LLC All Rights Reserved. | ||
* | ||
* Use of this source code is governed by an MIT-style license that can be | ||
* found in the LICENSE file at https://angular.dev/license | ||
*/ | ||
|
||
import assert from 'node:assert'; | ||
import { rollup } from 'rollup'; | ||
import { | ||
BuildOutputFile, | ||
BuildOutputFileType, | ||
BundleContextResult, | ||
InitialFileRecord, | ||
} from '../../tools/esbuild/bundler-context'; | ||
import { createOutputFile } from '../../tools/esbuild/utils'; | ||
import { assertIsError } from '../../utils/error'; | ||
|
||
export async function optimizeChunks( | ||
original: BundleContextResult, | ||
sourcemap: boolean | 'hidden', | ||
): Promise<BundleContextResult> { | ||
// Failed builds cannot be optimized | ||
if (original.errors) { | ||
return original; | ||
} | ||
|
||
// Find the main browser entrypoint | ||
let mainFile; | ||
for (const [file, record] of original.initialFiles) { | ||
if ( | ||
record.name === 'main' && | ||
record.entrypoint && | ||
!record.serverFile && | ||
record.type === 'script' | ||
) { | ||
mainFile = file; | ||
break; | ||
} | ||
} | ||
|
||
// No action required if no browser main entrypoint | ||
if (!mainFile) { | ||
return original; | ||
} | ||
|
||
const chunks: Record<string, BuildOutputFile> = {}; | ||
const maps: Record<string, BuildOutputFile> = {}; | ||
for (const originalFile of original.outputFiles) { | ||
if (originalFile.type !== BuildOutputFileType.Browser) { | ||
continue; | ||
} | ||
|
||
if (originalFile.path.endsWith('.js')) { | ||
chunks[originalFile.path] = originalFile; | ||
} else if (originalFile.path.endsWith('.js.map')) { | ||
// Create mapping of JS file to sourcemap content | ||
maps[originalFile.path.slice(0, -4)] = originalFile; | ||
} | ||
} | ||
|
||
const usedChunks = new Set<string>(); | ||
|
||
let bundle; | ||
let optimizedOutput; | ||
try { | ||
bundle = await rollup({ | ||
input: mainFile, | ||
plugins: [ | ||
{ | ||
name: 'angular-bundle', | ||
resolveId(source) { | ||
// Remove leading `./` if present | ||
const file = source[0] === '.' && source[1] === '/' ? source.slice(2) : source; | ||
|
||
if (chunks[file]) { | ||
return file; | ||
} | ||
|
||
// All other identifiers are considered external to maintain behavior | ||
return { id: source, external: true }; | ||
}, | ||
load(id) { | ||
assert( | ||
chunks[id], | ||
`Angular chunk content should always be present in chunk optimizer [${id}].`, | ||
); | ||
|
||
usedChunks.add(id); | ||
|
||
const result = { | ||
code: chunks[id].text, | ||
map: maps[id]?.text, | ||
}; | ||
|
||
return result; | ||
}, | ||
}, | ||
], | ||
}); | ||
|
||
const result = await bundle.generate({ | ||
compact: true, | ||
sourcemap, | ||
chunkFileNames(chunkInfo) { | ||
// Do not add hash to file name if already present | ||
return /-[a-zA-Z0-9]{8}$/.test(chunkInfo.name) ? '[name].js' : '[name]-[hash].js'; | ||
}, | ||
}); | ||
optimizedOutput = result.output; | ||
} catch (e) { | ||
assertIsError(e); | ||
|
||
return { | ||
errors: [ | ||
// Most of these fields are not actually needed for printing the error | ||
{ | ||
id: '', | ||
text: 'Chunk optimization failed', | ||
detail: undefined, | ||
pluginName: '', | ||
location: null, | ||
notes: [ | ||
{ | ||
text: e.message, | ||
location: null, | ||
}, | ||
], | ||
}, | ||
], | ||
warnings: original.warnings, | ||
}; | ||
} finally { | ||
await bundle?.close(); | ||
} | ||
|
||
// Remove used chunks and associated sourcemaps from the original result | ||
original.outputFiles = original.outputFiles.filter( | ||
(file) => | ||
!usedChunks.has(file.path) && | ||
!(file.path.endsWith('.map') && usedChunks.has(file.path.slice(0, -4))), | ||
); | ||
|
||
// Add new optimized chunks | ||
const importsPerFile: Record<string, string[]> = {}; | ||
for (const optimizedFile of optimizedOutput) { | ||
if (optimizedFile.type !== 'chunk') { | ||
continue; | ||
} | ||
|
||
importsPerFile[optimizedFile.fileName] = optimizedFile.imports; | ||
|
||
original.outputFiles.push( | ||
createOutputFile(optimizedFile.fileName, optimizedFile.code, BuildOutputFileType.Browser), | ||
); | ||
if (optimizedFile.map && optimizedFile.sourcemapFileName) { | ||
original.outputFiles.push( | ||
createOutputFile( | ||
optimizedFile.sourcemapFileName, | ||
optimizedFile.map.toString(), | ||
BuildOutputFileType.Browser, | ||
), | ||
); | ||
} | ||
} | ||
|
||
// Update initial files to reflect optimized chunks | ||
const entriesToAnalyze: [string, InitialFileRecord][] = []; | ||
for (const usedFile of usedChunks) { | ||
// Leave the main file since its information did not change | ||
if (usedFile === mainFile) { | ||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion | ||
entriesToAnalyze.push([mainFile, original.initialFiles.get(mainFile)!]); | ||
continue; | ||
} | ||
|
||
// Remove all other used chunks | ||
original.initialFiles.delete(usedFile); | ||
} | ||
|
||
// Analyze for transitive initial files | ||
let currentEntry; | ||
while ((currentEntry = entriesToAnalyze.pop())) { | ||
const [entryPath, entryRecord] = currentEntry; | ||
|
||
for (const importPath of importsPerFile[entryPath]) { | ||
const existingRecord = original.initialFiles.get(importPath); | ||
if (existingRecord) { | ||
// Store the smallest value depth | ||
if (existingRecord.depth > entryRecord.depth + 1) { | ||
existingRecord.depth = entryRecord.depth + 1; | ||
} | ||
|
||
continue; | ||
} | ||
|
||
const record: InitialFileRecord = { | ||
type: 'script', | ||
entrypoint: false, | ||
external: false, | ||
serverFile: false, | ||
depth: entryRecord.depth + 1, | ||
}; | ||
|
||
entriesToAnalyze.push([importPath, record]); | ||
} | ||
} | ||
|
||
return original; | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,19 @@ | ||
import assert from 'node:assert/strict'; | ||
import { readFile } from 'node:fs/promises'; | ||
import { execWithEnv } from '../../utils/process'; | ||
|
||
/** | ||
* AOT builds with chunk optimizer should contain generated component definitions. | ||
* This is currently testing that the generated code is propagating through the | ||
* chunk optimization step. | ||
*/ | ||
export default async function () { | ||
await execWithEnv('ng', ['build', '--output-hashing=none'], { | ||
...process.env, | ||
NG_BUILD_OPTIMIZE_CHUNKS: '1', | ||
NG_BUILD_MANGLE: '0', | ||
}); | ||
|
||
const content = await readFile('dist/test-project/browser/main.js', 'utf-8'); | ||
assert.match(content, /\\u0275\\u0275defineComponent/); | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters