diff --git a/README.md b/README.md
index 43b8e677..85dd2c01 100644
--- a/README.md
+++ b/README.md
@@ -178,6 +178,9 @@ Analyzer will use module sizes from stats file.
 ```
 To get more information about it you can read [issue #147](https://github.com/webpack-contrib/webpack-bundle-analyzer/issues/147).
 
+### I don't see any stats for compressed files
+
+`webpack-bundle-analyzer` automatically detects files compressed with gzip (`.gz`) or Brotli (`.br`) and decompresses them to generate stats. If you use `compression-webpack-plugin` with `deleteOriginalAssets: true`, make sure the compressed files keep the same name as the original output files so that the correct chunk can be matched; see [issue #377](https://github.com/webpack-contrib/webpack-bundle-analyzer/issues/377#issuecomment-682347389). For Brotli assets, make sure your Node.js version is `>=10.22.0`.

Maintainers

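For context, here is a hedged sketch of the `compression-webpack-plugin` setup the new FAQ entry refers to. The config below is illustrative and not part of this patch; the plugin options shown (`algorithm`, `filename`, `deleteOriginalAssets`) are standard `compression-webpack-plugin` options, and the point is simply that the compressed asset keeps the original base name so the analyzer can map it back to its chunk:

```js
// webpack.config.js — a minimal sketch, not part of this patch.
// Assumes compression-webpack-plugin and webpack-bundle-analyzer are installed.
const CompressionPlugin = require('compression-webpack-plugin');
const {BundleAnalyzerPlugin} = require('webpack-bundle-analyzer');

module.exports = {
  // ...entry, output, loaders, etc.
  plugins: [
    new CompressionPlugin({
      algorithm: 'gzip',
      filename: '[path][base].gz',   // keep "<original name>.gz" so the chunk can be matched
      deleteOriginalAssets: true     // only the .gz asset remains in the output
    }),
    new BundleAnalyzerPlugin()
  ]
};
```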
diff --git a/src/analyzer.js b/src/analyzer.js
index 0a93b34d..ac63b0db 100644
--- a/src/analyzer.js
+++ b/src/analyzer.js
@@ -10,7 +10,7 @@ const {parseBundle} = require('./parseUtils');
 const {createAssetsFilter} = require('./utils');
 
 const FILENAME_QUERY_REGEXP = /\?.*$/u;
-const FILENAME_EXTENSIONS = /\.(js|mjs)$/iu;
+const FILENAME_EXTENSIONS = /\.(js|mjs|gz|br)$/iu;
 
 module.exports = {
   getViewerData,
@@ -47,7 +47,7 @@ function getViewerData(bundleStats, bundleDir, opts) {
     });
   }
 
-  // Picking only `*.js or *.mjs` assets from bundle that has non-empty `chunks` array
+  // Picking only `*.js or *.mjs or *.gz or *.br` assets from bundle that has non-empty `chunks` array
   bundleStats.assets = _.filter(bundleStats.assets, asset => {
     // Removing query part from filename (yes, somebody uses it for some reason and Webpack supports it)
     // See #22
@@ -69,7 +69,7 @@ function getViewerData(bundleStats, bundleDir, opts) {
     let bundleInfo;
 
     try {
-      bundleInfo = parseBundle(assetFile);
+      bundleInfo = parseBundle(assetFile, {logger});
     } catch (err) {
       const msg = (err.code === 'ENOENT') ? 'no such file' : err.message;
       logger.warn(`Error parsing bundle asset "${assetFile}": ${msg}`);
diff --git a/src/parseUtils.js b/src/parseUtils.js
index f5989105..1a34d133 100644
--- a/src/parseUtils.js
+++ b/src/parseUtils.js
@@ -2,13 +2,38 @@ const fs = require('fs');
 const _ = require('lodash');
 const acorn = require('acorn');
 const walk = require('acorn-walk');
+const zlib = require('zlib');
+const Logger = require('./Logger');
 
 module.exports = {
   parseBundle
 };
 
-function parseBundle(bundlePath) {
-  const content = fs.readFileSync(bundlePath, 'utf8');
+const COMPRESSED_EXTENSIONS = /\.(gz|br)$/iu;
+const DECOMPRESSION_ALGORITHMS = {
+  gz: 'unzipSync',
+  br: 'brotliDecompressSync'
+};
+
+function decompressBundle(bundlePath, {logger = new Logger()}) {
+  const decompressAlgorithm = DECOMPRESSION_ALGORITHMS[bundlePath.split('.').pop()];
+  if (zlib[decompressAlgorithm]) {
+    const compressedBuffer = fs.readFileSync(bundlePath);
+    const decompressedBuffer = zlib[decompressAlgorithm](compressedBuffer);
+    return decompressedBuffer.toString();
+  } else {
+    logger.warn(`Bundle "${bundlePath}" could not be decompressed, consider upgrading your Node.js version`);
+    return '';
+  }
+}
+
+function parseBundle(bundlePath, opts = {}) {
+  let content;
+  if (COMPRESSED_EXTENSIONS.test(bundlePath)) {
+    content = decompressBundle(bundlePath, opts);
+  } else {
+    content = fs.readFileSync(bundlePath, 'utf8');
+  }
   const ast = acorn.parse(content, {
     sourceType: 'script',
     // I believe in a bright future of ECMAScript!
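As a standalone illustration of the decompression path introduced above, the added `decompressBundle` helper boils down to the zlib calls below. The bundle path in this sketch is hypothetical, and on Node.js releases without Brotli support `zlib.brotliDecompressSync` is missing, which is exactly the case the new warning covers:

```js
// A minimal sketch of what decompressBundle does, assuming a hypothetical
// "dist/bundle.js.gz" exists on disk; not part of this patch.
const fs = require('fs');
const zlib = require('zlib');

const compressed = fs.readFileSync('dist/bundle.js.gz');
// unzipSync handles gzip/deflate; .br files go through brotliDecompressSync instead
const source = zlib.unzipSync(compressed).toString();
console.log(source.slice(0, 80)); // first characters of the decompressed bundle
```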
diff --git a/src/viewer.js b/src/viewer.js
index 4d4d0d27..72e9a79e 100644
--- a/src/viewer.js
+++ b/src/viewer.js
@@ -181,7 +181,9 @@ async function generateReport(bundleStats, opts) {
 }
 
 async function generateJSONReport(bundleStats, opts) {
-  const {reportFilename, bundleDir = null, logger = new Logger(), excludeAssets = null} = opts || {};
+  const {
+    reportFilename, bundleDir = null, logger = new Logger(), excludeAssets = null
+  } = opts || {};
 
   const chartData = getChartData({logger, excludeAssets}, bundleStats, bundleDir);
 
diff --git a/test/.eslintrc.json b/test/.eslintrc.json
index b2b1cbb0..dd51f4db 100644
--- a/test/.eslintrc.json
+++ b/test/.eslintrc.json
@@ -7,6 +7,7 @@
   "globals": {
     "expect": true,
     "makeWebpackConfig": true,
-    "webpackCompile": true
+    "webpackCompile": true,
+    "hasNodeVersion": true
   }
 }
diff --git a/test/bundles/validBundleWithArrowFunction.js.br b/test/bundles/validBundleWithArrowFunction.js.br
new file mode 100644
index 00000000..524587b6
Binary files /dev/null and b/test/bundles/validBundleWithArrowFunction.js.br differ
diff --git a/test/bundles/validBundleWithArrowFunction.js.gz b/test/bundles/validBundleWithArrowFunction.js.gz
new file mode 100644
index 00000000..06c76d28
Binary files /dev/null and b/test/bundles/validBundleWithArrowFunction.js.gz differ
diff --git a/test/helpers.js b/test/helpers.js
index c8a9258e..5d79b07f 100644
--- a/test/helpers.js
+++ b/test/helpers.js
@@ -7,6 +7,7 @@
 chai.use(require('chai-subset'));
 global.expect = chai.expect;
 global.webpackCompile = webpackCompile;
 global.makeWebpackConfig = makeWebpackConfig;
+global.hasNodeVersion = hasNodeVersion;
 
 const BundleAnalyzerPlugin = require('../lib/BundleAnalyzerPlugin');
@@ -80,3 +81,13 @@ function makeWebpackConfig(opts) {
 function wait(ms) {
   return new Promise(resolve => setTimeout(resolve, ms));
 }
+
+function hasNodeVersion(version) {
+  const currentVersion = process.version.split('v')[1].split('.');
+  const versions = version.split('.');
+  for (let i = 0; i < versions.length; i++) {
+    if (Number(currentVersion[i]) > Number(versions[i])) return true;
+    else if (Number(currentVersion[i]) < Number(versions[i])) return false;
+  }
+  return true;
+}
diff --git a/test/parseUtils.js b/test/parseUtils.js
index ec08aa46..5bee9759 100644
--- a/test/parseUtils.js
+++ b/test/parseUtils.js
@@ -4,6 +4,10 @@ const _ = require('lodash');
 const {parseBundle} = require('../lib/parseUtils');
 
 const BUNDLES_DIR = `${__dirname}/bundles`;
+const COMPRESSIONS = {
+  brotli: {extension: 'br', minVersion: '10.22.0'},
+  gzip: {extension: 'gz'}
+};
 
 describe('parseBundle', function () {
   const bundles = fs
@@ -30,4 +34,23 @@
     expect(bundle.src).to.equal(fs.readFileSync(bundleFile, 'utf8'));
     expect(bundle.modules).to.deep.equal({});
   });
+
+  Object.keys(COMPRESSIONS)
+    .forEach(compressionType => {
+      it(`should parse compressed ${compressionType} bundle`, function () {
+        const {extension, minVersion} = COMPRESSIONS[compressionType];
+        const bundleFile = `${BUNDLES_DIR}/validBundleWithArrowFunction.js`;
+        const compressedBundleFile = `${bundleFile}.${extension}`;
+        const expectedModules = JSON.parse(fs.readFileSync(`${BUNDLES_DIR}/validBundleWithArrowFunction.modules.json`));
+        const bundle = parseBundle(compressedBundleFile);
+
+        if (minVersion && !hasNodeVersion(minVersion)) {
+          expect(bundle.src).to.not.equal(fs.readFileSync(bundleFile, 'utf8'));
+          expect(bundle.modules).to.deep.not.equal(expectedModules.modules);
+        } else {
+          expect(bundle.src).to.equal(fs.readFileSync(bundleFile, 'utf8'));
+          expect(bundle.modules).to.deep.equal(expectedModules.modules);
+        }
+      });
+    });
 });
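Finally, the binary fixtures added above (`validBundleWithArrowFunction.js.gz` / `.br`) can be reproduced from the existing uncompressed bundle. A hedged sketch of how one might regenerate them with Node's `zlib`; this script is not part of the patch and the file paths are relative to the repository root:

```js
// regenerate-fixtures.js — illustrative only, not part of this patch.
// brotliCompressSync requires a Node.js version with Brotli support.
const fs = require('fs');
const zlib = require('zlib');

const src = fs.readFileSync('test/bundles/validBundleWithArrowFunction.js');
fs.writeFileSync('test/bundles/validBundleWithArrowFunction.js.gz', zlib.gzipSync(src));
fs.writeFileSync('test/bundles/validBundleWithArrowFunction.js.br', zlib.brotliCompressSync(src));
```

On Node versions that predate Brotli support, the `hasNodeVersion(minVersion)` guard in the new test makes the Brotli case assert the decompression-failure path instead of the parsed modules.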