Parallelizes the build script across multiple processes (facebook#15716)
* Write size info to separate file per bundle

`bundle-sizes.json` contains the combined size information for every
build. This makes it easier to store and process, but it prevents us
from parallelizing the build script, because each process would need to
write to the same file.

So I've updated the Rollup script to output an individual size file per build.
A downstream CI job consolidates them into a single file (sketched below).

I have not parallelized the Rollup script yet. I'll do that next.
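
The shape of the change, in brief: each build process writes one small JSON record under `build/sizes/`, and a downstream step merges whatever records it finds into `build/bundle-sizes.json`. A minimal sketch of both halves, using the paths from this commit (function names and record fields here are illustrative; the real code lives in `scripts/rollup/stats.js` and `scripts/rollup/consolidateBundleSizes.js` below):

```js
'use strict';

const fs = require('fs');
const path = require('path');
const mkdirp = require('mkdirp');

const BUILD_DIR = path.join(__dirname, '../../build');

// Producer side: each build process writes one record per bundle it built.
function saveBundleSize(record) {
  // e.g. record = {filename: 'react.development.js', size: 112000, gzip: 31000}
  mkdirp.sync(path.join(BUILD_DIR, 'sizes'));
  fs.writeFileSync(
    path.join(BUILD_DIR, 'sizes', `${record.filename}.size.json`),
    JSON.stringify(record, null, 2)
  );
}

// Consumer side: a downstream CI job merges every record into one file.
function consolidateBundleSizes() {
  const sizesDir = path.join(BUILD_DIR, 'sizes');
  const bundleSizes = fs
    .readdirSync(sizesDir)
    .filter(name => name.endsWith('.size.json'))
    .map(name => JSON.parse(fs.readFileSync(path.join(sizesDir, name))));
  fs.writeFileSync(
    path.join(BUILD_DIR, 'bundle-sizes.json'),
    JSON.stringify({bundleSizes}, null, 2)
  );
}
```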

* Parallelize the build script

Uses CircleCI's `parallelism` config option to spin up multiple build
processes.
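
CircleCI exposes the shard count and this container's index as `CIRCLE_NODE_TOTAL` and `CIRCLE_NODE_INDEX`; each container builds every Nth entry from the same flat list of (bundle, type) pairs. A minimal sketch of that sharding (mirroring the logic added to `scripts/rollup/build.js` below; the helper name is illustrative):

```js
// Each CI container sees the full bundle list but only builds its own slice.
function shardForThisContainer(bundles) {
  const nodeTotal = parseInt(process.env.CIRCLE_NODE_TOTAL, 10);
  const nodeIndex = parseInt(process.env.CIRCLE_NODE_INDEX, 10);
  if (!nodeTotal) {
    // Not running under CircleCI parallelism: build everything locally.
    return bundles;
  }
  // Round-robin assignment: container i takes items i, i + N, i + 2N, ...
  return bundles.filter((_, i) => i % nodeTotal === nodeIndex);
}
```
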
acdlite authored May 29, 2019
1 parent 30b1a80 commit 1cc3bba
Showing 6 changed files with 97 additions and 42 deletions.
37 changes: 20 additions & 17 deletions .circleci/config.yml
@@ -118,41 +118,44 @@ jobs:
build:
docker: *docker
environment: *environment
parallelism: 20
steps:
- checkout
- *restore_yarn_cache
- *run_yarn
- run: ./scripts/circleci/add_build_info_json.sh
- run: ./scripts/circleci/update_package_versions.sh
- run: yarn build
- run: cp ./scripts/rollup/results.json ./build/bundle-sizes.json
- run: ./scripts/circleci/upload_build.sh
- run: ./scripts/circleci/pack_and_store_artifact.sh
- store_artifacts:
path: ./node_modules.tgz
- store_artifacts:
path: ./build.tgz
- store_artifacts:
path: ./build/bundle-sizes.json
- store_artifacts:
path: ./scripts/error-codes/codes.json
- persist_to_workspace:
root: build
paths:
- facebook-www
- node_modules
- react-native
- bundle-sizes.json
- sizes/*.json

sizebot:
process_artifacts:
docker: *docker
environment: *environment
steps:
- checkout
- attach_workspace: *attach_workspace
- *restore_yarn_cache
- *run_yarn
- run: node ./scripts/rollup/consolidateBundleSizes.js
- run: node ./scripts/tasks/danger
- run: ./scripts/circleci/upload_build.sh
- run: ./scripts/circleci/pack_and_store_artifact.sh
- store_artifacts:
path: ./node_modules.tgz
- store_artifacts:
path: ./build.tgz
- store_artifacts:
path: ./build/bundle-sizes.json
- store_artifacts:
# TODO: Update release script to use local file instead of pulling
# from artifacts.
path: ./scripts/error-codes/codes.json

lint_build:
docker: *docker
@@ -223,13 +226,10 @@ workflows:
- test_source_fire:
requires:
- setup
- test_coverage:
requires:
- setup
- build:
requires:
- setup
- sizebot:
- process_artifacts:
requires:
- build
- lint_build:
@@ -254,3 +254,6 @@
- test_fuzz:
requires:
- setup
- test_coverage:
requires:
- setup
1 change: 0 additions & 1 deletion .gitignore
@@ -1,7 +1,6 @@
.DS_STORE
node_modules
scripts/flow/*/.flowconfig
scripts/rollup/results.json
*~
*.pyc
.grunt
2 changes: 1 addition & 1 deletion scripts/circleci/upload_build.sh
@@ -10,7 +10,7 @@ if [ -z "$CI_PULL_REQUEST" ] && [ -n "$BUILD_SERVER_ENDPOINT" ]; then
-F "react-dom.production.min=@build/dist/react-dom.production.min.js" \
-F "react-dom-server.browser.development=@build/dist/react-dom-server.browser.development.js" \
-F "react-dom-server.browser.production.min=@build/dist/react-dom-server.browser.production.min.js" \
-F "results.json=@build/../scripts/rollup/results.json" \
-F "results.json=@build/../build/bundle-sizes.json" \
-F "commit=$CIRCLE_SHA1" \
-F "date=$(git log --format='%ct' -1)" \
-F "pull_request=false" \
50 changes: 32 additions & 18 deletions scripts/rollup/build.js
@@ -21,7 +21,7 @@ const useForks = require('./plugins/use-forks-plugin');
const stripUnusedImports = require('./plugins/strip-unused-imports');
const extractErrorCodes = require('../error-codes/extract-errors');
const Packaging = require('./packaging');
const {asyncCopyTo, asyncRimRaf} = require('./utils');
const {asyncCopyTo} = require('./utils');
const codeFrame = require('babel-code-frame');
const Wrappers = require('./wrappers');

@@ -634,27 +634,41 @@ function handleRollupError(error) {
}

async function buildEverything() {
await asyncRimRaf('build');

// Run them serially for better console output
// and to avoid any potential race conditions.

let bundles = [];
// eslint-disable-next-line no-for-of-loops/no-for-of-loops
for (const bundle of Bundles.bundles) {
await createBundle(bundle, UMD_DEV);
await createBundle(bundle, UMD_PROD);
await createBundle(bundle, UMD_PROFILING);
await createBundle(bundle, NODE_DEV);
await createBundle(bundle, NODE_PROD);
await createBundle(bundle, NODE_PROFILING);
await createBundle(bundle, FB_WWW_DEV);
await createBundle(bundle, FB_WWW_PROD);
await createBundle(bundle, FB_WWW_PROFILING);
await createBundle(bundle, RN_OSS_DEV);
await createBundle(bundle, RN_OSS_PROD);
await createBundle(bundle, RN_OSS_PROFILING);
await createBundle(bundle, RN_FB_DEV);
await createBundle(bundle, RN_FB_PROD);
await createBundle(bundle, RN_FB_PROFILING);
bundles.push(
[bundle, UMD_DEV],
[bundle, UMD_PROD],
[bundle, UMD_PROFILING],
[bundle, NODE_DEV],
[bundle, NODE_PROD],
[bundle, NODE_PROFILING],
[bundle, FB_WWW_DEV],
[bundle, FB_WWW_PROD],
[bundle, FB_WWW_PROFILING],
[bundle, RN_OSS_DEV],
[bundle, RN_OSS_PROD],
[bundle, RN_OSS_PROFILING],
[bundle, RN_FB_DEV],
[bundle, RN_FB_PROD],
[bundle, RN_FB_PROFILING]
);
}

if (!shouldExtractErrors && process.env.CIRCLE_NODE_TOTAL) {
// In CI, parallelize bundles across multiple tasks.
const nodeTotal = parseInt(process.env.CIRCLE_NODE_TOTAL, 10);
const nodeIndex = parseInt(process.env.CIRCLE_NODE_INDEX, 10);
bundles = bundles.filter((_, i) => i % nodeTotal === nodeIndex);
}

// eslint-disable-next-line no-for-of-loops/no-for-of-loops
for (const [bundle, bundleType] of bundles) {
await createBundle(bundle, bundleType);
}

await Packaging.copyAllShims();
25 changes: 25 additions & 0 deletions scripts/rollup/consolidateBundleSizes.js
@@ -0,0 +1,25 @@
'use strict';

// Script that combines bundle size information for each build into a single
// JSON file for easier storage and processing.

const fs = require('fs');
const path = require('path');

const BUILD_DIR = path.join(__dirname, '../../build');

const filenames = fs.readdirSync(path.join(BUILD_DIR, 'sizes'));

let bundleSizes = [];
for (let i = 0; i < filenames.length; i++) {
const filename = filenames[i];
if (filename.endsWith('.size.json')) {
const json = fs.readFileSync(path.join(BUILD_DIR, 'sizes', filename));
bundleSizes.push(JSON.parse(json));
}
}

const outputFilename = path.join(BUILD_DIR, 'bundle-sizes.json');
const outputContents = JSON.stringify({bundleSizes}, null, 2);

fs.writeFileSync(outputFilename, outputContents);
24 changes: 19 additions & 5 deletions scripts/rollup/stats.js
@@ -5,21 +5,35 @@ const filesize = require('filesize');
const chalk = require('chalk');
const join = require('path').join;
const fs = require('fs');
const prevBuildResults = fs.existsSync(__dirname + '/results.json')
? require('./results.json')
const mkdirp = require('mkdirp');

const BUNDLE_SIZES_FILE_NAME = join(__dirname, '../../build/bundle-sizes.json');
const prevBuildResults = fs.existsSync(BUNDLE_SIZES_FILE_NAME)
? require(BUNDLE_SIZES_FILE_NAME)
: {bundleSizes: []};

const currentBuildResults = {
// Mutated inside build.js during a build run.
// We make a copy so that partial rebuilds don't erase other stats.
bundleSizes: [...prevBuildResults.bundleSizes],
bundleSizes: [],
};

function saveResults() {
// Write all the bundle sizes to a single JSON file.
fs.writeFileSync(
join('scripts', 'rollup', 'results.json'),
BUNDLE_SIZES_FILE_NAME,
JSON.stringify(currentBuildResults, null, 2)
);

// Also write each bundle size to a separate file. That way multiple build
// processes can run in parallel and generate separate size artifacts.
// A downstream job can combine them into a single JSON file.
mkdirp.sync('build/sizes');
currentBuildResults.bundleSizes.forEach(results => {
fs.writeFileSync(
join('build', 'sizes', `${results.filename}.size.json`),
JSON.stringify(results, null, 2)
);
});
}

function fractionalChange(prev, current) {
