Better build process (#1248)
* Remove gulp, replace with custom scripts
* Symlink entire dirs where possible (fixes #1232)
* Significantly speed up subsequent builds (fixes #1238)
* Watch process now observes new/removed files, not only changed
* Add ignoreMask, exclude all files with names starting with a #
* Better logging during builds
* Update travis.yml to use new, non-gulp-based build
parent 3259b63081
commit b53fabbb58
19 changed files with 1149 additions and 1607 deletions
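Each script under scripts/ doubles as a module and a CLI entry point (node scripts/build.js, node scripts/watch.js, node scripts/clean.js, and so on). A minimal sketch of driving the same tasks programmatically, assuming it runs from the repository root; the driver file itself is illustrative and not part of this commit:

// Hypothetical driver (not part of this commit), showing how the exported
// task functions and the shared signature cache fit together.
const getJS = require('./scripts/js');
const { getSignatures, writeSignatures, cleanUp, onSuccess, onError } = require('./scripts/utils');
const { jsFiles, ignoreMask } = require('./scripts/config');

(async () => {
    try {
        const signatures = await getSignatures(); // load .signatures.json, or {} on first run
        onSuccess(await getJS(jsFiles, { ignore: ignoreMask }, signatures)); // Babel-compile changed .js files
        onSuccess(await cleanUp(signatures)); // drop build/ files whose sources are gone
        await writeSignatures(signatures); // persist signatures for the next run
    } catch (err) {
        process.exitCode = 1;
        onError(err);
    }
})();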
scripts/babel-worker.js (Normal file, 56 lines)
@@ -0,0 +1,56 @@
/* global onmessage: true, postMessage: false */
'use strict';

const fs = require('fs-extra');
const path = require('path');
const babel = require('babel-core');
const multimatch = require('multimatch');
const options = JSON.parse(fs.readFileSync('.babelrc'));
const cluster = require('cluster');

/* exported onmessage */
async function babelWorker(ev) {
    const t1 = Date.now();
    const sourcefile = ev.file;
    const outfile = path.join('build', sourcefile);
    const postError = (error) => {
        process.send({
            sourcefile,
            outfile,
            error
        });
    };

    var isSkipped = false;
    var transformed;

    try {
        let contents = await fs.readFile(sourcefile, 'utf8');
        if (sourcefile === 'resource/react-dom.js') {
            // patch react
            transformed = contents.replace(/ownerDocument\.createElement\((.*?)\)/gi, 'ownerDocument.createElementNS(DOMNamespaces.html, $1)');
        } else if ('ignore' in options && options.ignore.some(ignoreGlob => multimatch(sourcefile, ignoreGlob).length)) {
            transformed = contents;
            isSkipped = true;
        } else {
            try {
                transformed = babel.transform(contents, options).code;
            } catch (error) { return postError(`Babel error: ${error}`); }
        }

        await fs.outputFile(outfile, transformed);
        const t2 = Date.now();
        process.send({
            isSkipped,
            sourcefile,
            outfile,
            processingTime: t2 - t1
        });
    } catch (error) { return postError(`I/O error: ${error}`); }
}

module.exports = babelWorker;

if (cluster.isWorker) {
    process.on('message', babelWorker);
}
scripts/browserify.js (Normal file, 78 lines)
@@ -0,0 +1,78 @@
'use strict';

const browserify = require('browserify');
const globby = require('globby');
const path = require('path');
const fs = require('fs-extra');
const { getSignatures, writeSignatures, cleanUp, compareSignatures, getFileSignature, onSuccess, onError, onProgress } = require('./utils');

const { browserifyConfigs } = require('./config');
const ROOT = path.resolve(__dirname, '..');

async function getBrowserify(signatures) {
    const t1 = Date.now();
    var count = 0;
    var config, f, totalCount = 0;

    while ((config = browserifyConfigs.pop()) != null) {
        let files = await globby(config.src, { cwd: ROOT });
        totalCount += files.length;

        while ((f = files.pop()) != null) {
            let newFileSignature = await getFileSignature(f);
            const dest = path.join('build', config.dest);

            if (f in signatures) {
                if (compareSignatures(newFileSignature, signatures[f])) {
                    try {
                        await fs.access(dest, fs.constants.F_OK);
                        continue;
                    } catch (_) {
                        // file does not exist in build, fall back to browserifying
                    }
                }
            }
            try {
                await fs.mkdirp(path.dirname(dest));
                const bundleFs = fs.createWriteStream(dest);
                await new Promise((resolve, reject) => {
                    bundleFs
                        .on('error', reject)
                        .on('finish', resolve);
                    browserify(f, config.config).bundle().pipe(bundleFs);
                });

                onProgress(f, dest, 'browserify');
                signatures[f] = newFileSignature;
                count++;
            } catch (err) {
                throw new Error(`Failed on ${f}: ${err}`);
            }
        }
    }

    const t2 = Date.now();
    return {
        action: 'browserify',
        count,
        totalCount,
        processingTime: t2 - t1
    };
}

module.exports = getBrowserify;

if (require.main === module) {
    (async () => {
        try {
            const signatures = await getSignatures();
            onSuccess(await getBrowserify(signatures));
            onSuccess(await cleanUp(signatures));
            await writeSignatures(signatures);
        } catch (err) {
            process.exitCode = 1;
            global.isError = true;
            onError(err);
        }
    })();
}
scripts/build.js (Normal file, 44 lines)
@@ -0,0 +1,44 @@
const colors = require('colors/safe');

const getBrowserify = require('./browserify');
const getCopy = require('./copy');
const getJS = require('./js');
const getSass = require('./sass');
const getSymlinks = require('./symlinks');
const { formatDirsForMatcher, getSignatures, writeSignatures, cleanUp, onSuccess, onError } = require('./utils');
const { dirs, symlinkDirs, copyDirs, symlinkFiles, jsFiles, ignoreMask } = require('./config');

if (require.main === module) {
    (async () => {
        try {
            const t1 = Date.now();
            global.isError = false; // used to prevent further output to avoid concealing errors
            const symlinks = symlinkFiles
                .concat(dirs.map(d => `${d}/**`))
                .concat([`!${formatDirsForMatcher(dirs)}/**/*.js`])
                .concat([`!${formatDirsForMatcher(copyDirs)}/**`]);

            const signatures = await getSignatures();
            const results = await Promise.all([
                getBrowserify(signatures),
                getCopy(copyDirs.map(d => `${d}/**`), { ignore: ignoreMask }, signatures),
                getJS(jsFiles, { ignore: ignoreMask }, signatures),
                getSass('scss/*.scss', { root: 'scss', ignore: ignoreMask }, signatures),
                getSymlinks(symlinks, { nodir: true, ignore: ignoreMask }, signatures),
                getSymlinks(symlinkDirs, { ignore: ignoreMask }, signatures),
                cleanUp(signatures)
            ]);

            await writeSignatures(signatures);
            for (const result of results) {
                onSuccess(result);
            }
            const t2 = Date.now();
            console.log(colors.yellow(`Total build time ${t2 - t1}ms`));
        } catch (err) {
            process.exitCode = 1;
            global.isError = true;
            onError(err);
        }
    })();
}
scripts/clean.js (Normal file, 26 lines)
@@ -0,0 +1,26 @@
'use strict';

const path = require('path');
const fs = require('fs-extra');
const { onError } = require('./utils');

const ROOT = path.resolve(__dirname, '..');

async function getClean(source) {
    await fs.remove(source);
}

module.exports = getClean;

if (require.main === module) {
    (async () => {
        try {
            await getClean(path.join(ROOT, 'build'));
            await getClean(path.join(ROOT, '.signatures.json'));
        } catch (err) {
            process.exitCode = 1;
            global.isError = true;
            onError(err);
        }
    })();
}
scripts/config.js (Normal file, 60 lines)
@@ -0,0 +1,60 @@
// list of folders from which .js files are compiled and non-js files are symlinked
const dirs = [
    'chrome',
    'components',
    'defaults',
    'resource',
    'resource/web-library',
    'test',
    'test/resource/chai',
    'test/resource/chai-as-promised',
    'test/resource/mocha'
];

// list of folders from which all files are symlinked
const symlinkDirs = [
    'styles',
    'translators'
];

// list of folders which are copied to the build folder
const copyDirs = [
    'test/tests/data' // browser follows symlinks when loading test data,
                      // triggering false-positive test results with mismatched URIs
];

// list of files from the root folder to symlink
const symlinkFiles = [
    'chrome.manifest', 'install.rdf', 'update.rdf'
];


// these files will be browserified during the build
const browserifyConfigs = [
    {
        src: 'node_modules/sinon/lib/sinon.js',
        dest: 'test/resource/sinon.js',
        config: {
            standalone: 'sinon'
        }
    },
    {
        src: 'node_modules/chai-as-promised/lib/chai-as-promised.js',
        dest: 'test/resource/chai-as-promised.js',
        config: {
            standalone: 'chaiAsPromised'
        }
    }
];

// exclude mask used for js, copy, symlink and sass tasks
const ignoreMask = ['**/#*.*'];

const jsFiles = [
    `{${dirs.join(',')}}/**/*.js`,
    `!{${symlinkDirs.concat(copyDirs).join(',')}}/**/*.js`
];

module.exports = {
    dirs, symlinkDirs, copyDirs, symlinkFiles, browserifyConfigs, jsFiles, ignoreMask
};
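The ignoreMask above is passed as the ignore option to globby (and checked with multimatch in babel-worker.js), so any file whose name starts with # is skipped by the js, copy, symlink, and sass tasks. A small sketch of that effect, assuming it runs from the repository root; the example path in the comment is hypothetical:

// Sketch: '#'-prefixed files are dropped from the glob results by ignoreMask.
const globby = require('globby');
const { jsFiles, ignoreMask } = require('./scripts/config');

globby(jsFiles, { ignore: ignoreMask })
    .then(files => {
        // e.g. 'chrome/content/zotero/foo.js' would be listed, 'chrome/content/zotero/#foo.js' would not
        console.log(`${files.length} .js files selected for compilation`);
    });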
scripts/copy.js (Normal file, 67 lines)
@@ -0,0 +1,67 @@
'use strict';

const globby = require('globby');
const path = require('path');
const fs = require('fs-extra');
const { getSignatures, writeSignatures, cleanUp, compareSignatures, getFileSignature, onSuccess, onError, onProgress } = require('./utils');
const { copyDirs, ignoreMask } = require('./config');

const ROOT = path.resolve(__dirname, '..');

async function getCopy(source, options, signatures) {
    const t1 = Date.now();
    const files = await globby(source, Object.assign({ cwd: ROOT }, options));
    const totalCount = files.length;
    var count = 0;
    var f;

    while ((f = files.pop()) != null) {
        let newFileSignature = await getFileSignature(f);
        const dest = path.join('build', f);

        if (f in signatures) {
            if (compareSignatures(newFileSignature, signatures[f])) {
                try {
                    await fs.access(dest, fs.constants.F_OK);
                    continue;
                } catch (_) {
                    // file does not exist in build, fall back to copying
                }
            }
        }
        try {
            await fs.mkdirp(path.dirname(dest));
            await fs.copy(f, dest);
            onProgress(f, dest, 'cp');
            signatures[f] = newFileSignature;
            count++;
        } catch (err) {
            throw new Error(`Failed on ${f}: ${err}`);
        }
    }

    const t2 = Date.now();
    return {
        action: 'copy',
        count,
        totalCount,
        processingTime: t2 - t1
    };
}

module.exports = getCopy;

if (require.main === module) {
    (async () => {
        try {
            const signatures = await getSignatures();
            onSuccess(await getCopy(copyDirs.map(d => `${d}/**`), { ignore: ignoreMask }, signatures));
            onSuccess(await cleanUp(signatures));
            await writeSignatures(signatures);
        } catch (err) {
            process.exitCode = 1;
            global.isError = true;
            onError(err);
        }
    })();
}
scripts/js.js (Normal file, 130 lines)
@@ -0,0 +1,130 @@
const globby = require('globby');
const path = require('path');
const os = require('os');
const fs = require('fs-extra');
const cluster = require('cluster');
const { getSignatures, compareSignatures, getFileSignature, writeSignatures, cleanUp, onSuccess, onError, onProgress } = require('./utils');
const { jsFiles, ignoreMask } = require('./config');

const NODE_ENV = process.env.NODE_ENV;
const ROOT = path.resolve(__dirname, '..');

async function getJS(source, options, signatures) {
    const t1 = Date.now();
    const matchingJSFiles = await globby(source, Object.assign({ cwd: ROOT }, options));
    const cpuCount = os.cpus().length;
    const totalCount = matchingJSFiles.length;
    var count = 0;
    var isError = false;

    cluster.setupMaster({
        exec: path.join(__dirname, 'babel-worker.js')
    });

    // check signatures, collect signatures for files to be processed
    const newFilesSignatures = {};
    const filesForProcessing = [];
    var f;
    while ((f = matchingJSFiles.pop()) != null) {
        const newFileSignature = await getFileSignature(f);
        const dest = path.join('build', f);
        f = path.normalize(f);
        if (f in signatures) {
            if (compareSignatures(newFileSignature, signatures[f])) {
                try {
                    await fs.access(dest, fs.constants.F_OK);
                    continue;
                } catch (_) {
                    // file does not exist in build, fall back to re-compiling
                }
            }
        }
        filesForProcessing.push(f);
        newFilesSignatures[f] = newFileSignature;
    }

    // shortcut if no files need rebuilding
    if (filesForProcessing.length === 0) {
        const t2 = Date.now();
        return Promise.resolve({
            action: 'js',
            count,
            totalCount,
            processingTime: t2 - t1
        });
    }

    // distribute processing among workers
    const workerCount = Math.min(cpuCount, filesForProcessing.length);
    var workersActive = workerCount;
    NODE_ENV == 'debug' && console.log(`Will process ${filesForProcessing.length} files using ${workerCount} processes`);
    return new Promise((resolve, reject) => {
        for (let i = 0; i < workerCount; i++) {
            var worker = cluster.fork();

            worker.on('message', function(ev) {
                if (ev.error) {
                    isError = true;
                    let errorMsg = `Failed while processing ${ev.sourcefile}: ${ev.error}`;
                    reject(errorMsg);
                } else {
                    signatures[ev.sourcefile] = newFilesSignatures[ev.sourcefile];

                    if (ev.isSkipped) {
                        NODE_ENV == 'debug' && console.log(`process ${this.id} SKIPPED ${ev.sourcefile}`);
                    } else {
                        NODE_ENV == 'debug' && console.log(`process ${this.id} took ${ev.processingTime} ms to process ${ev.sourcefile} into ${ev.outfile}`);
                        NODE_ENV != 'debug' && onProgress(ev.sourcefile, ev.outfile, 'js');
                        count++;
                    }
                }

                let nextFile = filesForProcessing.pop();

                if (!isError && nextFile) {
                    NODE_ENV == 'debug' && console.log(`process ${this.id} scheduled to process ${nextFile}`);
                    this.send({
                        file: nextFile
                    });
                } else {
                    if (this.isConnected()) {
                        this.kill();
                    }
                    NODE_ENV == 'debug' && console.log(`process ${this.id} has terminated`);
                    if (!--workersActive) {
                        const t2 = Date.now();
                        resolve({
                            action: 'js',
                            count,
                            totalCount,
                            processingTime: t2 - t1
                        });
                    }
                }
            });

            let nextFile = filesForProcessing.pop();
            NODE_ENV == 'debug' && console.log(`process ${worker.id} scheduled to process ${nextFile}`);
            worker.send({
                file: nextFile
            });
        }
    });
}

module.exports = getJS;

if (require.main === module) {
    (async () => {
        try {
            const signatures = await getSignatures();
            onSuccess(await getJS(jsFiles, { ignore: ignoreMask }, signatures));
            onSuccess(await cleanUp(signatures));
            await writeSignatures(signatures);
        } catch (err) {
            process.exitCode = 1;
            global.isError = true;
            onError(err);
        }
    })();
}
scripts/sass.js (Normal file, 73 lines)
@@ -0,0 +1,73 @@
'use strict';

const universalify = require('universalify');
const sass = require('node-sass');
const globby = require('globby');
const path = require('path');
const fs = require('fs-extra');
const { getSignatures, writeSignatures, cleanUp, compareSignatures, getFileSignature, onSuccess, onError, onProgress, getPathRelativeTo } = require('./utils');
const { ignoreMask } = require('./config');
const sassRender = universalify.fromCallback(sass.render);

const ROOT = path.resolve(__dirname, '..');

async function getSass(source, options, signatures) {
    const t1 = Date.now();
    const files = await globby(source, Object.assign({ cwd: ROOT }, options));
    const totalCount = files.length;
    var count = 0;
    var f;

    while ((f = files.pop()) != null) {
        let newFileSignature = await getFileSignature(f);
        const dest = path.join('build', 'chrome', 'skin', 'default', 'zotero', 'components', getPathRelativeTo(f, 'scss'));

        if (f in signatures) {
            if (compareSignatures(newFileSignature, signatures[f])) {
                try {
                    await fs.access(dest, fs.constants.F_OK);
                    continue;
                } catch (_) {
                    // file does not exist in build, fall back to re-compiling
                }
            }
        }
        try {
            const result = await sassRender({
                file: f
            });

            await fs.outputFile(dest, result.css);
            onProgress(f, dest, 'sass');
            signatures[f] = newFileSignature;
            count++;
        } catch (err) {
            throw new Error(`Failed on ${f}: ${err}`);
        }
    }

    const t2 = Date.now();
    return {
        action: 'sass',
        count,
        totalCount,
        processingTime: t2 - t1
    };
}

module.exports = getSass;

if (require.main === module) {
    (async () => {
        try {
            const signatures = await getSignatures();
            onSuccess(await getSass('scss/*.scss', { root: 'scss', ignore: ignoreMask }, signatures));
            onSuccess(await cleanUp(signatures));
            await writeSignatures(signatures);
        } catch (err) {
            process.exitCode = 1;
            global.isError = true;
            onError(err);
        }
    })();
}
scripts/symlinks.js (Normal file, 86 lines)
@@ -0,0 +1,86 @@
'use strict';

const path = require('path');
const fs = require('fs-extra');
const globby = require('globby');

const { isWindows, formatDirsForMatcher, getSignatures, writeSignatures, cleanUp, onSuccess, onError, onProgress } = require('./utils');
const { dirs, symlinkDirs, copyDirs, symlinkFiles, ignoreMask } = require('./config');
const ROOT = path.resolve(__dirname, '..');


// @TODO: change signature to getSymlinks(source, options, signatures)
// here and elsewhere
//
// run symlinks twice, once for files (with nodir: true)
// once for dirs
async function getSymlinks(source, options, signatures) {
    const t1 = Date.now();
    const files = await globby(source, Object.assign({ cwd: ROOT }, options));
    const filesDonePreviously = [];
    for (const [f, signature] of Object.entries(signatures)) {
        if ('isSymlinked' in signature && signature.isSymlinked) {
            try {
                await fs.access(path.join('build', f), fs.constants.F_OK);
                // file found in signatures and build/ dir, skip
                filesDonePreviously.push(f);
            } catch (_) {
                // file not found, needs symlinking
            }
        }
    }

    const filesToProcess = files.filter(f => !filesDonePreviously.includes(f));
    const filesProcessedCount = filesToProcess.length;

    var f;
    while ((f = filesToProcess.pop()) != null) {
        const dest = path.join('build', f);
        try {
            if (isWindows) {
                await fs.copy(f, dest);
            } else {
                await fs.ensureSymlink(f, dest);
            }
            signatures[f] = {
                isSymlinked: true
            };
            onProgress(f, dest, 'ln');
        } catch (err) {
            throw new Error(`Failed on ${f}: ${err}`);
        }
    }

    const t2 = Date.now();

    return {
        action: 'symlink',
        count: filesProcessedCount,
        totalCount: files.length,
        processingTime: t2 - t1
    };
}


module.exports = getSymlinks;

if (require.main === module) {
    (async () => {
        try {
            const source = symlinkFiles
                .concat(dirs.map(d => `${d}/**`))
                .concat([`!${formatDirsForMatcher(dirs)}/**/*.js`])
                .concat([`!${formatDirsForMatcher(copyDirs)}/**`]);

            const signatures = await getSignatures();
            onSuccess(await getSymlinks(source, { nodir: true, ignore: ignoreMask }, signatures));
            onSuccess(await getSymlinks(symlinkDirs, {}, signatures));
            onSuccess(await cleanUp(signatures));
            await writeSignatures(signatures);
        } catch (err) {
            process.exitCode = 1;
            global.isError = true;
            onError(err);
        }
    })();
}
scripts/utils.js (Normal file, 126 lines)
@@ -0,0 +1,126 @@
const path = require('path');
const fs = require('fs-extra');
const colors = require('colors/safe');
const green = colors.green;
const blue = colors.blue;
const yellow = colors.yellow;
const isWindows = /^win/.test(process.platform);

const ROOT = path.resolve(__dirname, '..');
const NODE_ENV = process.env.NODE_ENV;


function onError(err) {
    console.log(colors.red('Error:'), err);
}

function onSuccess(result) {
    var msg = `${green('Success:')} ${blue(`[${result.action}]`)} ${result.count} files processed`;
    if (result.totalCount) {
        msg += ` (out of total ${result.totalCount} matched)`;
    }

    msg += ` [${yellow(`${result.processingTime}ms`)}]`;

    console.log(msg);
}

function onProgress(sourcefile, outfile, operation) {
    if ('isError' in global && global.isError) {
        return;
    }
    if (NODE_ENV == 'debug') {
        console.log(`${colors.blue(`[${operation}]`)} ${sourcefile} -> ${outfile}`);
    } else {
        console.log(`${colors.blue(`[${operation}]`)} ${sourcefile}`);
    }
}

async function getSignatures() {
    let signaturesFile = path.resolve(ROOT, '.signatures.json');
    var signatures = {};
    try {
        signatures = await fs.readJson(signaturesFile);
    } catch (_) {
        // if the signatures file doesn't exist, return an empty object instead
    }
    return signatures;
}

async function writeSignatures(signatures) {
    let signaturesFile = path.resolve(ROOT, '.signatures.json');
    NODE_ENV == 'debug' && console.log('writing signatures to .signatures.json');
    await fs.outputJson(signaturesFile, signatures);
}

async function cleanUp(signatures) {
    const t1 = Date.now();
    var removedCount = 0, invalidCount = 0;

    for (let f of Object.keys(signatures)) {
        try {
            // check if file from signatures exists in source
            await fs.access(f, fs.constants.F_OK);
        } catch (_) {
            invalidCount++;
            NODE_ENV == 'debug' && console.log(`File ${f} found in signatures but not in src, deleting from build`);
            try {
                await fs.remove(path.join('build', f));
                removedCount++;
            } catch (_) {
                // file wasn't in the build either
            }
            delete signatures[f];
        }
    }

    const t2 = Date.now();
    return {
        action: 'cleanup',
        count: removedCount,
        totalCount: invalidCount,
        processingTime: t2 - t1
    };
}

async function getFileSignature(file) {
    let stats = await fs.stat(file);
    return {
        mode: stats.mode,
        mtime: stats.mtimeMs || stats.mtime.getTime(),
        isDirectory: stats.isDirectory(),
        isFile: stats.isFile()
    };
}

function compareSignatures(a, b) {
    return typeof a === 'object'
        && typeof b === 'object'
        && a != null
        && b != null
        && ['mode', 'mtime', 'isDirectory', 'isFile'].reduce((acc, k) => {
            return acc ? k in a && k in b && a[k] == b[k] : false;
        }, true);
}

function getPathRelativeTo(f, dirName) {
    return path.relative(path.join(ROOT, dirName), path.join(ROOT, f));
}

const formatDirsForMatcher = dirs => {
    return dirs.length > 1 ? `{${dirs.join(',')}}` : dirs[0];
};

module.exports = {
    isWindows,
    onError,
    onProgress,
    onSuccess,
    cleanUp,
    getSignatures,
    getFileSignature,
    compareSignatures,
    writeSignatures,
    getPathRelativeTo,
    formatDirsForMatcher
};
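The incremental-build skip in every task above hinges on getFileSignature and compareSignatures: a file is only rebuilt when its stored signature no longer matches. A short sketch of that check, assuming it runs from the repository root; chrome.manifest is just a convenient existing file:

// Sketch: a signature is { mode, mtime, isDirectory, isFile }; compareSignatures
// returns true only when all four fields match, so touching a file (new mtime)
// forces it to be rebuilt on the next run.
const { getFileSignature, compareSignatures } = require('./scripts/utils');

(async () => {
    const sig = await getFileSignature('chrome.manifest');
    console.log(compareSignatures(sig, sig)); // true
    console.log(compareSignatures(sig, Object.assign({}, sig, { mtime: 0 }))); // false
    console.log(compareSignatures(sig, undefined)); // false
})();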
scripts/watch.js (Normal file, 81 lines)
@@ -0,0 +1,81 @@
const path = require('path');
const chokidar = require('chokidar');
const multimatch = require('multimatch');
const { dirs, jsFiles, ignoreMask, copyDirs, symlinkFiles } = require('./config');
const { onSuccess, onError, getSignatures, writeSignatures, cleanUp, formatDirsForMatcher } = require('./utils');
const getJS = require('./js');
const getSass = require('./sass');
const getCopy = require('./copy');
const getSymlinks = require('./symlinks');


const ROOT = path.resolve(__dirname, '..');
const source = [
    'chrome',
    'components',
    'defaults',
    'resource',
    'scss',
    'test',
    'styles',
    'translators',
    'scss',
    'chrome/**',
    'components/**',
    'defaults/**',
    'resource/**',
    'scss/**',
    'test/**',
    'styles/**',
    'translators/**',
    'scss/**'
];

const symlinks = symlinkFiles
    .concat(dirs.map(d => `${d}/**`))
    .concat([`!${formatDirsForMatcher(dirs)}/**/*.js`])
    .concat([`!${formatDirsForMatcher(copyDirs)}/**`]);

var signatures;

process.on('SIGINT', () => {
    writeSignatures(signatures);
    process.exit();
});

function getWatch() {
    let watcher = chokidar.watch(source, { cwd: ROOT })
        .on('change', async (path) => {
            try {
                if (multimatch(path, jsFiles).length && !multimatch(path, ignoreMask).length) {
                    onSuccess(await getJS(path, { ignore: ignoreMask }, signatures));
                } else if (multimatch(path, 'scss/*.scss').length) {
                    onSuccess(await getSass(path, {}, signatures));
                } else if (multimatch(path, copyDirs.map(d => `${d}/**`)).length) {
                    onSuccess(await getCopy(path, {}, signatures));
                } else if (multimatch(path, symlinks).length) {
                    onSuccess(await getSymlinks(path, { nodir: true }, signatures));
                }
                onSuccess(await cleanUp(signatures));
            } catch (err) {
                onError(err);
            }
        })
        .on('unlink', async () => {
            const signatures = await getSignatures();
            onSuccess(await cleanUp(signatures));
        });

    watcher.add(source);
    console.log('Watching files for changes...');
}

module.exports = getWatch;

if (require.main === module) {
    (async () => {
        signatures = await getSignatures();
        getWatch();
    })();
}