- Remove directories that should no longer be present in the build.
- Add watching for new files.
- Add debounce and batching to reduce verbosity and avoid needless cleanup and "add to omni" steps when entire directories are affected.
parent d688ebc10f
commit 0478e66a47

3 changed files with 193 additions and 50 deletions
js-build/.eslintrc (new file, +5)

@@ -0,0 +1,5 @@
+{
+	"rules": {
+		"no-process-env": "off"
+	}
+}
@@ -32,7 +32,8 @@ function onProgress(sourcefile, outfile, operation) {
 	}
 	if (NODE_ENV == 'debug') {
 		console.log(`${colors.blue(`[${operation}]`)} ${sourcefile} -> ${outfile}`);
-	} else {
+	}
+	else {
 		console.log(`${colors.blue(`[${operation}]`)} ${sourcefile}`);
 	}
 }
@@ -42,7 +43,8 @@ async function getSignatures() {
 	var signatures = {};
 	try {
 		signatures = await fs.readJson(signaturesFile);
-	} catch (_) {
+	}
+	catch (_) {
 		// if signatures files doesn't exist, return empty object instead
 	}
 	return signatures;
@@ -54,27 +56,66 @@ async function writeSignatures(signatures) {
 	await fs.outputJson(signaturesFile, signatures);
 }
 
 
+async function recursivelyRemoveEmptyDirsUp(dirsSeen, invalidDirsCount = 0, removedDirsCount = 0) {
+	const newDirsSeen = new Set();
+	for (let dir of dirsSeen) {
+		try {
+			// check if dir from signatures exists in source
+			await fs.access(dir, fs.constants.F_OK);
+		}
+		catch (_) {
+			invalidDirsCount++;
+			NODE_ENV == 'debug' && console.log(`Dir ${dir} found in signatures but not in src, deleting from build`);
+			try {
+				await fs.remove(path.join('build', dir));
+				const parentDir = path.dirname(dir);
+				if (!dirsSeen.has(parentDir) && parentDir !== ROOT) {
+					newDirsSeen.add(path.dirname(dir));
+				}
+				removedDirsCount++;
+			}
+			catch (_) {
+				// dir wasn't in the build either
+			}
+		}
+	}
+	if (newDirsSeen.size) {
+		return recursivelyRemoveEmptyDirsUp(newDirsSeen, invalidDirsCount, removedDirsCount);
+	}
+	return { invalidDirsCount, removedDirsCount };
+}
+
 async function cleanUp(signatures) {
 	const t1 = Date.now();
+	let dirsSeen = new Set();
 	var removedCount = 0, invalidCount = 0;
+
 	for (let f of Object.keys(signatures)) {
+		let dir = path.dirname(f);
+		dirsSeen.add(dir);
 		try {
 			// check if file from signatures exists in source
 			await fs.access(f, fs.constants.F_OK);
-		} catch (_) {
+		}
+		catch (_) {
 			invalidCount++;
 			NODE_ENV == 'debug' && console.log(`File ${f} found in signatures but not in src, deleting from build`);
 			try {
 				await fs.remove(path.join('build', f));
 				removedCount++;
-			} catch (_) {
+			}
+			catch (_) {
 				// file wasn't in the build either
 			}
 			delete signatures[f];
 		}
 	}
+
+	const { invalidDirsCount, removedDirsCount } = await recursivelyRemoveEmptyDirsUp(dirsSeen);
+	invalidCount += invalidDirsCount;
+	removedCount += removedDirsCount;
+
 	const t2 = Date.now();
 	return {
 		action: 'cleanup',
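The new recursivelyRemoveEmptyDirsUp walks upward one level per pass: cleanUp records the parent directory of every signature file, and any recorded directory that no longer exists in src has its build/ counterpart removed, with its own parent queued for the next pass until the walk reaches ROOT. A simplified single-directory sketch of that upward walk, for illustration only (removeUp is a hypothetical name; fs-extra is an assumption based on the fs.readJson/fs.outputJson/fs.remove calls in the diff):

const fs = require('fs-extra');
const path = require('path');

// Hypothetical, single-path variant of the batched helper above.
async function removeUp(dir, root) {
	try {
		// dir still exists in src: nothing to clean here or above
		await fs.access(dir, fs.constants.F_OK);
	}
	catch (_) {
		// dir is gone from src: remove its build counterpart...
		await fs.remove(path.join('build', dir));
		const parent = path.dirname(dir);
		if (parent !== root) {
			// ...and keep climbing toward the root
			return removeUp(parent, root);
		}
	}
}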
@@ -97,8 +138,8 @@ async function getFileSignature(file) {
 function compareSignatures(a, b) {
 	return typeof a === 'object'
 		&& typeof b === 'object'
-		&& a != null
-		&& b != null
+		&& a !== null
+		&& b !== null
 		&& ['mode', 'mtime', 'isDirectory', 'isFile'].reduce((acc, k) => {
 			return acc ? k in a && k in b && a[k] == b[k] : false;
 		}, true);
@@ -108,7 +149,7 @@ function getPathRelativeTo(f, dirName) {
 	return path.relative(path.join(ROOT, dirName), path.join(ROOT, f));
 }
 
-const formatDirsForMatcher = dirs => {
+const formatDirsForMatcher = (dirs) => {
 	return dirs.length > 1 ? `{${dirs.join(',')}}` : dirs[0];
 };
 
@ -117,12 +158,22 @@ function comparePaths(actualPath, testedPath) {
|
||||||
return path.normalize(actualPath) === path.normalize(testedPath);
|
return path.normalize(actualPath) === path.normalize(testedPath);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function debounce(func, timeout = 200) {
|
||||||
|
let timer;
|
||||||
|
return (...args) => {
|
||||||
|
clearTimeout(timer);
|
||||||
|
timer = setTimeout(() => func.apply(this, args), timeout);
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
const envCheckTrue = env => !!(env && (parseInt(env) || env === true || env === "true"));
|
const envCheckTrue = env => !!(env && (parseInt(env) || env === true || env === "true"));
|
||||||
|
|
||||||
|
|
||||||
module.exports = {
|
module.exports = {
|
||||||
cleanUp,
|
cleanUp,
|
||||||
comparePaths,
|
comparePaths,
|
||||||
compareSignatures,
|
compareSignatures,
|
||||||
|
debounce,
|
||||||
envCheckTrue,
|
envCheckTrue,
|
||||||
formatDirsForMatcher,
|
formatDirsForMatcher,
|
||||||
getFileSignature,
|
getFileSignature,
|
||||||
|
|
|
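The debounce helper added here is the standard trailing-edge pattern: every call clears and re-arms the timer, so func runs once, timeout milliseconds after the last call in a burst. A hedged usage sketch (the rebuild callback is invented for illustration; debounce itself is the export added above):

const { debounce } = require('./utils');

// Fires once, ~200ms after the last call in the burst.
const rebuild = debounce(() => console.log('rebuilding once for the whole burst'));

for (let i = 0; i < 10; i++) {
	rebuild(); // earlier calls keep resetting the timer
}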
@@ -4,11 +4,12 @@ const chokidar = require('chokidar');
 const multimatch = require('multimatch');
 const { exec } = require('child_process');
 const { dirs, jsFiles, scssFiles, ignoreMask, copyDirs, symlinkFiles } = require('./config');
-const { envCheckTrue, onSuccess, onError, getSignatures, writeSignatures, cleanUp, formatDirsForMatcher } = require('./utils');
+const { debounce, envCheckTrue, onSuccess, onError, getSignatures, writeSignatures, cleanUp, formatDirsForMatcher } = require('./utils');
 const getJS = require('./js');
 const getSass = require('./sass');
 const getCopy = require('./copy');
 const getSymlinks = require('./symlinks');
+const colors = require('colors/safe');
 
 
 const ROOT = path.resolve(__dirname, '..');
@@ -47,7 +48,7 @@ var signatures;
 
 process.on('SIGINT', () => {
 	writeSignatures(signatures);
-	process.exit();
+	process.exit(); // eslint-disable-line no-process-exit
 });
 
 async function addOmniFiles(relPaths) {
@@ -78,15 +79,7 @@ async function addOmniFiles(relPaths) {
 	};
 }
 
-async function getWatch() {
-	try {
-		await fs.access(addOmniExecPath, fs.constants.F_OK);
-		shouldAddOmni = !envCheckTrue(process.env.SKIP_OMNI);
-	}
-	catch (_) {}
-
-	let watcher = chokidar.watch(source, { cwd: ROOT })
-		.on('change', async (path) => {
-			try {
+async function processFile(path) {
+	try {
 		var result = false;
 		if (multimatch(path, jsFiles).length && !multimatch(path, ignoreMask).length) {
@@ -107,24 +100,118 @@ async function getWatch() {
 		if (!result && multimatch(path, symlinks).length) {
 			result = await getSymlinks(path, { nodir: true }, signatures);
 		}
 
-			if (result) {
-				onSuccess(result);
-			}
-			onSuccess(await cleanUp(signatures));
-
-			if (shouldAddOmni && result.outFiles?.length) {
-				onSuccess(await addOmniFiles(result.outFiles));
-			}
 	}
 	catch (err) {
-		onError(err);
+		result = false;
 	}
-		})
-		.on('unlink', async () => {
-			const signatures = await getSignatures();
-			onSuccess(await cleanUp(signatures));
-		});
+	return result;
+}
+
+
+async function processFiles(mutex) {
+	mutex.isLocked = true;
+	try {
+		const t1 = Date.now();
+		let paths = Array.from(mutex.batch);
+		let results = await Promise.all(paths.map(processFile));
+		let t2 = Date.now();
+		let aggrResult;
+
+		if (results.length === 1) {
+			onSuccess(results[0]);
+			aggrResult = results[0];
+		}
+		else if (results.length > 1) {
+			aggrResult = results.reduce((acc, result) => {
+				if (!(result.action in acc)) {
+					acc.actions[result.action] = 0;
+				}
+				acc.actions[result.action] += result.count;
+				acc.count += result.count;
+				acc.outFiles = acc.outFiles.concat(result.outFiles || []);
+				return acc;
+			}, { actions: {}, count: 0, processingTime: t2 - t1, outFiles: [] });
+
+			onSuccess({
+				action: Object.keys(aggrResult.actions).length > 1 ? 'multiple' : Object.keys(aggrResult.actions)[0],
+				count: aggrResult.count,
+				processingTime: aggrResult.processingTime,
+			});
+		}
+
+		onSuccess(await cleanUp(signatures));
+
+		if (shouldAddOmni && aggrResult.outFiles?.length) {
+			onSuccess(await addOmniFiles(aggrResult.outFiles));
+		}
+	}
+	finally {
+		mutex.isLocked = false;
+		mutex.batch.clear();
+	}
+}
+
+
+async function batchProcessFiles(path, mutex, debouncedProcessFiles) {
+	let counter = 0;
+	let pollInterval = 250;
+	let started = Date.now();
+
+	// if there's a batch processing and another batch waiting, add to it
+	if (mutex.isLocked && mutex.nextBatch) {
+		mutex.nextBatch.add(path);
+		return;
+	}
+	// else if there's a batch processing, create a new batch
+	else if (mutex.isLocked) {
+		mutex.nextBatch = new Set([path]);
+	}
+	while (mutex.isLocked) {
+		if (counter === 0) {
+			console.log(colors.yellow(`Waiting for previous batch to finish...`));
+		}
+		if (++counter >= 40) {
+			onError(`Batch processing timeout after ${counter * pollInterval}ms. ${mutex.nextBatch.size} files in this batch have not been processed 😢`);
+			mutex.batch.clear();
+			mutex.nextBatch = null;
+			mutex.isLocked = false;
+			return;
+		}
+		process.env.NODE_ENV === 'debug' && console.log(`waiting ${pollInterval}ms...`);
+		await new Promise(resolve => setTimeout(resolve, pollInterval));
+	}
+	if (counter > 0) {
+		console.log(colors.green(`Previous batch finished in ${Date.now() - started}ms. ${mutex.nextBatch.size} files in the next batch.`));
+	}
+	if (mutex.nextBatch) {
+		mutex.batch = new Set([...mutex.nextBatch]);
+		mutex.nextBatch = null;
+	}
+	else {
+		mutex.batch.add(path);
+	}
+	debouncedProcessFiles();
+}
+
+
+async function getWatch() {
+	try {
+		await fs.access(addOmniExecPath, fs.constants.F_OK);
+		shouldAddOmni = !envCheckTrue(process.env.SKIP_OMNI);
+	}
+	catch (_) {}
+
+	let mutex = { batch: new Set(), isLocked: false };
+	const debouncedProcessFiles = debounce(() => processFiles(mutex));
+
+	let watcher = chokidar.watch(source, { cwd: ROOT, ignoreInitial: true })
+		.on('change', (path) => {
+			batchProcessFiles(path, mutex, debouncedProcessFiles);
+		})
+		.on('add', (path) => {
+			batchProcessFiles(path, mutex, debouncedProcessFiles);
+		})
+		.on('unlink', debounce(async () => {
+			onSuccess(await cleanUp(signatures));
+		}));
+
 	watcher.add(source);
 	console.log(`Watching files for changes (omni updates ${shouldAddOmni ? 'enabled' : 'disabled'})...`);
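Taken together: change and add events funnel into batchProcessFiles, which accumulates paths in mutex.batch (or in mutex.nextBatch while a batch is mid-flight) and re-arms the debounced processFiles; once the debounce window closes, processFiles locks the mutex, runs processFile over every batched path in parallel, and finishes with a single cleanUp and a single addOmniFiles call for all collected outFiles. A minimal self-contained sketch of that debounce-plus-batch pattern (names simplified and hypothetical; the real code additionally manages a nextBatch with a 40 × 250 ms polling timeout):

const batch = new Set();
let timer;

async function processAll() {
	const files = Array.from(batch);
	batch.clear();
	// stand-in for Promise.all(paths.map(processFile)) plus one cleanUp
	console.log(`processing ${files.length} file(s) in one pass:`, files);
}

function enqueue(file) {
	batch.add(file);
	clearTimeout(timer); // trailing-edge debounce, as in utils
	timer = setTimeout(processAll, 200);
}

enqueue('a.js');
enqueue('b.js'); // lands in the same window, so processAll runs once for both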