Rename scripts folder to js-build

Dan Stillman 2023-04-23 04:36:18 -04:00 committed by Dan Stillman
parent c07eccb468
commit ae0091fbae
15 changed files with 7 additions and 7 deletions

@@ -1,94 +0,0 @@
/* global onmessage: true, postMessage: false */
'use strict';
const fs = require('fs-extra');
const path = require('path');
const babel = require('@babel/core');
const multimatch = require('multimatch');
const options = JSON.parse(fs.readFileSync('.babelrc'));
const cluster = require('cluster');
const { comparePaths } = require('./utils');
/* exported onmessage */
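// Runs as a cluster worker forked by js.js: receives { file } messages and replies via
// process.send() with { sourcefile, outfile, error } on failure, or
// { isSkipped, sourcefile, outfile, processingTime } on success.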
async function babelWorker(ev) {
const t1 = Date.now();
const sourcefile = ev.file;
const localOptions = {
filename: sourcefile
};
const outfile = path.join('build', sourcefile.replace('.jsx', '.js'));
const postError = (error) => {
process.send({
sourcefile,
outfile,
error
});
};
var isSkipped = false;
var transformed;
try {
let contents = await fs.readFile(sourcefile, 'utf8');
// Patch react
if (comparePaths(sourcefile, 'resource/react.js')) {
transformed = contents.replace('instanceof Error', '.constructor.name == "Error"')
}
// Patch react-dom
else if (comparePaths(sourcefile, 'resource/react-dom.js')) {
transformed = contents.replace(/ ownerDocument\.createElement\((.*?)\)/gi, 'ownerDocument.createElementNS(HTML_NAMESPACE, $1)')
.replace('element instanceof win.HTMLIFrameElement',
'typeof element != "undefined" && element.tagName.toLowerCase() == "iframe"')
.replace("isInputEventSupported = false", 'isInputEventSupported = true');
}
// Patch react-virtualized
else if (comparePaths(sourcefile, 'resource/react-virtualized.js')) {
transformed = contents.replace('scrollDiv = document.createElement("div")', 'scrollDiv = document.createElementNS("http://www.w3.org/1999/xhtml", "div")')
.replace('document.body.appendChild(scrollDiv)', 'document.documentElement.appendChild(scrollDiv)')
.replace('document.body.removeChild(scrollDiv)', 'document.documentElement.removeChild(scrollDiv)');
}
// Patch single-file
else if (sourcefile === 'resource/SingleFile/lib/single-file.js') {
// Works around what I assume is a bug in Firefox. We create a SingleFile
// sandbox based on the document.defaultView of a hidden browser.
// The minified single-file then uses globalThis.Set, which for some reason
// doesn't properly support iteration and throws an error, while the normal
// `Set` object accessible in the sandbox does not have this problem.
// I've tried using a proxy for globalThis with a custom Set, but that
// manifested its own issues, and setting globalThis to the sandbox broke
// the monkey-patching that SingleFile does on default interfaces.
transformed = contents.replace('globalThis.Set', 'Set')
.replace('globalThis.Map', 'Map');
}
else if ('ignore' in options && options.ignore.some(ignoreGlob => multimatch(sourcefile, ignoreGlob).length)) {
transformed = contents;
isSkipped = true;
} else {
try {
({ code: transformed } = await babel.transformAsync(
contents,
Object.assign(
localOptions,
options
)
));
} catch (error) { return postError(`Babel error: ${error}`);}
}
await fs.outputFile(outfile, transformed);
const t2 = Date.now();
process.send({
isSkipped,
sourcefile,
outfile,
processingTime: t2 - t1
});
} catch (error) { return postError(`I/O error: ${error}`); }
}
module.exports = babelWorker;
if (cluster.isWorker) {
process.on('message', babelWorker);
}

@@ -1,83 +0,0 @@
'use strict';
const browserify = require('browserify');
const globby = require('globby');
const path = require('path');
const fs = require('fs-extra');
const { getSignatures, writeSignatures, cleanUp, compareSignatures, getFileSignature, onSuccess, onError, onProgress } = require('./utils');
const { browserifyConfigs } = require('./config');
const ROOT = path.resolve(__dirname, '..');
async function getBrowserify(signatures) {
const t1 = Date.now();
const outFiles = [];
var config, f, totalCount = 0;
while ((config = browserifyConfigs.pop()) != null) {
let files = await globby(config.src, { cwd: ROOT });
totalCount += files.length;
while ((f = files.pop()) != null) {
let newFileSignature = await getFileSignature(f);
const dest = path.join('build', config.dest);
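// Skip re-bundling when the stored signature still matches and the bundle already exists in build/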
if (f in signatures) {
if (compareSignatures(newFileSignature, signatures[f])) {
try {
await fs.access(dest, fs.constants.F_OK);
continue;
} catch (_) {
// file does not exist in build, fall back to browserifying
}
}
}
try {
await fs.mkdirp(path.dirname(dest));
const bundleFs = fs.createWriteStream(dest);
await new Promise((resolve, reject) => {
bundleFs
.on('error', reject)
.on('finish', resolve);
browserify(f, config.config)
.external('react')
.external('react-dom')
.bundle()
.pipe(bundleFs);
});
onProgress(f, dest, 'browserify');
signatures[f] = newFileSignature;
outFiles.push(dest);
} catch (err) {
throw new Error(`Failed on ${f}: ${err}`);
}
}
}
const t2 = Date.now();
return {
action: 'browserify',
count: outFiles.length,
outFiles,
totalCount,
processingTime: t2 - t1
};
}
module.exports = getBrowserify;
if (require.main === module) {
(async () => {
try {
const signatures = await getSignatures();
onSuccess(await getBrowserify(signatures));
onSuccess(await cleanUp(signatures));
await writeSignatures(signatures);
} catch (err) {
process.exitCode = 1;
global.isError = true;
onError(err);
}
})();
}

@@ -1,56 +0,0 @@
const colors = require('colors/safe');
const getBrowserify = require('./browserify');
const getCopy = require('./copy');
const getJS = require('./js');
const getSass = require('./sass');
const getSymlinks = require('./symlinks');
const getPDFReader = require('./pdf-reader');
const getPDFWorker = require('./pdf-worker');
const getZoteroNoteEditor = require('./note-editor');
const { formatDirsForMatcher, getSignatures, writeSignatures, cleanUp, onSuccess, onError} = require('./utils');
const { dirs, symlinkDirs, copyDirs, symlinkFiles, jsFiles, scssFiles, ignoreMask } = require('./config');
if (require.main === module) {
(async () => {
try {
const t1 = Date.now();
global.isError = false; // suppresses further progress output so errors aren't buried
const symlinks = symlinkFiles
.concat(dirs.map(d => `${d}/**`))
.concat([`!${formatDirsForMatcher(dirs)}/**/*.js`])
.concat([`!${formatDirsForMatcher(dirs)}/**/*.jsx`])
.concat([`!${formatDirsForMatcher(dirs)}/**/*.scss`])
.concat([`!${formatDirsForMatcher(copyDirs)}/**`]);
const signatures = await getSignatures();
// Check that all files in signatures are still present in src; needed to avoid a problem
// where a file that used to be symlinked is now compiled, which would pollute source files
onSuccess(await cleanUp(signatures));
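// All build tasks run concurrently against the shared signatures object; their results
// are reported once everything has settled, and the updated signatures are written out below.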
const results = await Promise.all([
getBrowserify(signatures),
getCopy(copyDirs.map(d => `${d}/**`), { ignore: ignoreMask }, signatures),
getJS(jsFiles, { ignore: ignoreMask }, signatures),
...scssFiles.map(scf => getSass(scf, { ignore: ignoreMask }, signatures)),
getSymlinks(symlinks, { nodir: true, ignore: ignoreMask }, signatures),
getSymlinks(symlinkDirs, { ignore: ignoreMask }, signatures),
getPDFReader(signatures),
getPDFWorker(signatures),
getZoteroNoteEditor(signatures)
]);
await writeSignatures(signatures);
for (const result of results) {
onSuccess(result);
}
const t2 = Date.now();
console.log(colors.yellow(`Total build time ${t2 - t1}ms`));
} catch (err) {
process.exitCode = 1;
global.isError = true;
onError(err);
}
})();
}

@@ -1,29 +0,0 @@
'use strict';
const path = require('path');
const fs = require('fs-extra');
const { onError } = require('./utils');
const ROOT = path.resolve(__dirname, '..');
async function getClean(source) {
await fs.remove(source);
}
module.exports = getClean;
if (require.main === module) {
(async () => {
try {
await getClean(path.join(ROOT, 'build'));
await getClean(path.join(ROOT, '.signatures.json'));
await getClean(path.join(ROOT, 'pdf-reader/build'));
await getClean(path.join(ROOT, 'pdf-worker/build'));
await getClean(path.join(ROOT, 'note-editor/build'));
} catch (err) {
process.exitCode = 1;
global.isError = true;
onError(err);
}
})();
}

@@ -1,137 +0,0 @@
// list of folders whose .js files are compiled and whose non-JS files are symlinked
const dirs = [
'chrome',
'components',
'defaults',
'test',
'test/resource/chai',
'test/resource/chai-as-promised',
'test/resource/mocha'
];
// list of folders that are symlinked
const symlinkDirs = [
'chrome/content/zotero/xpcom/rdf',
'chrome/content/zotero/xpcom/translate/src',
'styles',
'translators',
];
// list of folders which are copied to the build folder
const copyDirs = [
'test/tests/data' // browser follows symlinks when loading test data
// triggering false-positive test results with mismatched URIs
];
// list of files from root folder to symlink
const symlinkFiles = [
'chrome.manifest',
'install.rdf',
// React needs to be patched by babel-worker.js, so symlink all files in resource/ except for
// those. Babel transpilation for React is still disabled in .babelrc.
'resource/**/*',
'!resource/react.js',
'!resource/react-dom.js',
'!resource/react-virtualized.js',
// Only include the lib directory of SingleFile,
// and patch single-file.js in babel-worker.js, similar to React
'!resource/SingleFile/**/*',
'resource/SingleFile/lib/**/*',
'!resource/SingleFile/lib/single-file.js',
// We only need a couple of Ace Editor files
'!resource/ace/**/*',
'resource/ace/ace.js',
// Enable for autocomplete
//'resource/ace/ext-language_tools.js',
'resource/ace/ext-searchbox.js',
'resource/ace/keybinding-emacs.js',
'resource/ace/keybinding-vim.js',
'resource/ace/mode-javascript.js',
'resource/ace/theme-chrome.js',
'resource/ace/theme-monokai.js',
'resource/ace/worker-javascript.js',
// Feed *.idl files are for documentation only
'!resource/feeds/*.idl',
'update.rdf',
'!chrome/skin/default/zotero/**/*.scss',
'!resource/citeproc_rs_wasm.js',
// We only need a few Monaco languages
'!resource/vs/**/*',
'resource/vs/loader.js',
'resource/vs/editor/editor.main.{js,css,nls.js}',
'resource/vs/base/**/*',
'resource/vs/basic-languages/javascript/*.js',
'resource/vs/basic-languages/typescript/*.js',
'resource/vs/basic-languages/xml/*.js',
'resource/vs/language/typescript/*.js',
'resource/vs/language/json/*.js',
];
// these files will be browserified during the build
const browserifyConfigs = [
{
src: 'node_modules/react-select/dist/react-select.cjs.prod.js',
dest: 'resource/react-select.js',
config: {
standalone: 'react-select'
}
},
{
src: 'node_modules/url/url.js',
dest: 'resource/url.js',
config: {
standalone: 'url'
}
},
{
src: 'node_modules/sinon/lib/sinon.js',
dest: 'test/resource/sinon.js',
config: {
standalone: 'sinon'
}
},
{
src: 'node_modules/chai-as-promised/lib/chai-as-promised.js',
dest: 'test/resource/chai-as-promised.js',
config: {
standalone: 'chaiAsPromised'
}
}
];
// exclude mask used for js, copy, symlink and sass tasks
const ignoreMask = [
'**/#*',
'resource/schema/global/README.md',
'resource/schema/global/schema.json.gz',
'resource/schema/global/scripts/*',
'chrome/content/zotero/xpcom/translate/example/**/*',
'chrome/content/zotero/xpcom/translate/README.md',
'chrome/content/zotero/xpcom/utilities/node_modules/**/*',
'chrome/content/zotero/xpcom/utilities/test/**/*',
];
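// Globs for files handled by the js (Babel) task: everything under `dirs`, minus anything
// that is symlinked or copied verbatim, plus the resource files that need patching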
const jsFiles = [
`{${dirs.join(',')}}/**/*.js`,
`{${dirs.join(',')}}/**/*.jsx`,
`!{${symlinkDirs.concat(copyDirs).join(',')}}/**/*.js`,
`!{${symlinkDirs.concat(copyDirs).join(',')}}/**/*.jsx`,
// Special handling for React -- see note above
'resource/react.js',
'resource/react-dom.js',
'resource/react-virtualized.js',
'resource/SingleFile/lib/single-file.js',
'resource/citeproc_rs_wasm.js',
];
const scssFiles = [
'scss/**/*.scss',
'chrome/skin/default/zotero/**/*.scss'
];
const buildsURL = 'https://zotero-download.s3.amazonaws.com/ci/';
module.exports = {
dirs, symlinkDirs, copyDirs, symlinkFiles, browserifyConfigs, jsFiles, scssFiles, ignoreMask, buildsURL
};

@@ -1,68 +0,0 @@
'use strict';
const globby = require('globby');
const path = require('path');
const fs = require('fs-extra');
const { getSignatures, writeSignatures, cleanUp, compareSignatures, getFileSignature, onSuccess, onError, onProgress } = require('./utils');
const { copyDirs, ignoreMask } = require('./config');
const ROOT = path.resolve(__dirname, '..');
async function getCopy(source, options, signatures) {
const t1 = Date.now();
const files = await globby(source, Object.assign({ cwd: ROOT }, options ));
const totalCount = files.length;
const outFiles = [];
var f;
while ((f = files.pop()) != null) {
let newFileSignature = await getFileSignature(f);
const dest = path.join('build', f);
if (f in signatures) {
if (compareSignatures(newFileSignature, signatures[f])) {
try {
await fs.access(dest, fs.constants.F_OK);
continue;
} catch (_) {
// file does not exist in build, fall back to copying
}
}
}
try {
await fs.mkdirp(path.dirname(dest));
await fs.copy(f, dest);
onProgress(f, dest, 'cp');
signatures[f] = newFileSignature;
outFiles.push(dest);
} catch (err) {
throw new Error(`Failed on ${f}: ${err}`);
}
}
const t2 = Date.now();
return {
action: 'copy',
count: outFiles.length,
outFiles,
totalCount,
processingTime: t2 - t1
};
}
module.exports = getCopy;
if (require.main === module) {
(async () => {
try {
const signatures = await getSignatures();
onSuccess(await getCopy(copyDirs.map(d => `${d}/**`), { ignore: ignoreMask }, signatures));
onSuccess(await cleanUp(signatures));
await writeSignatures(signatures);
} catch (err) {
process.exitCode = 1;
global.isError = true;
onError(err);
}
})();
}

@@ -1,132 +0,0 @@
const globby = require('globby');
const path = require('path');
const os = require('os');
const fs = require('fs-extra');
const cluster = require('cluster');
const { getSignatures, compareSignatures, getFileSignature, writeSignatures, cleanUp, onSuccess, onError, onProgress } = require('./utils');
const { jsFiles, ignoreMask } = require('./config');
const NODE_ENV = process.env.NODE_ENV;
const ROOT = path.resolve(__dirname, '..');
async function getJS(source, options, signatures) {
const t1 = Date.now();
const matchingJSFiles = await globby(source, Object.assign({ cwd: ROOT }, options));
const cpuCount = os.cpus().length;
const totalCount = matchingJSFiles.length;
var isError = false;
cluster.setupMaster({
exec: path.join(__dirname, 'babel-worker.js')
});
// check signatures and collect new signatures for files to be processed
const newFilesSignatures = {};
const filesForProcessing = [];
var f;
while ((f = matchingJSFiles.pop()) != null) {
const newFileSignature = await getFileSignature(f);
const dest = path.join('build', f.replace('.jsx', '.js'));
f = path.normalize(f);
if (f in signatures) {
if (compareSignatures(newFileSignature, signatures[f])) {
try {
await fs.access(dest, fs.constants.F_OK);
continue;
} catch (_) {
// file does not exist in build, fall back to transpiling
}
}
}
filesForProcessing.push(f);
newFilesSignatures[f] = newFileSignature;
}
// shortcut if no files need rebuilding
if (filesForProcessing.length === 0) {
const t2 = Date.now();
return Promise.resolve({
action: 'js',
count: 0,
outFiles: [],
totalCount,
processingTime: t2 - t1
});
}
// distribute processing among workers
const workerCount = Math.min(cpuCount, filesForProcessing.length);
const outFiles = [];
var workersActive = workerCount;
NODE_ENV == 'debug' && console.log(`Will process ${filesForProcessing.length} files using ${workerCount} processes`);
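// Fork one babel-worker per available CPU (capped at the number of files). Each worker is
// seeded with one file and is handed the next queued file whenever it reports a result,
// until the queue is drained and every worker has exited.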
return new Promise((resolve, reject) => {
for (let i = 0; i < workerCount; i++) {
var worker = cluster.fork();
worker.on('message', function(ev) {
if (ev.error) {
isError = true;
let errorMsg = `Failed while processing ${ev.sourcefile}: ${ev.error}`;
reject(errorMsg);
} else {
signatures[ev.sourcefile] = newFilesSignatures[ev.sourcefile];
if (ev.isSkipped) {
NODE_ENV == 'debug' && console.log(`process ${this.id} SKIPPED ${ev.sourcefile}`);
} else {
NODE_ENV == 'debug' && console.log(`process ${this.id} took ${ev.processingTime} ms to process ${ev.sourcefile} into ${ev.outfile}`);
NODE_ENV != 'debug' && onProgress(ev.sourcefile, ev.outfile, 'js');
outFiles.push(ev.outfile);
}
}
let nextFile = filesForProcessing.pop();
if (!isError && nextFile) {
NODE_ENV == 'debug' && console.log(`process ${this.id} scheduled to process ${nextFile}`);
this.send({
file: nextFile
});
} else {
if (this.isConnected()) {
this.kill();
}
NODE_ENV == 'debug' && console.log(`process ${this.id} has terminated`);
if (!--workersActive) {
const t2 = Date.now();
resolve({
action: 'js',
count: outFiles.length,
outFiles,
totalCount,
processingTime: t2 - t1
});
}
}
});
let nextFile = filesForProcessing.pop();
NODE_ENV == 'debug' && console.log(`process ${worker.id} scheduled to process ${nextFile}`);
worker.send({
file: nextFile
});
}
});
}
module.exports = getJS;
if (require.main === module) {
(async () => {
try {
const signatures = await getSignatures();
onSuccess(await getJS(jsFiles, { ignore: ignoreMask }, signatures));
onSuccess(await cleanUp(signatures));
await writeSignatures(signatures);
} catch (err) {
process.exitCode = 1;
global.isError = true;
onError(err);
}
})();
}

@@ -1,71 +0,0 @@
'use strict';
const fs = require('fs-extra');
const path = require('path');
const util = require('util');
const exec = util.promisify(require('child_process').exec);
const { getSignatures, writeSignatures, onSuccess, onError } = require('./utils');
const { buildsURL } = require('./config');
async function getZoteroNoteEditor(signatures) {
const t1 = Date.now();
const modulePath = path.join(__dirname, '..', 'note-editor');
const { stdout } = await exec('git rev-parse HEAD', { cwd: modulePath });
const hash = stdout.trim();
if (!('note-editor' in signatures) || signatures['note-editor'].hash !== hash) {
const targetDir = path.join(__dirname, '..', 'build', 'resource', 'note-editor');
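// Prefer downloading a prebuilt bundle for this submodule commit from the builds bucket;
// if the download or extraction fails, build the submodule locally with npm instead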
try {
const filename = hash + '.zip';
const tmpDir = path.join(__dirname, '..', 'tmp', 'builds', 'note-editor');
const url = buildsURL + 'client-note-editor/' + filename;
await fs.remove(targetDir);
await fs.ensureDir(targetDir);
await fs.ensureDir(tmpDir);
await exec(
`cd ${tmpDir}`
+ ` && (test -f ${filename} || curl -f ${url} -o ${filename})`
+ ` && unzip ${filename} zotero/* -d ${targetDir}`
+ ` && mv ${path.join(targetDir, 'zotero', '*')} ${targetDir}`
);
await fs.remove(path.join(targetDir, 'zotero'));
}
catch (e) {
await exec('npm ci', { cwd: modulePath });
await exec('npm run build', { cwd: modulePath });
await fs.copy(path.join(modulePath, 'build', 'zotero'), targetDir);
}
signatures['note-editor'] = { hash };
}
const t2 = Date.now();
return {
action: 'note-editor',
count: 1,
totalCount: 1,
processingTime: t2 - t1
};
}
module.exports = getZoteroNoteEditor;
if (require.main === module) {
(async () => {
try {
const signatures = await getSignatures();
onSuccess(await getZoteroNoteEditor(signatures));
await writeSignatures(signatures);
}
catch (err) {
process.exitCode = 1;
global.isError = true;
onError(err);
}
})();
}

@@ -1,71 +0,0 @@
'use strict';
const fs = require('fs-extra');
const path = require('path');
const util = require('util');
const exec = util.promisify(require('child_process').exec);
const { getSignatures, writeSignatures, onSuccess, onError } = require('./utils');
const { buildsURL } = require('./config');
async function getPDFReader(signatures) {
const t1 = Date.now();
const modulePath = path.join(__dirname, '..', 'pdf-reader');
const { stdout } = await exec('git rev-parse HEAD', { cwd: modulePath });
const hash = stdout.trim();
if (!('pdf-reader' in signatures) || signatures['pdf-reader'].hash !== hash) {
const targetDir = path.join(__dirname, '..', 'build', 'resource', 'pdf-reader');
try {
const filename = hash + '.zip';
const tmpDir = path.join(__dirname, '..', 'tmp', 'builds', 'pdf-reader');
const url = buildsURL + 'client-pdf-reader/' + filename;
await fs.remove(targetDir);
await fs.ensureDir(targetDir);
await fs.ensureDir(tmpDir);
await exec(
`cd ${tmpDir}`
+ ` && (test -f ${filename} || curl -f ${url} -o ${filename})`
+ ` && unzip ${filename} zotero/* -d ${targetDir}`
+ ` && mv ${path.join(targetDir, 'zotero', '*')} ${targetDir}`
);
await fs.remove(path.join(targetDir, 'zotero'));
}
catch (e) {
await exec('npm ci', { cwd: modulePath });
await exec('npm run build', { cwd: modulePath });
await fs.copy(path.join(modulePath, 'build', 'zotero'), targetDir);
}
signatures['pdf-reader'] = { hash };
}
const t2 = Date.now();
return {
action: 'pdf-reader',
count: 1,
totalCount: 1,
processingTime: t2 - t1
};
}
module.exports = getPDFReader;
if (require.main === module) {
(async () => {
try {
const signatures = await getSignatures();
onSuccess(await getPDFReader(signatures));
await writeSignatures(signatures);
}
catch (err) {
process.exitCode = 1;
global.isError = true;
onError(err);
}
})();
}

@@ -1,68 +0,0 @@
'use strict';
const fs = require('fs-extra');
const path = require('path');
const util = require('util');
const exec = util.promisify(require('child_process').exec);
const { getSignatures, writeSignatures, onSuccess, onError } = require('./utils');
const { buildsURL } = require('./config');
async function getPDFWorker(signatures) {
const t1 = Date.now();
const modulePath = path.join(__dirname, '..', 'pdf-worker');
const { stdout } = await exec('git rev-parse HEAD', { cwd: modulePath });
const hash = stdout.trim();
if (!('pdf-worker' in signatures) || signatures['pdf-worker'].hash !== hash) {
const targetDir = path.join(__dirname, '..', 'build', 'chrome', 'content', 'zotero', 'xpcom', 'pdfWorker');
try {
const filename = hash + '.zip';
const tmpDir = path.join(__dirname, '..', 'tmp', 'builds', 'pdf-worker');
const url = buildsURL + 'client-pdf-worker/' + filename;
await fs.remove(targetDir);
await fs.ensureDir(targetDir);
await fs.ensureDir(tmpDir);
await exec(
`cd ${tmpDir}`
+ ` && (test -f ${filename} || curl -f ${url} -o ${filename})`
+ ` && unzip -o ${filename} -d ${targetDir}`
);
}
catch (e) {
await exec('npm ci', { cwd: modulePath });
await exec('npm run build', { cwd: modulePath });
await fs.copy(path.join(modulePath, 'build', 'worker.js'), path.join(targetDir, 'worker.js'));
}
signatures['pdf-worker'] = { hash };
}
const t2 = Date.now();
return {
action: 'pdf-worker',
count: 1,
totalCount: 1,
processingTime: t2 - t1
};
}
module.exports = getPDFWorker;
if (require.main === module) {
(async () => {
try {
const signatures = await getSignatures();
onSuccess(await getPDFWorker(signatures));
await writeSignatures(signatures);
}
catch (err) {
process.exitCode = 1;
global.isError = true;
onError(err);
}
})();
}

@@ -1,94 +0,0 @@
/* eslint-disable no-await-in-loop */
const universalify = require('universalify');
const sass = require('sass');
const globby = require('globby');
const path = require('path');
const fs = require('fs-extra');
const { getSignatures, writeSignatures, cleanUp, compareSignatures, getFileSignature, onSuccess, onError, onProgress, getPathRelativeTo } = require('./utils');
const { scssFiles, ignoreMask } = require('./config');
const sassRender = universalify.fromCallback(sass.render);
const ROOT = path.resolve(__dirname, '..');
async function getSass(source, options, signatures = {}) {
const t1 = Date.now();
const files = await globby(source, Object.assign({ cwd: ROOT }, options));
const totalCount = files.length;
const outFiles = [];
var shouldRebuild = false;
for (const f of files) {
// if any file changed, rebuild all files
let newFileSignature = await getFileSignature(f);
if (!compareSignatures(newFileSignature, signatures[f])) {
signatures[f] = newFileSignature;
shouldRebuild = true;
}
}
var f;
if (shouldRebuild) {
const filesToBuild = files.filter(f => !path.basename(f).startsWith('_'));
while ((f = filesToBuild.pop())) {
let newFileSignature = await getFileSignature(f);
let destFile = getPathRelativeTo(f, 'scss');
destFile = path.join(path.dirname(destFile), path.basename(destFile, '.scss') + '.css');
let dest = path.join('build', 'chrome', 'skin', 'default', 'zotero', destFile);
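// Platform-specific stylesheets (*-win.scss, *-mac.scss, *-unix.scss) are written to
// chrome/content/zotero-platform/<platform>/ with the platform suffix stripped from the name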
if (['win', 'mac', 'unix'].some(platform => f.endsWith(`-${platform}.scss`))) {
let platform = f.slice(f.lastIndexOf('-') + 1, f.lastIndexOf('.'));
destFile = destFile.slice(0, destFile.lastIndexOf('-'))
+ destFile.slice(destFile.lastIndexOf('-') + 1 + platform.length);
dest = path.join('build', 'chrome', 'content', 'zotero-platform', platform, destFile);
}
try {
const sass = await sassRender({
file: f,
outFile: dest,
sourceMap: true,
outputStyle: 'compressed'
});
await fs.outputFile(dest, sass.css);
await fs.outputFile(`${dest}.map`, sass.map);
onProgress(f, dest, 'sass');
signatures[f] = newFileSignature;
outFiles.push(dest);
}
catch (err) {
throw new Error(`Failed on ${f}: ${err}`);
}
}
}
const t2 = Date.now();
return {
action: 'sass',
count: outFiles.length,
outFiles,
totalCount,
processingTime: t2 - t1
};
}
module.exports = getSass;
if (require.main === module) {
(async () => {
try {
const signatures = await getSignatures();
for (var i = 0; i < scssFiles.length; i++) {
onSuccess(await getSass(scssFiles[i], { ignore: ignoreMask }, signatures));
}
onSuccess(await cleanUp(signatures));
await writeSignatures(signatures);
}
catch (err) {
process.exitCode = 1;
global.isError = true;
onError(err);
}
})();
}

@@ -1,89 +0,0 @@
'use strict';
const path = require('path');
const fs = require('fs-extra');
const globby = require('globby');
const { isWindows, formatDirsForMatcher, getSignatures, writeSignatures, cleanUp, onSuccess, onError, onProgress } = require('./utils');
const { dirs, symlinkDirs, copyDirs, symlinkFiles, ignoreMask } = require('./config');
const ROOT = path.resolve(__dirname, '..');
//@TODO: change signature to getSymlinks(source, options, signatures)
// here and elsewhere
//
// run symlinks twice, once for files (with nodir: true)
// once for dirs
async function getSymlinks(source, options, signatures) {
const t1 = Date.now();
const files = await globby(source, Object.assign({ cwd: ROOT }, options ));
const filesDonePreviously = [];
for (const [f, signature] of Object.entries(signatures)) {
if ('isSymlinked' in signature && signature.isSymlinked) {
try {
await fs.access(path.join('build', f), fs.constants.F_OK);
// file found in signatures and build/ dir, skip
filesDonePreviously.push(f);
} catch (_) {
// file not found, needs symlinking
}
}
}
const filesToProcess = files.filter(f => !filesDonePreviously.includes(f));
const filesProcessedCount = filesToProcess.length;
var f;
while ((f = filesToProcess.pop()) != null) {
const dest = path.join('build', f);
try {
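// On Windows, creating symlinks typically requires elevated privileges, so copy the file
// (dereferencing any symlinks) instead -- presumably the reason for this branch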
if (isWindows) {
await fs.copy(f, dest, { dereference: true });
} else {
await fs.ensureSymlink(f, dest);
}
signatures[f] = {
isSymlinked: true
};
onProgress(f, dest, 'ln');
} catch (err) {
throw new Error(`Failed on ${f}: ${err}`);
}
}
const t2 = Date.now();
return {
action: 'symlink',
count: filesProcessedCount,
outFiles: files.map(f => path.join('build', f)), // also includes files that have already been symlinked
totalCount: files.length,
processingTime: t2 - t1
};
}
module.exports = getSymlinks;
if (require.main === module) {
(async () => {
try {
const source = symlinkFiles
.concat(dirs.map(d => `${d}/**`))
.concat([`!${formatDirsForMatcher(dirs)}/**/*.js`])
.concat([`!${formatDirsForMatcher(dirs)}/**/*.jsx`])
.concat([`!${formatDirsForMatcher(dirs)}/**/*.scss`])
.concat([`!${formatDirsForMatcher(copyDirs)}/**`]);
const signatures = await getSignatures();
onSuccess(await getSymlinks(source, { nodir: true, ignore: ignoreMask }, signatures));
onSuccess(await getSymlinks(symlinkDirs, {}, signatures));
onSuccess(await cleanUp(signatures));
await writeSignatures(signatures);
} catch (err) {
process.exitCode = 1;
global.isError = true;
onError(err);
}
})();
}

@@ -1,136 +0,0 @@
const path = require('path');
const fs = require('fs-extra');
const colors = require('colors/safe');
const green = colors.green;
const blue = colors.blue;
const yellow = colors.yellow;
const isWindows = /^win/.test(process.platform);
const ROOT = path.resolve(__dirname, '..');
const NODE_ENV = process.env.NODE_ENV;
function onError(err) {
console.log('\u0007'); //🔔
console.log(colors.red('Error:'), err);
}
function onSuccess(result) {
var msg = `${green('Success:')} ${blue(`[${result.action}]`)} ${result.count} files processed`;
if (result.totalCount) {
msg += ` | ${result.totalCount} checked`;
}
msg += ` [${yellow(`${result.processingTime}ms`)}]`;
console.log(msg);
}
function onProgress(sourcefile, outfile, operation) {
if ('isError' in global && global.isError) {
return;
}
if (NODE_ENV == 'debug') {
console.log(`${colors.blue(`[${operation}]`)} ${sourcefile} -> ${outfile}`);
} else {
console.log(`${colors.blue(`[${operation}]`)} ${sourcefile}`);
}
}
async function getSignatures() {
let signaturesFile = path.resolve(ROOT, '.signatures.json');
var signatures = {};
try {
signatures = await fs.readJson(signaturesFile);
} catch (_) {
// if the signatures file doesn't exist, return an empty object instead
}
return signatures;
}
async function writeSignatures(signatures) {
let signaturesFile = path.resolve(ROOT, '.signatures.json');
NODE_ENV == 'debug' && console.log('writing signatures to .signatures.json');
await fs.outputJson(signaturesFile, signatures);
}
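// Remove build artifacts whose source files no longer exist and drop their signature entries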
async function cleanUp(signatures) {
const t1 = Date.now();
var removedCount = 0, invalidCount = 0;
for (let f of Object.keys(signatures)) {
try {
// check if file from signatures exists in source
await fs.access(f, fs.constants.F_OK);
} catch (_) {
invalidCount++;
NODE_ENV == 'debug' && console.log(`File ${f} found in signatures but not in src, deleting from build`);
try {
await fs.remove(path.join('build', f));
removedCount++;
} catch (_) {
// file wasn't in the build either
}
delete signatures[f];
}
}
const t2 = Date.now();
return {
action: 'cleanup',
count: removedCount,
totalCount: invalidCount,
processingTime: t2 - t1
};
}
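// A file's "signature" is a cheap stat-based fingerprint (mode, mtime, type flags) used to
// decide whether an existing build artifact can be reused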
async function getFileSignature(file) {
let stats = await fs.stat(file);
return {
mode: stats.mode,
mtime: stats.mtimeMs || stats.mtime.getTime(),
isDirectory: stats.isDirectory(),
isFile: stats.isFile()
};
}
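// Signatures match only if both exist and every tracked field is present and equal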
function compareSignatures(a, b) {
return typeof a === 'object'
&& typeof b === 'object'
&& a != null
&& b != null
&& ['mode', 'mtime', 'isDirectory', 'isFile'].reduce((acc, k) => {
return acc ? k in a && k in b && a[k] == b[k] : false;
}, true);
}
function getPathRelativeTo(f, dirName) {
return path.relative(path.join(ROOT, dirName), path.join(ROOT, f));
}
const formatDirsForMatcher = dirs => {
return dirs.length > 1 ? `{${dirs.join(',')}}` : dirs[0];
};
function comparePaths(actualPath, testedPath) {
// compare paths after normalizing os-specific path separator
return path.normalize(actualPath) === path.normalize(testedPath);
}
const envCheckTrue = env => !!(env && (parseInt(env) || env === true || env === "true"));
module.exports = {
cleanUp,
comparePaths,
compareSignatures,
envCheckTrue,
formatDirsForMatcher,
getFileSignature,
getPathRelativeTo,
getSignatures,
isWindows,
onError,
onProgress,
onSuccess,
writeSignatures,
};

@@ -1,138 +0,0 @@
const path = require('path');
const fs = require('fs-extra');
const chokidar = require('chokidar');
const multimatch = require('multimatch');
const { exec } = require('child_process');
const { dirs, jsFiles, scssFiles, ignoreMask, copyDirs, symlinkFiles } = require('./config');
const { envCheckTrue, onSuccess, onError, getSignatures, writeSignatures, cleanUp, formatDirsForMatcher } = require('./utils');
const getJS = require('./js');
const getSass = require('./sass');
const getCopy = require('./copy');
const getSymlinks = require('./symlinks');
const ROOT = path.resolve(__dirname, '..');
const addOmniExecPath = path.join(ROOT, '..', 'zotero-standalone-build', 'scripts', 'add_omni_file');
let shouldAddOmni = false;
const source = [
'chrome',
'components',
'defaults',
'resource',
'scss',
'test',
'styles',
'translators',
'scss',
'chrome/**',
'components/**',
'defaults/**',
'resource/**',
'scss/**',
'test/**',
'styles/**',
'translators/**',
'scss/**'
];
const symlinks = symlinkFiles
.concat(dirs.map(d => `${d}/**`))
.concat([`!${formatDirsForMatcher(dirs)}/**/*.js`])
.concat([`!${formatDirsForMatcher(dirs)}/**/*.jsx`])
.concat([`!${formatDirsForMatcher(dirs)}/**/*.scss`])
.concat([`!${formatDirsForMatcher(copyDirs)}/**`]);
var signatures;
process.on('SIGINT', () => {
writeSignatures(signatures);
process.exit();
});
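// Push rebuilt files into the packaged omni archive using zotero-standalone-build's
// add_omni_file script (only used when that script exists and SKIP_OMNI isn't set)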
async function addOmniFiles(relPaths) {
const t1 = Date.now();
const buildDirPath = path.join(ROOT, 'build');
const wrappedPaths = relPaths.map(relPath => `"${path.relative(buildDirPath, relPath)}"`);
await new Promise((resolve, reject) => {
const cmd = `"${addOmniExecPath}" ${wrappedPaths.join(' ')}`;
exec(cmd, { cwd: buildDirPath }, (error, output) => {
if (error) {
reject(error);
}
else {
process.env.NODE_ENV === 'debug' && console.log(`Executed:\n${cmd};\nOutput:\n${output}\n`);
resolve(output);
}
});
});
const t2 = Date.now();
return {
action: 'add-omni-files',
count: relPaths.length,
totalCount: relPaths.length,
processingTime: t2 - t1
};
}
async function getWatch() {
try {
await fs.access(addOmniExecPath, fs.constants.F_OK);
shouldAddOmni = !envCheckTrue(process.env.SKIP_OMNI);
}
catch (_) {}
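// On every change, run the first task whose patterns match the changed path
// (js -> sass -> copy -> symlink), then clean up and optionally update the omni archive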
let watcher = chokidar.watch(source, { cwd: ROOT })
.on('change', async (path) => {
try {
var result = false;
if (multimatch(path, jsFiles).length && !multimatch(path, ignoreMask).length) {
result = await getJS(path, { ignore: ignoreMask }, signatures);
onSuccess(await cleanUp(signatures));
}
if (!result) {
for (var i = 0; i < scssFiles.length; i++) {
if (multimatch(path, scssFiles[i]).length) {
result = await getSass(scssFiles[i], { ignore: ignoreMask }); // eslint-disable-line no-await-in-loop
break;
}
}
}
if (!result && multimatch(path, copyDirs.map(d => `${d}/**`)).length) {
result = await getCopy(path, {}, signatures);
}
if (!result && multimatch(path, symlinks).length) {
result = await getSymlinks(path, { nodir: true }, signatures);
}
onSuccess(result);
onSuccess(await cleanUp(signatures));
if (shouldAddOmni && result.outFiles?.length) {
onSuccess(await addOmniFiles(result.outFiles));
}
}
catch (err) {
onError(err);
}
})
.on('unlink', async () => {
const signatures = await getSignatures();
onSuccess(await cleanUp(signatures));
});
watcher.add(source);
console.log(`Watching files for changes (omni updates ${shouldAddOmni ? 'enabled' : 'disabled'})...`);
}
module.exports = getWatch;
if (require.main === module) {
(async () => {
signatures = await getSignatures();
getWatch();
})();
}