build: enable JS semicolons (#22783)
parent 24e21467b9
commit 5d657dece4
354 changed files with 21512 additions and 21510 deletions
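The change itself is mechanical: every script below gains statement-terminating semicolons, and the switch is driven by the linter configuration rather than by any one file. As a minimal sketch (the repo's actual ESLint config file, its location, and its other rules are not part of this excerpt, so treat them as assumptions), flipping the `semi` rule is what turns the old style into lint errors:

// .eslintrc.js — hypothetical location; only the `semi` rule change is implied by this commit
module.exports = {
  rules: {
    semi: ['error', 'always'] // previously the codebase omitted semicolons ('never')
  }
};

Running the repo's JS linter with --fix (see script/lint.js below) then rewrites the files wholesale, which is why the diff is almost exactly one deletion and one addition per statement.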
@@ -1,38 +1,38 @@
-const cp = require('child_process')
-const fs = require('fs')
-const path = require('path')
+const cp = require('child_process');
+const fs = require('fs');
+const path = require('path');

-const certificatePath = process.argv[2]
-const outPath = process.argv[3]
-const templatePath = path.resolve(__dirname, 'trust.xml')
+const certificatePath = process.argv[2];
+const outPath = process.argv[3];
+const templatePath = path.resolve(__dirname, 'trust.xml');

-const template = fs.readFileSync(templatePath, 'utf8')
+const template = fs.readFileSync(templatePath, 'utf8');

-const fingerprintResult = cp.spawnSync('openssl', ['x509', '-noout', '-fingerprint', '-sha1', '-in', certificatePath])
+const fingerprintResult = cp.spawnSync('openssl', ['x509', '-noout', '-fingerprint', '-sha1', '-in', certificatePath]);
 if (fingerprintResult.status !== 0) {
-  console.error(fingerprintResult.stderr.toString())
-  process.exit(1)
+  console.error(fingerprintResult.stderr.toString());
+  process.exit(1);
 }

-const fingerprint = fingerprintResult.stdout.toString().replace(/^SHA1 Fingerprint=/, '').replace(/:/g, '').trim()
+const fingerprint = fingerprintResult.stdout.toString().replace(/^SHA1 Fingerprint=/, '').replace(/:/g, '').trim();

-const serialResult = cp.spawnSync('openssl', ['x509', '-serial', '-noout', '-in', certificatePath])
+const serialResult = cp.spawnSync('openssl', ['x509', '-serial', '-noout', '-in', certificatePath]);
 if (serialResult.status !== 0) {
-  console.error(serialResult.stderr.toString())
-  process.exit(1)
+  console.error(serialResult.stderr.toString());
+  process.exit(1);
 }

-let serialHex = serialResult.stdout.toString().replace(/^serial=/, '').trim()
+let serialHex = serialResult.stdout.toString().replace(/^serial=/, '').trim();
 // Pad the serial number out to 18 hex chars
 while (serialHex.length < 18) {
-  serialHex = `0${serialHex}`
+  serialHex = `0${serialHex}`;
 }
-const serialB64 = Buffer.from(serialHex, 'hex').toString('base64')
+const serialB64 = Buffer.from(serialHex, 'hex').toString('base64');

 const trust = template
   .replace(/{{FINGERPRINT}}/g, fingerprint)
-  .replace(/{{SERIAL_BASE64}}/g, serialB64)
+  .replace(/{{SERIAL_BASE64}}/g, serialB64);

-fs.writeFileSync(outPath, trust)
+fs.writeFileSync(outPath, trust);

-console.log('Generated Trust Settings')
+console.log('Generated Trust Settings');
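As a worked example of the padding above (illustrative serial only): OpenSSL prints the serial as hex, and a 16-digit serial is left-padded to 18 digits — i.e. 9 bytes — before the base64 conversion.

let serialHex = '0123456789ABCDEF'; // example 16-digit serial
while (serialHex.length < 18) {
  serialHex = `0${serialHex}`; // -> '000123456789ABCDEF'
}
// 9 bytes encode to 12 base64 chars
const serialB64 = Buffer.from(serialHex, 'hex').toString('base64'); // 'AAEjRWeJq83v'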
@@ -1,18 +1,18 @@
-const args = require('minimist')(process.argv.slice(2))
-const octokit = require('@octokit/rest')()
-const path = require('path')
+const args = require('minimist')(process.argv.slice(2));
+const octokit = require('@octokit/rest')();
+const path = require('path');

-const SOURCE_ROOT = path.normalize(path.dirname(__dirname))
+const SOURCE_ROOT = path.normalize(path.dirname(__dirname));

 async function checkIfDocOnlyChange () {
   if (args.prNumber || args.prBranch || args.prURL) {
     try {
-      let pullRequestNumber = args.prNumber
+      let pullRequestNumber = args.prNumber;
       if (!pullRequestNumber || isNaN(pullRequestNumber)) {
         if (args.prURL) {
           // CircleCI doesn't provide the PR number for branch builds, but it does provide the PR URL
-          const pullRequestParts = args.prURL.split('/')
-          pullRequestNumber = pullRequestParts[pullRequestParts.length - 1]
+          const pullRequestParts = args.prURL.split('/');
+          pullRequestNumber = pullRequestParts[pullRequestParts.length - 1];
         } else if (args.prBranch) {
           // AppVeyor doesn't provide a PR number for branch builds - figure it out from the branch
           const prsForBranch = await octokit.pulls.list({
@@ -20,39 +20,39 @@ async function checkIfDocOnlyChange () {
             repo: 'electron',
             state: 'open',
             head: `electron:${args.prBranch}`
-          })
+          });
           if (prsForBranch.data.length === 1) {
-            pullRequestNumber = prsForBranch.data[0].number
+            pullRequestNumber = prsForBranch.data[0].number;
           } else {
             // If there are 0 PRs or more than one PR on a branch, just assume that this is more than a doc change
-            process.exit(1)
+            process.exit(1);
           }
         }
       }
       const filesChanged = await octokit.pulls.listFiles({
         owner: 'electron', repo: 'electron', pull_number: pullRequestNumber
-      })
+      });

       const nonDocChange = filesChanged.data.find((fileInfo) => {
-        const fileDirs = fileInfo.filename.split('/')
+        const fileDirs = fileInfo.filename.split('/');
         if (fileDirs[0] !== 'docs') {
-          return true
+          return true;
         }
-      })
+      });
       if (nonDocChange) {
-        process.exit(1)
+        process.exit(1);
       } else {
-        process.exit(0)
+        process.exit(0);
       }
     } catch (ex) {
-      console.error('Error getting list of files changed: ', ex)
-      process.exit(-1)
+      console.error('Error getting list of files changed: ', ex);
+      process.exit(-1);
     }
   } else {
     console.error(`Check if only the docs were changed for a commit.
-Usage: doc-only-change.js --prNumber=PR_NUMBER || --prBranch=PR_BRANCH || --prURL=PR_URL`)
-    process.exit(-1)
+Usage: doc-only-change.js --prNumber=PR_NUMBER || --prBranch=PR_BRANCH || --prURL=PR_URL`);
+    process.exit(-1);
   }
 }

-checkIfDocOnlyChange()
+checkIfDocOnlyChange();
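doc-only-change.js reports its verdict purely through the exit code (0 = docs-only, non-zero = anything else or an error), so callers branch on status rather than parsing output. A minimal sketch of a consumer, assuming the script path from its own usage string (the PR number is illustrative; --prBranch and --prURL work the same way):

const { spawnSync } = require('child_process');

const { status } = spawnSync('node', ['script/doc-only-change.js', '--prNumber=12345'], { stdio: 'inherit' });
console.log(status === 0 ? 'docs-only change; heavy CI can be skipped' : 'source change; run full CI');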
@@ -1,38 +1,38 @@
-const args = require('minimist')(process.argv.slice(2))
-const nugget = require('nugget')
-const request = require('request')
+const args = require('minimist')(process.argv.slice(2));
+const nugget = require('nugget');
+const request = require('request');

 async function makeRequest (requestOptions, parseResponse) {
   return new Promise((resolve, reject) => {
     request(requestOptions, (err, res, body) => {
       if (!err && res.statusCode >= 200 && res.statusCode < 300) {
         if (parseResponse) {
-          const build = JSON.parse(body)
-          resolve(build)
+          const build = JSON.parse(body);
+          resolve(build);
         } else {
-          resolve(body)
+          resolve(body);
         }
       } else {
         if (args.verbose) {
-          console.error('Error occurred while requesting:', requestOptions.url)
+          console.error('Error occurred while requesting:', requestOptions.url);
           if (parseResponse) {
             try {
-              console.log('Error: ', `(status ${res.statusCode})`, err || JSON.parse(res.body), requestOptions)
+              console.log('Error: ', `(status ${res.statusCode})`, err || JSON.parse(res.body), requestOptions);
             } catch (err) {
-              console.log('Error: ', `(status ${res.statusCode})`, err || res.body, requestOptions)
+              console.log('Error: ', `(status ${res.statusCode})`, err || res.body, requestOptions);
             }
           } else {
-            console.log('Error: ', `(status ${res.statusCode})`, err || res.body, requestOptions)
+            console.log('Error: ', `(status ${res.statusCode})`, err || res.body, requestOptions);
           }
         }
-        reject(err)
+        reject(err);
       }
-    })
-  })
+    });
+  });
 }

 async function downloadArtifact (name, buildNum, dest) {
-  const circleArtifactUrl = `https://circleci.com/api/v1.1/project/github/electron/electron/${args.buildNum}/artifacts?circle-token=${process.env.CIRCLE_TOKEN}`
+  const circleArtifactUrl = `https://circleci.com/api/v1.1/project/github/electron/electron/${args.buildNum}/artifacts?circle-token=${process.env.CIRCLE_TOKEN}`;
   const artifacts = await makeRequest({
     method: 'GET',
     url: circleArtifactUrl,
@@ -42,47 +42,47 @@ async function downloadArtifact (name, buildNum, dest) {
     }
   }, true).catch(err => {
     if (args.verbose) {
-      console.log('Error calling CircleCI:', err)
+      console.log('Error calling CircleCI:', err);
     } else {
-      console.error('Error calling CircleCI to get artifact details')
+      console.error('Error calling CircleCI to get artifact details');
     }
-  })
+  });
   const artifactToDownload = artifacts.find(artifact => {
-    return (artifact.path === name)
-  })
+    return (artifact.path === name);
+  });
   if (!artifactToDownload) {
-    console.log(`Could not find artifact called ${name} to download for build #${buildNum}.`)
-    process.exit(1)
+    console.log(`Could not find artifact called ${name} to download for build #${buildNum}.`);
+    process.exit(1);
   } else {
-    console.log(`Downloading ${artifactToDownload.url}.`)
-    let downloadError = false
+    console.log(`Downloading ${artifactToDownload.url}.`);
+    let downloadError = false;
    await downloadWithRetry(artifactToDownload.url, dest).catch(err => {
      if (args.verbose) {
-        console.log(`${artifactToDownload.url} could not be successfully downloaded. Error was:`, err)
+        console.log(`${artifactToDownload.url} could not be successfully downloaded. Error was:`, err);
      } else {
-        console.log(`${artifactToDownload.url} could not be successfully downloaded.`)
+        console.log(`${artifactToDownload.url} could not be successfully downloaded.`);
      }
-      downloadError = true
-    })
+      downloadError = true;
+    });
     if (!downloadError) {
-      console.log(`Successfully downloaded ${name}.`)
+      console.log(`Successfully downloaded ${name}.`);
     }
   }
 }

 async function downloadWithRetry (url, directory) {
-  let lastError
-  const downloadURL = `${url}?circle-token=${process.env.CIRCLE_TOKEN}`
+  let lastError;
+  const downloadURL = `${url}?circle-token=${process.env.CIRCLE_TOKEN}`;
   for (let i = 0; i < 5; i++) {
-    console.log(`Attempting to download ${url} - attempt #${(i + 1)}`)
+    console.log(`Attempting to download ${url} - attempt #${(i + 1)}`);
     try {
-      return await downloadFile(downloadURL, directory)
+      return await downloadFile(downloadURL, directory);
     } catch (err) {
-      lastError = err
-      await new Promise((resolve, reject) => setTimeout(resolve, 30000))
+      lastError = err;
+      await new Promise((resolve, reject) => setTimeout(resolve, 30000));
     }
   }
-  throw lastError
+  throw lastError;
 }

 function downloadFile (url, directory) {
@@ -90,21 +90,21 @@ function downloadFile (url, directory) {
     const nuggetOpts = {
       dir: directory,
       quiet: args.verbose
-    }
+    };
     nugget(url, nuggetOpts, (err) => {
       if (err) {
-        reject(err)
+        reject(err);
       } else {
-        resolve()
+        resolve();
       }
-    })
-  })
+    });
+  });
 }

 if (!args.name || !args.buildNum || !args.dest) {
   console.log(`Download CircleCI artifacts.
-Usage: download-circleci-artifacts.js [--buildNum=CIRCLE_BUILD_NUMBER] [--name=artifactName] [--dest] [--verbose]`)
-  process.exit(0)
+Usage: download-circleci-artifacts.js [--buildNum=CIRCLE_BUILD_NUMBER] [--name=artifactName] [--dest] [--verbose]`);
+  process.exit(0);
 } else {
-  downloadArtifact(args.name, args.buildNum, args.dest)
+  downloadArtifact(args.name, args.buildNum, args.dest);
 }
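An example invocation matching the usage string above (all values illustrative; CIRCLE_TOKEN must be exported, since both the artifact listing and the download URL append it):

node download-circleci-artifacts.js --buildNum=123456 --name=electron.zip --dest=./artifacts --verbose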
@@ -1,17 +1,17 @@
-const cp = require('child_process')
-const fs = require('fs-extra')
-const os = require('os')
-const path = require('path')
+const cp = require('child_process');
+const fs = require('fs-extra');
+const os = require('os');
+const path = require('path');

-const rootPath = path.resolve(__dirname, '..')
-const gniPath = path.resolve(__dirname, '../filenames.auto.gni')
+const rootPath = path.resolve(__dirname, '..');
+const gniPath = path.resolve(__dirname, '../filenames.auto.gni');

 const allDocs = fs.readdirSync(path.resolve(__dirname, '../docs/api'))
   .map(doc => `docs/api/${doc}`)
   .concat(
     fs.readdirSync(path.resolve(__dirname, '../docs/api/structures'))
       .map(doc => `docs/api/structures/${doc}`)
-  )
+  );

 const main = async () => {
   const webpackTargets = [
@@ -39,30 +39,30 @@ const main = async () => {
       name: 'worker_bundle_deps',
       config: 'webpack.config.worker.js'
     }
-  ]
+  ];

   await Promise.all(webpackTargets.map(async webpackTarget => {
-    const tmpDir = await fs.mkdtemp(path.resolve(os.tmpdir(), 'electron-filenames-'))
+    const tmpDir = await fs.mkdtemp(path.resolve(os.tmpdir(), 'electron-filenames-'));
     const child = cp.spawn('node', [
       'build/webpack/get-outputs.js',
       `./${webpackTarget.config}`,
       path.resolve(tmpDir, `${webpackTarget.name}.measure.js`)
     ], {
       cwd: path.resolve(__dirname, '..')
-    })
-    let output = ''
+    });
+    let output = '';
     child.stdout.on('data', chunk => {
-      output += chunk.toString()
-    })
-    child.stderr.on('data', chunk => console.error(chunk.toString()))
+      output += chunk.toString();
+    });
+    child.stderr.on('data', chunk => console.error(chunk.toString()));
     await new Promise((resolve, reject) => child.on('exit', (code) => {
       if (code !== 0) {
-        console.error(output)
-        return reject(new Error(`Failed to list webpack dependencies for entry: ${webpackTarget.name}`))
+        console.error(output);
+        return reject(new Error(`Failed to list webpack dependencies for entry: ${webpackTarget.name}`));
       }

-      resolve()
-    }))
+      resolve();
+    }));

     webpackTarget.dependencies = JSON.parse(output)
       // Remove whitespace
@@ -76,9 +76,9 @@ const main = async () => {
       // All webpack builds depend on the tsconfig and package json files
       .concat(['tsconfig.json', 'tsconfig.electron.json', 'package.json'])
       // Make the generated list easier to read
-      .sort()
-    await fs.remove(tmpDir)
-  }))
+      .sort();
+    await fs.remove(tmpDir);
+  }));

   fs.writeFileSync(
     gniPath,
@@ -92,12 +92,12 @@ ${webpackTargets.map(target => `  ${target.name} = [
 ${target.dependencies.map(dep => `    "${dep}",`).join('\n')}
   ]`).join('\n\n')}
 }
-`)
-}
+`);
+};

 if (process.mainModule === module) {
   main().catch((err) => {
-    console.error(err)
-    process.exit(1)
-  })
+    console.error(err);
+    process.exit(1);
+  });
 }
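For reference, each webpack target ends up as one sorted GN list in filenames.auto.gni, with tsconfig.json, tsconfig.electron.json and package.json always appended. An illustrative fragment of the output (the dependency paths are hypothetical, and the enclosing scope written by fs.writeFileSync is elided in this excerpt):

  worker_bundle_deps = [
    "lib/worker/init.ts",
    "package.json",
    "tsconfig.electron.json",
    "tsconfig.json",
  ]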
@@ -1,18 +1,18 @@
-const fs = require('fs')
-const path = require('path')
+const fs = require('fs');
+const path = require('path');

-const check = process.argv.includes('--check')
+const check = process.argv.includes('--check');

-const dictsPath = path.resolve(__dirname, '..', '..', 'third_party', 'hunspell_dictionaries')
-const gclientPath = 'third_party/hunspell_dictionaries'
+const dictsPath = path.resolve(__dirname, '..', '..', 'third_party', 'hunspell_dictionaries');
+const gclientPath = 'third_party/hunspell_dictionaries';

-const allFiles = fs.readdirSync(dictsPath)
+const allFiles = fs.readdirSync(dictsPath);

 const dictionaries = allFiles
-  .filter(file => path.extname(file) === '.bdic')
+  .filter(file => path.extname(file) === '.bdic');

 const licenses = allFiles
-  .filter(file => file.startsWith('LICENSE') || file.startsWith('COPYING'))
+  .filter(file => file.startsWith('LICENSE') || file.startsWith('COPYING'));

 const content = `hunspell_dictionaries = [
   ${dictionaries.map(f => `"//${path.posix.join(gclientPath, f)}"`).join(',\n  ')},
@@ -21,15 +21,15 @@ const content = `hunspell_dictionaries = [
 hunspell_licenses = [
   ${licenses.map(f => `"//${path.posix.join(gclientPath, f)}"`).join(',\n  ')},
 ]
-`
+`;

-const filenamesPath = path.resolve(__dirname, '..', 'filenames.hunspell.gni')
+const filenamesPath = path.resolve(__dirname, '..', 'filenames.hunspell.gni');

 if (check) {
-  const currentContent = fs.readFileSync(filenamesPath, 'utf8')
+  const currentContent = fs.readFileSync(filenamesPath, 'utf8');
   if (currentContent !== content) {
-    throw new Error('hunspell filenames need to be regenerated, latest generation does not match current file. Please run node gen-hunspell-filenames.js')
+    throw new Error('hunspell filenames need to be regenerated, latest generation does not match current file. Please run node gen-hunspell-filenames.js');
   }
 } else {
-  fs.writeFileSync(filenamesPath, content)
+  fs.writeFileSync(filenamesPath, content);
 }
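The generated filenames.hunspell.gni therefore contains two GN lists, e.g. (dictionary file names are illustrative):

hunspell_dictionaries = [
  "//third_party/hunspell_dictionaries/de-DE-3-0.bdic",
  "//third_party/hunspell_dictionaries/en-US-3-0.bdic",
]

hunspell_licenses = [
  "//third_party/hunspell_dictionaries/LICENSE",
]

With --check the script regenerates this content in memory and throws if it differs from the file on disk, which lets CI catch a stale checked-in list.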
@@ -1,9 +1,9 @@
-const crypto = require('crypto')
-const fs = require('fs')
-const path = require('path')
+const crypto = require('crypto');
+const fs = require('fs');
+const path = require('path');

 // Fallback to blow away old cache keys
-const HASH_VERSION = 3
+const HASH_VERSION = 3;

 // Base files to hash
 const filesToHash = [
@@ -11,42 +11,42 @@ const filesToHash = [
   path.resolve(__dirname, '../yarn.lock'),
   path.resolve(__dirname, '../script/external-binaries.json'),
   path.resolve(__dirname, '../script/sysroots.json')
-]
+];

 const addAllFiles = (dir) => {
   for (const child of fs.readdirSync(dir).sort()) {
-    const childPath = path.resolve(dir, child)
+    const childPath = path.resolve(dir, child);
     if (fs.statSync(childPath).isDirectory()) {
-      addAllFiles(childPath)
+      addAllFiles(childPath);
     } else {
-      filesToHash.push(childPath)
+      filesToHash.push(childPath);
     }
   }
-}
+};

 // Add all patch files to the hash
-addAllFiles(path.resolve(__dirname, '../patches'))
+addAllFiles(path.resolve(__dirname, '../patches'));

 // Create Hash
-const hasher = crypto.createHash('SHA256')
-hasher.update(`HASH_VERSION:${HASH_VERSION}`)
+const hasher = crypto.createHash('SHA256');
+hasher.update(`HASH_VERSION:${HASH_VERSION}`);
 for (const file of filesToHash) {
-  hasher.update(fs.readFileSync(file))
+  hasher.update(fs.readFileSync(file));
 }

 // Add the GCLIENT_EXTRA_ARGS variable to the hash
-const extraArgs = process.env.GCLIENT_EXTRA_ARGS || 'no_extra_args'
-hasher.update(extraArgs)
+const extraArgs = process.env.GCLIENT_EXTRA_ARGS || 'no_extra_args';
+hasher.update(extraArgs);

-const effectivePlatform = extraArgs.includes('host_os=mac') ? 'darwin' : process.platform
+const effectivePlatform = extraArgs.includes('host_os=mac') ? 'darwin' : process.platform;

 // Write the hash to disk
-fs.writeFileSync(path.resolve(__dirname, '../.depshash'), hasher.digest('hex'))
+fs.writeFileSync(path.resolve(__dirname, '../.depshash'), hasher.digest('hex'));

-let targetContent = `${effectivePlatform}\n${process.env.TARGET_ARCH}\n${process.env.GN_CONFIG}\n${undefined}\n${process.env.GN_EXTRA_ARGS}\n${process.env.GN_BUILDFLAG_ARGS}`
-const argsDir = path.resolve(__dirname, '../build/args')
+let targetContent = `${effectivePlatform}\n${process.env.TARGET_ARCH}\n${process.env.GN_CONFIG}\n${undefined}\n${process.env.GN_EXTRA_ARGS}\n${process.env.GN_BUILDFLAG_ARGS}`;
+const argsDir = path.resolve(__dirname, '../build/args');
 for (const argFile of fs.readdirSync(argsDir).sort()) {
-  targetContent += `\n${argFile}--${crypto.createHash('SHA1').update(fs.readFileSync(path.resolve(argsDir, argFile))).digest('hex')}`
+  targetContent += `\n${argFile}--${crypto.createHash('SHA1').update(fs.readFileSync(path.resolve(argsDir, argFile))).digest('hex')}`;
 }

-fs.writeFileSync(path.resolve(__dirname, '../.depshash-target'), targetContent)
+fs.writeFileSync(path.resolve(__dirname, '../.depshash-target'), targetContent);
@@ -1,23 +1,23 @@
-const fs = require('fs')
-const path = require('path')
-const semver = require('semver')
+const fs = require('fs');
+const path = require('path');
+const semver = require('semver');

-const outputPath = process.argv[2]
+const outputPath = process.argv[2];

-const currentVersion = fs.readFileSync(path.resolve(__dirname, '../ELECTRON_VERSION'), 'utf8').trim()
+const currentVersion = fs.readFileSync(path.resolve(__dirname, '../ELECTRON_VERSION'), 'utf8').trim();

-const parsed = semver.parse(currentVersion)
+const parsed = semver.parse(currentVersion);

-let prerelease = ''
+let prerelease = '';
 if (parsed.prerelease && parsed.prerelease.length > 0) {
-  prerelease = parsed.prerelease.join('.')
+  prerelease = parsed.prerelease.join('.');
 }

 const {
   major,
   minor,
   patch
-} = parsed
+} = parsed;

 fs.writeFileSync(outputPath, JSON.stringify({
   major,
@@ -25,4 +25,4 @@ fs.writeFileSync(outputPath, JSON.stringify({
   patch,
   prerelease,
   has_prerelease: prerelease === '' ? 0 : 1
-}, null, 2))
+}, null, 2));
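Given the fields above, the JSON written to outputPath looks like this for a hypothetical prerelease version (values illustrative):

{
  "major": 9,
  "minor": 0,
  "patch": 0,
  "prerelease": "nightly.20200217",
  "has_prerelease": 1
}

For a stable version, prerelease is the empty string and has_prerelease is 0.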
@@ -1,63 +1,63 @@
-const asar = require('asar')
-const assert = require('assert')
-const fs = require('fs-extra')
-const os = require('os')
-const path = require('path')
+const asar = require('asar');
+const assert = require('assert');
+const fs = require('fs-extra');
+const os = require('os');
+const path = require('path');

 const getArgGroup = (name) => {
-  const group = []
-  let inGroup = false
+  const group = [];
+  let inGroup = false;
   for (const arg of process.argv) {
     // At the next flag we stop being in the current group
-    if (arg.startsWith('--')) inGroup = false
+    if (arg.startsWith('--')) inGroup = false;
     // Push all args in the group
-    if (inGroup) group.push(arg)
+    if (inGroup) group.push(arg);
     // If we find the start flag, start pushing
-    if (arg === `--${name}`) inGroup = true
+    if (arg === `--${name}`) inGroup = true;
   }

-  return group
-}
+  return group;
+};

-const base = getArgGroup('base')
-const files = getArgGroup('files')
-const out = getArgGroup('out')
+const base = getArgGroup('base');
+const files = getArgGroup('files');
+const out = getArgGroup('out');

-assert(base.length === 1, 'should have a single base dir')
-assert(files.length >= 1, 'should have at least one input file')
-assert(out.length === 1, 'should have a single out path')
+assert(base.length === 1, 'should have a single base dir');
+assert(files.length >= 1, 'should have at least one input file');
+assert(out.length === 1, 'should have a single out path');

 // Ensure all files are inside the base dir
 for (const file of files) {
   if (!file.startsWith(base[0])) {
-    console.error(`Expected all files to be inside the base dir but "${file}" was not in "${base[0]}"`)
-    process.exit(1)
+    console.error(`Expected all files to be inside the base dir but "${file}" was not in "${base[0]}"`);
+    process.exit(1);
   }
 }

-const tmpPath = fs.mkdtempSync(path.resolve(os.tmpdir(), 'electron-gn-asar-'))
+const tmpPath = fs.mkdtempSync(path.resolve(os.tmpdir(), 'electron-gn-asar-'));

 try {
   // Copy all files to a tmp dir to avoid including scrap files in the ASAR
   for (const file of files) {
-    const newLocation = path.resolve(tmpPath, path.relative(base[0], file))
-    fs.mkdirsSync(path.dirname(newLocation))
-    fs.writeFileSync(newLocation, fs.readFileSync(file))
+    const newLocation = path.resolve(tmpPath, path.relative(base[0], file));
+    fs.mkdirsSync(path.dirname(newLocation));
+    fs.writeFileSync(newLocation, fs.readFileSync(file));
   }
 } catch (err) {
-  console.error('Unexpected error while generating ASAR', err)
+  console.error('Unexpected error while generating ASAR', err);
   fs.remove(tmpPath)
     .then(() => process.exit(1))
-    .catch(() => process.exit(1))
-  return
+    .catch(() => process.exit(1));
+  return;
 }

 // Create the ASAR archive
 asar.createPackageWithOptions(tmpPath, out[0], {})
   .catch(err => {
     const exit = () => {
-      console.error('Unexpected error while generating ASAR', err)
-      process.exit(1)
-    }
-    fs.remove(tmpPath).then(exit).catch(exit)
-  }).then(() => fs.remove(tmpPath))
+      console.error('Unexpected error while generating ASAR', err);
+      process.exit(1);
+    };
+    fs.remove(tmpPath).then(exit).catch(exit);
+  }).then(() => fs.remove(tmpPath));
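getArgGroup collects every argument between one --flag and the next, so an invocation supplies three positional groups after their flags. A sketch (all paths and the script's on-disk name are illustrative; the filename is not shown in this excerpt):

node gn-asar.js --base /src/out/Testing/gen --files /src/out/Testing/gen/a.js /src/out/Testing/gen/b.js --out /src/out/Testing/app.asar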
@@ -4,38 +4,38 @@ Usage:
 $ node ./script/gn-check.js [--outDir=dirName]
 */

-const cp = require('child_process')
-const path = require('path')
-const args = require('minimist')(process.argv.slice(2), { string: ['outDir'] })
+const cp = require('child_process');
+const path = require('path');
+const args = require('minimist')(process.argv.slice(2), { string: ['outDir'] });

-const { getOutDir } = require('./lib/utils')
+const { getOutDir } = require('./lib/utils');

-const SOURCE_ROOT = path.normalize(path.dirname(__dirname))
-const DEPOT_TOOLS = path.resolve(SOURCE_ROOT, '..', 'third_party', 'depot_tools')
+const SOURCE_ROOT = path.normalize(path.dirname(__dirname));
+const DEPOT_TOOLS = path.resolve(SOURCE_ROOT, '..', 'third_party', 'depot_tools');

-const OUT_DIR = getOutDir({ outDir: args.outDir })
+const OUT_DIR = getOutDir({ outDir: args.outDir });
 if (!OUT_DIR) {
-  throw new Error('No viable out dir: one of Debug, Testing, or Release must exist.')
+  throw new Error('No viable out dir: one of Debug, Testing, or Release must exist.');
 }

 const env = Object.assign({
   CHROMIUM_BUILDTOOLS_PATH: path.resolve(SOURCE_ROOT, '..', 'buildtools'),
   DEPOT_TOOLS_WIN_TOOLCHAIN: '0'
-}, process.env)
+}, process.env);
 // Users may not have depot_tools in PATH.
-env.PATH = `${env.PATH}${path.delimiter}${DEPOT_TOOLS}`
+env.PATH = `${env.PATH}${path.delimiter}${DEPOT_TOOLS}`;

 const gnCheckDirs = [
   '//electron:electron_lib',
   '//electron:electron_app',
   '//electron:manifests',
   '//electron/shell/common/api:mojo'
-]
+];

 for (const dir of gnCheckDirs) {
-  const args = ['check', `../out/${OUT_DIR}`, dir]
-  const result = cp.spawnSync('gn', args, { env, stdio: 'inherit' })
-  if (result.status !== 0) process.exit(result.status)
+  const args = ['check', `../out/${OUT_DIR}`, dir];
+  const result = cp.spawnSync('gn', args, { env, stdio: 'inherit' });
+  if (result.status !== 0) process.exit(result.status);
 }

-process.exit(0)
+process.exit(0);
@@ -1,95 +1,95 @@
-const { GitProcess } = require('dugite')
-const fs = require('fs')
-const path = require('path')
+const { GitProcess } = require('dugite');
+const fs = require('fs');
+const path = require('path');

-const ELECTRON_DIR = path.resolve(__dirname, '..', '..')
-const SRC_DIR = path.resolve(ELECTRON_DIR, '..')
+const ELECTRON_DIR = path.resolve(__dirname, '..', '..');
+const SRC_DIR = path.resolve(ELECTRON_DIR, '..');

-const RELEASE_BRANCH_PATTERN = /(\d)+-(?:(?:[0-9]+-x$)|(?:x+-y$))/
+const RELEASE_BRANCH_PATTERN = /(\d)+-(?:(?:[0-9]+-x$)|(?:x+-y$))/;

-require('colors')
-const pass = '✓'.green
-const fail = '✗'.red
+require('colors');
+const pass = '✓'.green;
+const fail = '✗'.red;

 function getElectronExec () {
-  const OUT_DIR = getOutDir()
+  const OUT_DIR = getOutDir();
   switch (process.platform) {
     case 'darwin':
-      return `out/${OUT_DIR}/Electron.app/Contents/MacOS/Electron`
+      return `out/${OUT_DIR}/Electron.app/Contents/MacOS/Electron`;
     case 'win32':
-      return `out/${OUT_DIR}/electron.exe`
+      return `out/${OUT_DIR}/electron.exe`;
     case 'linux':
-      return `out/${OUT_DIR}/electron`
+      return `out/${OUT_DIR}/electron`;
     default:
-      throw new Error('Unknown platform')
+      throw new Error('Unknown platform');
   }
 }

 function getOutDir (options = {}) {
-  const shouldLog = options.shouldLog || false
-  const presetDirs = ['Testing', 'Release', 'Default', 'Debug']
+  const shouldLog = options.shouldLog || false;
+  const presetDirs = ['Testing', 'Release', 'Default', 'Debug'];

   if (options.outDir || process.env.ELECTRON_OUT_DIR) {
-    const outDir = options.outDir || process.env.ELECTRON_OUT_DIR
-    const outPath = path.resolve(SRC_DIR, 'out', outDir)
+    const outDir = options.outDir || process.env.ELECTRON_OUT_DIR;
+    const outPath = path.resolve(SRC_DIR, 'out', outDir);

     // Check that user-set variable is a valid/existing directory
     if (fs.existsSync(outPath)) {
-      if (shouldLog) console.log(`OUT_DIR is: ${outDir}`)
-      return outDir
+      if (shouldLog) console.log(`OUT_DIR is: ${outDir}`);
+      return outDir;
     }

     // Throw error if user passed/set nonexistent directory.
-    throw new Error(`${outDir} directory not configured on your machine.`)
+    throw new Error(`${outDir} directory not configured on your machine.`);
   } else {
     for (const buildType of presetDirs) {
-      const outPath = path.resolve(SRC_DIR, 'out', buildType)
+      const outPath = path.resolve(SRC_DIR, 'out', buildType);
       if (fs.existsSync(outPath)) {
-        if (shouldLog) console.log(`OUT_DIR is: ${buildType}`)
-        return buildType
+        if (shouldLog) console.log(`OUT_DIR is: ${buildType}`);
+        return buildType;
       }
     }
   }

   // If we got here, it means process.env.ELECTRON_OUT_DIR was not
   // set and none of the preset options could be found in /out, so throw
-  throw new Error(`No valid out directory found; use one of ${presetDirs.join(',')} or set process.env.ELECTRON_OUT_DIR`)
+  throw new Error(`No valid out directory found; use one of ${presetDirs.join(',')} or set process.env.ELECTRON_OUT_DIR`);
 }

 function getAbsoluteElectronExec () {
-  return path.resolve(SRC_DIR, getElectronExec())
+  return path.resolve(SRC_DIR, getElectronExec());
 }

 async function handleGitCall (args, gitDir) {
-  const details = await GitProcess.exec(args, gitDir)
+  const details = await GitProcess.exec(args, gitDir);
   if (details.exitCode === 0) {
-    return details.stdout.replace(/^\*|\s+|\s+$/, '')
+    return details.stdout.replace(/^\*|\s+|\s+$/, '');
   } else {
-    const error = GitProcess.parseError(details.stderr)
-    console.log(`${fail} couldn't parse git process call: `, error)
-    process.exit(1)
+    const error = GitProcess.parseError(details.stderr);
+    console.log(`${fail} couldn't parse git process call: `, error);
+    process.exit(1);
   }
 }

 async function getCurrentBranch (gitDir) {
-  let branch = await handleGitCall(['rev-parse', '--abbrev-ref', 'HEAD'], gitDir)
+  let branch = await handleGitCall(['rev-parse', '--abbrev-ref', 'HEAD'], gitDir);
   if (branch !== 'master' && !RELEASE_BRANCH_PATTERN.test(branch)) {
-    const lastCommit = await handleGitCall(['rev-parse', 'HEAD'], gitDir)
+    const lastCommit = await handleGitCall(['rev-parse', 'HEAD'], gitDir);
     const branches = (await handleGitCall([
       'branch',
       '--contains',
       lastCommit,
       '--remote'
-    ], gitDir)).split('\n')
+    ], gitDir)).split('\n');

-    branch = branches.filter(b => b.trim() === 'master' || b.trim() === 'origin/master' || RELEASE_BRANCH_PATTERN.test(b.trim()))[0]
+    branch = branches.filter(b => b.trim() === 'master' || b.trim() === 'origin/master' || RELEASE_BRANCH_PATTERN.test(b.trim()))[0];
     if (!branch) {
-      console.log(`${fail} no release branch exists for this ref`)
-      process.exit(1)
+      console.log(`${fail} no release branch exists for this ref`);
+      process.exit(1);
     }
-    if (branch.startsWith('origin/')) branch = branch.substr('origin/'.length)
+    if (branch.startsWith('origin/')) branch = branch.substr('origin/'.length);
   }
-  return branch.trim()
+  return branch.trim();
 }

 module.exports = {
@@ -99,4 +99,4 @@ module.exports = {
   getAbsoluteElectronExec,
   ELECTRON_DIR,
   SRC_DIR
-}
+};
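Several of the scripts in this commit consume these helpers; the pattern is just a relative require, e.g.:

const { getOutDir, getAbsoluteElectronExec } = require('./lib/utils');

// Picks an explicit outDir / ELECTRON_OUT_DIR, or probes Testing/Release/Default/Debug.
const outDir = getOutDir({ shouldLog: true });
console.log(outDir, getAbsoluteElectronExec());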
216 script/lint.js
@@ -1,14 +1,14 @@
 #!/usr/bin/env node

-const { GitProcess } = require('dugite')
-const childProcess = require('child_process')
-const fs = require('fs')
-const klaw = require('klaw')
-const minimist = require('minimist')
-const path = require('path')
+const { GitProcess } = require('dugite');
+const childProcess = require('child_process');
+const fs = require('fs');
+const klaw = require('klaw');
+const minimist = require('minimist');
+const path = require('path');

-const SOURCE_ROOT = path.normalize(path.dirname(__dirname))
-const DEPOT_TOOLS = path.resolve(SOURCE_ROOT, '..', 'third_party', 'depot_tools')
+const SOURCE_ROOT = path.normalize(path.dirname(__dirname));
+const DEPOT_TOOLS = path.resolve(SOURCE_ROOT, '..', 'third_party', 'depot_tools');

 const BLACKLIST = new Set([
   ['shell', 'browser', 'mac', 'atom_application.h'],
@@ -30,31 +30,31 @@ const BLACKLIST = new Set([
   ['spec', 'ts-smoke', 'electron', 'main.ts'],
   ['spec', 'ts-smoke', 'electron', 'renderer.ts'],
   ['spec', 'ts-smoke', 'runner.js']
-].map(tokens => path.join(SOURCE_ROOT, ...tokens)))
+].map(tokens => path.join(SOURCE_ROOT, ...tokens)));

 function spawnAndCheckExitCode (cmd, args, opts) {
-  opts = Object.assign({ stdio: 'inherit' }, opts)
-  const status = childProcess.spawnSync(cmd, args, opts).status
-  if (status) process.exit(status)
+  opts = Object.assign({ stdio: 'inherit' }, opts);
+  const status = childProcess.spawnSync(cmd, args, opts).status;
+  if (status) process.exit(status);
 }

 function cpplint (args) {
-  const result = childProcess.spawnSync('cpplint.py', args, { encoding: 'utf8' })
+  const result = childProcess.spawnSync('cpplint.py', args, { encoding: 'utf8' });
   // cpplint.py writes EVERYTHING to stderr, including status messages
   if (result.stderr) {
     for (const line of result.stderr.split(/[\r\n]+/)) {
       if (line.length && !line.startsWith('Done processing ') && line !== 'Total errors found: 0') {
-        console.warn(line)
+        console.warn(line);
       }
     }
   }
   if (result.status) {
-    process.exit(result.status)
+    process.exit(result.status);
   }
 }

 function isObjCHeader (filename) {
-  return /\/(mac|cocoa)\//.test(filename)
+  return /\/(mac|cocoa)\//.test(filename);
 }

 const LINTERS = [{
@@ -63,11 +63,11 @@ const LINTERS = [{
   test: filename => filename.endsWith('.cc') || (filename.endsWith('.h') && !isObjCHeader(filename)),
   run: (opts, filenames) => {
     if (opts.fix) {
-      spawnAndCheckExitCode('python', ['script/run-clang-format.py', '--fix', ...filenames])
+      spawnAndCheckExitCode('python', ['script/run-clang-format.py', '--fix', ...filenames]);
     } else {
-      spawnAndCheckExitCode('python', ['script/run-clang-format.py', ...filenames])
+      spawnAndCheckExitCode('python', ['script/run-clang-format.py', ...filenames]);
     }
-    cpplint(filenames)
+    cpplint(filenames);
   }
 }, {
   key: 'objc',
@@ -75,27 +75,27 @@ const LINTERS = [{
   test: filename => filename.endsWith('.mm'),
   run: (opts, filenames) => {
     if (opts.fix) {
-      spawnAndCheckExitCode('python', ['script/run-clang-format.py', '--fix', ...filenames])
+      spawnAndCheckExitCode('python', ['script/run-clang-format.py', '--fix', ...filenames]);
     } else {
-      spawnAndCheckExitCode('python', ['script/run-clang-format.py', ...filenames])
+      spawnAndCheckExitCode('python', ['script/run-clang-format.py', ...filenames]);
     }
     const filter = [
       '-readability/casting',
       '-whitespace/braces',
       '-whitespace/indent',
       '-whitespace/parens'
-    ]
-    cpplint(['--extensions=mm', `--filter=${filter.join(',')}`, ...filenames])
+    ];
+    cpplint(['--extensions=mm', `--filter=${filter.join(',')}`, ...filenames]);
   }
 }, {
   key: 'python',
   roots: ['script'],
   test: filename => filename.endsWith('.py'),
   run: (opts, filenames) => {
-    const rcfile = path.join(DEPOT_TOOLS, 'pylintrc')
-    const args = ['--rcfile=' + rcfile, ...filenames]
-    const env = Object.assign({ PYTHONPATH: path.join(SOURCE_ROOT, 'script') }, process.env)
-    spawnAndCheckExitCode('pylint.py', args, { env })
+    const rcfile = path.join(DEPOT_TOOLS, 'pylintrc');
+    const args = ['--rcfile=' + rcfile, ...filenames];
+    const env = Object.assign({ PYTHONPATH: path.join(SOURCE_ROOT, 'script') }, process.env);
+    spawnAndCheckExitCode('pylint.py', args, { env });
   }
 }, {
   key: 'javascript',
@@ -103,10 +103,10 @@ const LINTERS = [{
   ignoreRoots: ['spec/node_modules', 'spec-main/node_modules'],
   test: filename => filename.endsWith('.js') || filename.endsWith('.ts'),
   run: (opts, filenames) => {
-    const cmd = path.join(SOURCE_ROOT, 'node_modules', '.bin', 'eslint')
-    const args = ['--cache', '--ext', '.js,.ts', ...filenames]
-    if (opts.fix) args.unshift('--fix')
-    spawnAndCheckExitCode(cmd, args, { cwd: SOURCE_ROOT })
+    const cmd = path.join(SOURCE_ROOT, 'node_modules', '.bin', 'eslint');
+    const args = ['--cache', '--ext', '.js,.ts', ...filenames];
+    if (opts.fix) args.unshift('--fix');
+    spawnAndCheckExitCode(cmd, args, { cwd: SOURCE_ROOT });
   }
 }, {
   key: 'gn',
@@ -117,24 +117,24 @@ const LINTERS = [{
       const env = Object.assign({
         CHROMIUM_BUILDTOOLS_PATH: path.resolve(SOURCE_ROOT, '..', 'buildtools'),
        DEPOT_TOOLS_WIN_TOOLCHAIN: '0'
-      }, process.env)
+      }, process.env);
       // Users may not have depot_tools in PATH.
-      env.PATH = `${env.PATH}${path.delimiter}${DEPOT_TOOLS}`
-      const args = ['format', filename]
-      if (!opts.fix) args.push('--dry-run')
-      const result = childProcess.spawnSync('gn', args, { env, stdio: 'inherit', shell: true })
+      env.PATH = `${env.PATH}${path.delimiter}${DEPOT_TOOLS}`;
+      const args = ['format', filename];
+      if (!opts.fix) args.push('--dry-run');
+      const result = childProcess.spawnSync('gn', args, { env, stdio: 'inherit', shell: true });
       if (result.status === 0) {
-        return true
+        return true;
       } else if (result.status === 2) {
-        console.log(`GN format errors in "${filename}". Run 'gn format "${filename}"' or rerun with --fix to fix them.`)
-        return false
+        console.log(`GN format errors in "${filename}". Run 'gn format "${filename}"' or rerun with --fix to fix them.`);
+        return false;
       } else {
-        console.log(`Error running 'gn format --dry-run "${filename}"': exit code ${result.status}`)
-        return false
+        console.log(`Error running 'gn format --dry-run "${filename}"': exit code ${result.status}`);
+        return false;
       }
-    }).every(x => x)
+    }).every(x => x);
     if (!allOk) {
-      process.exit(1)
+      process.exit(1);
     }
   }
 }, {
@@ -142,167 +142,167 @@ const LINTERS = [{
   roots: ['patches'],
   test: () => true,
   run: (opts, filenames) => {
-    const patchesDir = path.resolve(__dirname, '../patches')
+    const patchesDir = path.resolve(__dirname, '../patches');
     for (const patchTarget of fs.readdirSync(patchesDir)) {
-      const targetDir = path.resolve(patchesDir, patchTarget)
+      const targetDir = path.resolve(patchesDir, patchTarget);
       // If the config does not exist that is OK, we just skip this dir
-      const targetConfig = path.resolve(targetDir, 'config.json')
-      if (!fs.existsSync(targetConfig)) continue
+      const targetConfig = path.resolve(targetDir, 'config.json');
+      if (!fs.existsSync(targetConfig)) continue;

-      const config = JSON.parse(fs.readFileSync(targetConfig, 'utf8'))
+      const config = JSON.parse(fs.readFileSync(targetConfig, 'utf8'));
       for (const key of Object.keys(config)) {
         // The directory the config points to should exist
-        const targetPatchesDir = path.resolve(__dirname, '../../..', key)
-        if (!fs.existsSync(targetPatchesDir)) throw new Error(`target patch directory: "${targetPatchesDir}" does not exist`)
+        const targetPatchesDir = path.resolve(__dirname, '../../..', key);
+        if (!fs.existsSync(targetPatchesDir)) throw new Error(`target patch directory: "${targetPatchesDir}" does not exist`);
         // We need a .patches file
-        const dotPatchesPath = path.resolve(targetPatchesDir, '.patches')
-        if (!fs.existsSync(dotPatchesPath)) throw new Error(`.patches file: "${dotPatchesPath}" does not exist`)
+        const dotPatchesPath = path.resolve(targetPatchesDir, '.patches');
+        if (!fs.existsSync(dotPatchesPath)) throw new Error(`.patches file: "${dotPatchesPath}" does not exist`);

         // Read the patch list
-        const patchFileList = fs.readFileSync(dotPatchesPath, 'utf8').trim().split('\n')
-        const patchFileSet = new Set(patchFileList)
+        const patchFileList = fs.readFileSync(dotPatchesPath, 'utf8').trim().split('\n');
+        const patchFileSet = new Set(patchFileList);
         patchFileList.reduce((seen, file) => {
           if (seen.has(file)) {
-            throw new Error(`'${file}' is listed in ${dotPatchesPath} more than once`)
+            throw new Error(`'${file}' is listed in ${dotPatchesPath} more than once`);
           }
-          return seen.add(file)
-        }, new Set())
-        if (patchFileList.length !== patchFileSet.size) throw new Error('each patch file should only be in the .patches file once')
+          return seen.add(file);
+        }, new Set());
+        if (patchFileList.length !== patchFileSet.size) throw new Error('each patch file should only be in the .patches file once');
         for (const file of fs.readdirSync(targetPatchesDir)) {
           // Ignore the .patches file and READMEs
-          if (file === '.patches' || file === 'README.md') continue
+          if (file === '.patches' || file === 'README.md') continue;

           if (!patchFileSet.has(file)) {
-            throw new Error(`Expected the .patches file at "${dotPatchesPath}" to contain a patch file ("${file}") present in the directory but it did not`)
+            throw new Error(`Expected the .patches file at "${dotPatchesPath}" to contain a patch file ("${file}") present in the directory but it did not`);
           }
-          patchFileSet.delete(file)
+          patchFileSet.delete(file);
         }

         // If anything is left in this set, it means it did not exist on disk
         if (patchFileSet.size > 0) {
-          throw new Error(`Expected all the patch files listed in the .patches file at "${dotPatchesPath}" to exist but some did not:\n${JSON.stringify([...patchFileSet.values()], null, 2)}`)
+          throw new Error(`Expected all the patch files listed in the .patches file at "${dotPatchesPath}" to exist but some did not:\n${JSON.stringify([...patchFileSet.values()], null, 2)}`);
         }
       }
     }

-    let ok = true
+    let ok = true;
     filenames.filter(f => f.endsWith('.patch')).forEach(f => {
-      const patchText = fs.readFileSync(f, 'utf8')
+      const patchText = fs.readFileSync(f, 'utf8');
       if (/^Subject: .*$\s+^diff/m.test(patchText)) {
-        console.warn(`Patch file '${f}' has no description. Every patch must contain a justification for why the patch exists and the plan for its removal.`)
-        ok = false
+        console.warn(`Patch file '${f}' has no description. Every patch must contain a justification for why the patch exists and the plan for its removal.`);
+        ok = false;
       }
-    })
+    });
     if (!ok) {
-      process.exit(1)
+      process.exit(1);
     }
   }
-}]
+}];

 function parseCommandLine () {
-  let help
+  let help;
   const opts = minimist(process.argv.slice(2), {
     boolean: ['c++', 'objc', 'javascript', 'python', 'gn', 'patches', 'help', 'changed', 'fix', 'verbose', 'only'],
     alias: { 'c++': ['cc', 'cpp', 'cxx'], javascript: ['js', 'es'], python: 'py', changed: 'c', help: 'h', verbose: 'v' },
-    unknown: arg => { help = true }
-  })
+    unknown: arg => { help = true; }
+  });
   if (help || opts.help) {
-    console.log('Usage: script/lint.js [--cc] [--js] [--py] [-c|--changed] [-h|--help] [-v|--verbose] [--fix] [--only -- file1 file2]')
-    process.exit(0)
+    console.log('Usage: script/lint.js [--cc] [--js] [--py] [-c|--changed] [-h|--help] [-v|--verbose] [--fix] [--only -- file1 file2]');
+    process.exit(0);
  }
-  return opts
+  return opts;
 }

 async function findChangedFiles (top) {
-  const result = await GitProcess.exec(['diff', '--name-only', '--cached'], top)
+  const result = await GitProcess.exec(['diff', '--name-only', '--cached'], top);
   if (result.exitCode !== 0) {
-    console.log('Failed to find changed files', GitProcess.parseError(result.stderr))
-    process.exit(1)
+    console.log('Failed to find changed files', GitProcess.parseError(result.stderr));
+    process.exit(1);
   }
-  const relativePaths = result.stdout.split(/\r\n|\r|\n/g)
-  const absolutePaths = relativePaths.map(x => path.join(top, x))
-  return new Set(absolutePaths)
+  const relativePaths = result.stdout.split(/\r\n|\r|\n/g);
+  const absolutePaths = relativePaths.map(x => path.join(top, x));
+  return new Set(absolutePaths);
 }

 async function findMatchingFiles (top, test) {
   return new Promise((resolve, reject) => {
-    const matches = []
+    const matches = [];
     klaw(top, {
       filter: f => path.basename(f) !== '.bin'
     })
       .on('end', () => resolve(matches))
       .on('data', item => {
         if (test(item.path)) {
-          matches.push(item.path)
+          matches.push(item.path);
         }
-      })
-  })
+      });
+  });
 }

 async function findFiles (args, linter) {
-  let filenames = []
-  let whitelist = null
+  let filenames = [];
+  let whitelist = null;

   // build the whitelist
   if (args.changed) {
-    whitelist = await findChangedFiles(SOURCE_ROOT)
+    whitelist = await findChangedFiles(SOURCE_ROOT);
     if (!whitelist.size) {
-      return []
+      return [];
     }
   } else if (args.only) {
-    whitelist = new Set(args._.map(p => path.resolve(p)))
+    whitelist = new Set(args._.map(p => path.resolve(p)));
   }

   // accumulate the raw list of files
   for (const root of linter.roots) {
-    const files = await findMatchingFiles(path.join(SOURCE_ROOT, root), linter.test)
-    filenames.push(...files)
+    const files = await findMatchingFiles(path.join(SOURCE_ROOT, root), linter.test);
+    filenames.push(...files);
   }

   for (const ignoreRoot of (linter.ignoreRoots) || []) {
-    const ignorePath = path.join(SOURCE_ROOT, ignoreRoot)
-    if (!fs.existsSync(ignorePath)) continue
+    const ignorePath = path.join(SOURCE_ROOT, ignoreRoot);
+    if (!fs.existsSync(ignorePath)) continue;

-    const ignoreFiles = new Set(await findMatchingFiles(ignorePath, linter.test))
-    filenames = filenames.filter(fileName => !ignoreFiles.has(fileName))
+    const ignoreFiles = new Set(await findMatchingFiles(ignorePath, linter.test));
+    filenames = filenames.filter(fileName => !ignoreFiles.has(fileName));
   }

   // remove blacklisted files
-  filenames = filenames.filter(x => !BLACKLIST.has(x))
+  filenames = filenames.filter(x => !BLACKLIST.has(x));

   // if a whitelist exists, remove anything not in it
   if (whitelist) {
-    filenames = filenames.filter(x => whitelist.has(x))
+    filenames = filenames.filter(x => whitelist.has(x));
   }

   // it's important that filenames be relative otherwise clang-format will
   // produce patches with absolute paths in them, which `git apply` will refuse
   // to apply.
-  return filenames.map(x => path.relative(SOURCE_ROOT, x))
+  return filenames.map(x => path.relative(SOURCE_ROOT, x));
 }

 async function main () {
-  const opts = parseCommandLine()
+  const opts = parseCommandLine();

   // no mode specified? run 'em all
   if (!opts['c++'] && !opts.javascript && !opts.objc && !opts.python && !opts.gn && !opts.patches) {
-    opts['c++'] = opts.javascript = opts.objc = opts.python = opts.gn = opts.patches = true
+    opts['c++'] = opts.javascript = opts.objc = opts.python = opts.gn = opts.patches = true;
   }

-  const linters = LINTERS.filter(x => opts[x.key])
+  const linters = LINTERS.filter(x => opts[x.key]);

   for (const linter of linters) {
-    const filenames = await findFiles(opts, linter)
+    const filenames = await findFiles(opts, linter);
     if (filenames.length) {
-      if (opts.verbose) { console.log(`linting ${filenames.length} ${linter.key} ${filenames.length === 1 ? 'file' : 'files'}`) }
-      linter.run(opts, filenames)
+      if (opts.verbose) { console.log(`linting ${filenames.length} ${linter.key} ${filenames.length === 1 ? 'file' : 'files'}`); }
+      linter.run(opts, filenames);
     }
   }
 }

 if (process.mainModule === module) {
   main().catch((error) => {
-    console.error(error)
-    process.exit(1)
-  })
+    console.error(error);
+    process.exit(1);
+  });
 }
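Example invocations, matching the usage string and minimist aliases above (file paths illustrative):

node script/lint.js --js --fix          # lint and auto-fix only JavaScript/TypeScript
node script/lint.js --changed           # restrict every linter to staged files
node script/lint.js --only -- lib/app.ts spec/api-app-spec.ts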
@@ -1,49 +1,49 @@
-const cp = require('child_process')
-const fs = require('fs')
-const path = require('path')
+const cp = require('child_process');
+const fs = require('fs');
+const path = require('path');

-const BASE = path.resolve(__dirname, '../..')
-const NAN_DIR = path.resolve(BASE, 'third_party', 'nan')
-const NPX_CMD = process.platform === 'win32' ? 'npx.cmd' : 'npx'
+const BASE = path.resolve(__dirname, '../..');
+const NAN_DIR = path.resolve(BASE, 'third_party', 'nan');
+const NPX_CMD = process.platform === 'win32' ? 'npx.cmd' : 'npx';

-const utils = require('./lib/utils')
-const { YARN_VERSION } = require('./yarn')
+const utils = require('./lib/utils');
+const { YARN_VERSION } = require('./yarn');

 if (!process.mainModule) {
-  throw new Error('Must call the nan spec runner directly')
+  throw new Error('Must call the nan spec runner directly');
 }

 async function main () {
-  const nodeDir = path.resolve(BASE, `out/${utils.getOutDir({ shouldLog: true })}/gen/node_headers`)
+  const nodeDir = path.resolve(BASE, `out/${utils.getOutDir({ shouldLog: true })}/gen/node_headers`);
   const env = Object.assign({}, process.env, {
     npm_config_nodedir: nodeDir,
     npm_config_msvs_version: '2019',
     npm_config_arch: process.env.NPM_CONFIG_ARCH
-  })
+  });
   const { status: buildStatus } = cp.spawnSync(NPX_CMD, ['node-gyp', 'rebuild', '--directory', 'test'], {
     env,
     cwd: NAN_DIR,
     stdio: 'inherit'
-  })
+  });
   if (buildStatus !== 0) {
-    console.error('Failed to build nan test modules')
-    return process.exit(buildStatus)
+    console.error('Failed to build nan test modules');
+    return process.exit(buildStatus);
   }

   const { status: installStatus } = cp.spawnSync(NPX_CMD, [`yarn@${YARN_VERSION}`, 'install'], {
     env,
     cwd: NAN_DIR,
     stdio: 'inherit'
-  })
+  });
   if (installStatus !== 0) {
-    console.error('Failed to install nan node_modules')
-    return process.exit(installStatus)
+    console.error('Failed to install nan node_modules');
+    return process.exit(installStatus);
   }

-  const DISABLED_TESTS = ['nannew-test.js']
+  const DISABLED_TESTS = ['nannew-test.js'];
   const testsToRun = fs.readdirSync(path.resolve(NAN_DIR, 'test', 'js'))
     .filter(test => !DISABLED_TESTS.includes(test))
-    .map(test => `test/js/${test}`)
+    .map(test => `test/js/${test}`);

   const testChild = cp.spawn(utils.getAbsoluteElectronExec(), ['node_modules/.bin/tap', ...testsToRun], {
     env: {
@@ -52,13 +52,13 @@ async function main () {
     },
     cwd: NAN_DIR,
     stdio: 'inherit'
-  })
+  });
   testChild.on('exit', (testCode) => {
-    process.exit(testCode)
-  })
+    process.exit(testCode);
+  });
 }

 main().catch((err) => {
-  console.error('An unhandled error occurred in the nan spec runner', err)
-  process.exit(1)
-})
+  console.error('An unhandled error occurred in the nan spec runner', err);
+  process.exit(1);
+});
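The runner takes no flags of its own and refuses to run when required as a module; it is invoked directly, with NPM_CONFIG_ARCH consulted when node-gyp builds the native test modules (the script path here is assumed from the repo layout, not shown in this excerpt):

NPM_CONFIG_ARCH=x64 node script/nan-spec-runner.js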
@ -1,24 +1,24 @@
const cp = require('child_process')
const fs = require('fs')
const path = require('path')
const cp = require('child_process');
const fs = require('fs');
const path = require('path');

const args = require('minimist')(process.argv.slice(2), {
boolean: ['default'],
string: ['jUnitDir']
})
});

const BASE = path.resolve(__dirname, '../..')
const DISABLED_TESTS = require('./node-disabled-tests.json')
const NODE_DIR = path.resolve(BASE, 'third_party', 'electron_node')
const NPX_CMD = process.platform === 'win32' ? 'npx.cmd' : 'npx'
const JUNIT_DIR = args.jUnitDir ? path.resolve(args.jUnitDir) : null
const TAP_FILE_NAME = 'test.tap'
const BASE = path.resolve(__dirname, '../..');
const DISABLED_TESTS = require('./node-disabled-tests.json');
const NODE_DIR = path.resolve(BASE, 'third_party', 'electron_node');
const NPX_CMD = process.platform === 'win32' ? 'npx.cmd' : 'npx';
const JUNIT_DIR = args.jUnitDir ? path.resolve(args.jUnitDir) : null;
const TAP_FILE_NAME = 'test.tap';

const utils = require('./lib/utils')
const { YARN_VERSION } = require('./yarn')
const utils = require('./lib/utils');
const { YARN_VERSION } = require('./yarn');

if (!process.mainModule) {
throw new Error('Must call the node spec runner directly')
throw new Error('Must call the node spec runner directly');
}

const defaultOptions = [
@ -33,15 +33,15 @@ const defaultOptions = [
'--shell',
utils.getAbsoluteElectronExec(),
'-J'
]
];

const getCustomOptions = () => {
let customOptions = ['tools/test.py']
let customOptions = ['tools/test.py'];

// Add all custom arguments.
const extra = process.argv.slice(2)
const extra = process.argv.slice(2);
if (extra) {
customOptions = customOptions.concat(extra)
customOptions = customOptions.concat(extra);
}

// We need this unilaterally or Node.js will try
@ -49,13 +49,13 @@ const getCustomOptions = () => {
customOptions = customOptions.concat([
'--shell',
utils.getAbsoluteElectronExec()
])
]);

return customOptions
}
return customOptions;
};

async function main () {
const options = args.default ? defaultOptions : getCustomOptions()
const options = args.default ? defaultOptions : getCustomOptions();

const testChild = cp.spawn('python', options, {
env: {
@ -65,23 +65,23 @@ async function main () {
},
cwd: NODE_DIR,
stdio: 'inherit'
})
});
testChild.on('exit', (testCode) => {
if (JUNIT_DIR) {
fs.mkdirSync(JUNIT_DIR)
const converterStream = require('tap-xunit')()
fs.mkdirSync(JUNIT_DIR);
const converterStream = require('tap-xunit')();
fs.createReadStream(
path.resolve(NODE_DIR, TAP_FILE_NAME)
).pipe(converterStream).pipe(
fs.createWriteStream(path.resolve(JUNIT_DIR, 'nodejs.xml'))
).on('close', () => {
process.exit(testCode)
})
process.exit(testCode);
});
}
})
});
}

main().catch((err) => {
console.error('An unhandled error occurred in the node spec runner', err)
process.exit(1)
})
console.error('An unhandled error occurred in the node spec runner', err);
process.exit(1);
});
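
The TAP-to-JUnit conversion in the exit handler above is a stream pipeline: read the TAP file, pipe it through tap-xunit, and write out the resulting XML. A minimal sketch of the same pattern in isolation (the file names here are illustrative assumptions, not repository paths):

const fs = require('fs');

// tap-xunit exports a factory that returns a transform stream.
const converter = require('tap-xunit')();

fs.createReadStream('test.tap')
.pipe(converter)
.pipe(fs.createWriteStream('results.xml'));
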
@ -1,18 +1,18 @@
if (!process.env.CI) require('dotenv-safe').load()
if (!process.env.CI) require('dotenv-safe').load();

const assert = require('assert')
const request = require('request')
const assert = require('assert');
const request = require('request');

const BUILD_APPVEYOR_URL = 'https://ci.appveyor.com/api/builds'
const CIRCLECI_PIPELINE_URL = 'https://circleci.com/api/v2/project/gh/electron/electron/pipeline'
const VSTS_URL = 'https://github.visualstudio.com/electron/_apis/build'
const CIRCLECI_WAIT_TIME = process.env.CIRCLECI_WAIT_TIME || 30000
const BUILD_APPVEYOR_URL = 'https://ci.appveyor.com/api/builds';
const CIRCLECI_PIPELINE_URL = 'https://circleci.com/api/v2/project/gh/electron/electron/pipeline';
const VSTS_URL = 'https://github.visualstudio.com/electron/_apis/build';
const CIRCLECI_WAIT_TIME = process.env.CIRCLECI_WAIT_TIME || 30000;

const appVeyorJobs = {
'electron-x64': 'electron-x64-release',
'electron-ia32': 'electron-ia32-release',
'electron-woa': 'electron-woa-release'
}
};

const circleCIJobs = [
'linux-arm-publish',
@ -21,50 +21,50 @@ const circleCIJobs = [
'linux-x64-publish',
'mas-publish',
'osx-publish'
]
];

const circleCIPublishWorkflows = [
'linux-publish',
'macos-publish'
]
];

const vstsArmJobs = [
'electron-arm-testing',
'electron-arm64-testing',
'electron-woa-testing'
]
];

let jobRequestedCount = 0
let jobRequestedCount = 0;

async function makeRequest (requestOptions, parseResponse) {
return new Promise((resolve, reject) => {
request(requestOptions, (err, res, body) => {
if (!err && res.statusCode >= 200 && res.statusCode < 300) {
if (parseResponse) {
const build = JSON.parse(body)
resolve(build)
const build = JSON.parse(body);
resolve(build);
} else {
resolve(body)
resolve(body);
}
} else {
console.error('Error occurred while requesting:', requestOptions.url)
console.error('Error occurred while requesting:', requestOptions.url);
if (parseResponse) {
try {
console.log('Error: ', `(status ${res.statusCode})`, err || JSON.parse(res.body))
console.log('Error: ', `(status ${res.statusCode})`, err || JSON.parse(res.body));
} catch (err) {
console.log('Error: ', `(status ${res.statusCode})`, res.body)
console.log('Error: ', `(status ${res.statusCode})`, res.body);
}
} else {
console.log('Error: ', `(status ${res.statusCode})`, err || res.body)
console.log('Error: ', `(status ${res.statusCode})`, err || res.body);
}
reject(err)
reject(err);
}
})
})
});
});
}

async function circleCIcall (targetBranch, job, options) {
console.log(`Triggering CircleCI to run build job: ${job} on branch: ${targetBranch} with release flag.`)
console.log(`Triggering CircleCI to run build job: ${job} on branch: ${targetBranch} with release flag.`);
const buildRequest = {
branch: targetBranch,
parameters: {
@ -72,82 +72,82 @@ async function circleCIcall (targetBranch, job, options) {
'run-build-linux': false,
'run-build-mac': false
}
}
};
if (options.ghRelease) {
buildRequest.parameters['upload-to-s3'] = '0'
buildRequest.parameters['upload-to-s3'] = '0';
} else {
buildRequest.parameters['upload-to-s3'] = '1'
buildRequest.parameters['upload-to-s3'] = '1';
}
buildRequest.parameters[`run-${job}`] = true
jobRequestedCount++
buildRequest.parameters[`run-${job}`] = true;
jobRequestedCount++;
// The logic below expects that the CircleCI workflows for releases each
// contain only one job in order to maintain compatibility with sudowoodo.
// If the workflows are changed in the CircleCI config.yml, this logic will
// also need to be changed as well as possibly changing sudowoodo.
try {
const circleResponse = await circleCIRequest(CIRCLECI_PIPELINE_URL, 'POST', buildRequest)
console.log(`CircleCI release build pipeline ${circleResponse.id} for ${job} triggered.`)
const pipelineInfoUrl = `https://circleci.com/api/v2/pipeline/${circleResponse.id}`
const workflowId = await getCircleCIWorkflowId(circleResponse.id)
const circleResponse = await circleCIRequest(CIRCLECI_PIPELINE_URL, 'POST', buildRequest);
console.log(`CircleCI release build pipeline ${circleResponse.id} for ${job} triggered.`);
const pipelineInfoUrl = `https://circleci.com/api/v2/pipeline/${circleResponse.id}`;
const workflowId = await getCircleCIWorkflowId(circleResponse.id);
if (workflowId === -1) {
return
return;
}
const workFlowUrl = `https://circleci.com/workflow-run/${workflowId}`
const workFlowUrl = `https://circleci.com/workflow-run/${workflowId}`;
if (options.runningPublishWorkflows) {
console.log(`CircleCI release workflow request for ${job} successful. Check ${workFlowUrl} for status.`)
console.log(`CircleCI release workflow request for ${job} successful. Check ${workFlowUrl} for status.`);
} else {
console.log(`CircleCI release build workflow running at https://circleci.com/workflow-run/${workflowId} for ${job}.`)
const jobNumber = await getCircleCIJobNumber(workflowId)
console.log(`CircleCI release build workflow running at https://circleci.com/workflow-run/${workflowId} for ${job}.`);
const jobNumber = await getCircleCIJobNumber(workflowId);
if (jobNumber === -1) {
return
return;
}
const jobUrl = `https://circleci.com/gh/electron/electron/${jobNumber}`
console.log(`CircleCI release build request for ${job} successful. Check ${jobUrl} for status.`)
const jobUrl = `https://circleci.com/gh/electron/electron/${jobNumber}`;
console.log(`CircleCI release build request for ${job} successful. Check ${jobUrl} for status.`);
}
} catch (err) {
console.log('Error calling CircleCI: ', err)
console.log('Error calling CircleCI: ', err);
}
}

async function getCircleCIWorkflowId (pipelineId) {
const pipelineInfoUrl = `https://circleci.com/api/v2/pipeline/${pipelineId}`
let workflowId = 0
const pipelineInfoUrl = `https://circleci.com/api/v2/pipeline/${pipelineId}`;
let workflowId = 0;
while (workflowId === 0) {
const pipelineInfo = await circleCIRequest(pipelineInfoUrl, 'GET')
const pipelineInfo = await circleCIRequest(pipelineInfoUrl, 'GET');
switch (pipelineInfo.state) {
case 'created': {
const workflows = await circleCIRequest(`${pipelineInfoUrl}/workflow`, 'GET')
const workflows = await circleCIRequest(`${pipelineInfoUrl}/workflow`, 'GET');
if (workflows.items.length === 1) {
workflowId = workflows.items[0].id
break
workflowId = workflows.items[0].id;
break;
}
console.log('Unexpected number of workflows, response was:', pipelineInfo)
workflowId = -1
break
console.log('Unexpected number of workflows, response was:', pipelineInfo);
workflowId = -1;
break;
}
case 'error': {
console.log('Error retrieving workflows, response was:', pipelineInfo)
workflowId = -1
break
console.log('Error retrieving workflows, response was:', pipelineInfo);
workflowId = -1;
break;
}
}
await new Promise(resolve => setTimeout(resolve, CIRCLECI_WAIT_TIME))
await new Promise(resolve => setTimeout(resolve, CIRCLECI_WAIT_TIME));
}
return workflowId
return workflowId;
}

async function getCircleCIJobNumber (workflowId) {
const jobInfoUrl = `https://circleci.com/api/v2/workflow/${workflowId}/job`
let jobNumber = 0
const jobInfoUrl = `https://circleci.com/api/v2/workflow/${workflowId}/job`;
let jobNumber = 0;
while (jobNumber === 0) {
const jobInfo = await circleCIRequest(jobInfoUrl, 'GET')
const jobInfo = await circleCIRequest(jobInfoUrl, 'GET');
if (!jobInfo.items) {
continue
continue;
}
if (jobInfo.items.length !== 1) {
console.log('Unexpected number of jobs, response was:', jobInfo)
jobNumber = -1
break
console.log('Unexpected number of jobs, response was:', jobInfo);
jobNumber = -1;
break;
}

switch (jobInfo.items[0].status) {
@ -155,9 +155,9 @@ async function getCircleCIJobNumber (workflowId) {
case 'queued':
case 'running': {
if (jobInfo.items[0].job_number && !isNaN(jobInfo.items[0].job_number)) {
jobNumber = jobInfo.items[0].job_number
jobNumber = jobInfo.items[0].job_number;
}
break
break;
}
case 'canceled':
case 'error':
@ -165,14 +165,14 @@ async function getCircleCIJobNumber (workflowId) {
case 'timedout':
case 'not_run':
case 'failed': {
console.log(`Error job returned a status of ${jobInfo.items[0].status}, response was:`, jobInfo)
jobNumber = -1
break
console.log(`Error job returned a status of ${jobInfo.items[0].status}, response was:`, jobInfo);
jobNumber = -1;
break;
}
}
await new Promise(resolve => setTimeout(resolve, CIRCLECI_WAIT_TIME))
await new Promise(resolve => setTimeout(resolve, CIRCLECI_WAIT_TIME));
}
return jobNumber
return jobNumber;
}

async function circleCIRequest (url, method, requestBody) {
@ -189,28 +189,28 @@ async function circleCIRequest (url, method, requestBody) {
},
body: requestBody ? JSON.stringify(requestBody) : null
}, true).catch(err => {
console.log('Error calling CircleCI:', err)
})
console.log('Error calling CircleCI:', err);
});
}

function buildAppVeyor (targetBranch, options) {
const validJobs = Object.keys(appVeyorJobs)
const validJobs = Object.keys(appVeyorJobs);
if (options.job) {
assert(validJobs.includes(options.job), `Unknown AppVeyor CI job name: ${options.job}. Valid values are: ${validJobs}.`)
callAppVeyor(targetBranch, options.job, options)
assert(validJobs.includes(options.job), `Unknown AppVeyor CI job name: ${options.job}. Valid values are: ${validJobs}.`);
callAppVeyor(targetBranch, options.job, options);
} else {
validJobs.forEach((job) => callAppVeyor(targetBranch, job, options))
validJobs.forEach((job) => callAppVeyor(targetBranch, job, options));
}
}

async function callAppVeyor (targetBranch, job, options) {
console.log(`Triggering AppVeyor to run build job: ${job} on branch: ${targetBranch} with release flag.`)
console.log(`Triggering AppVeyor to run build job: ${job} on branch: ${targetBranch} with release flag.`);
const environmentVariables = {
ELECTRON_RELEASE: 1
}
};

if (!options.ghRelease) {
environmentVariables.UPLOAD_TO_S3 = 1
environmentVariables.UPLOAD_TO_S3 = 1;
}

const requestOpts = {
@ -228,44 +228,44 @@ async function callAppVeyor (targetBranch, job, options) {
environmentVariables
}),
method: 'POST'
}
jobRequestedCount++
};
jobRequestedCount++;
const appVeyorResponse = await makeRequest(requestOpts, true).catch(err => {
console.log('Error calling AppVeyor:', err)
})
const buildUrl = `https://ci.appveyor.com/project/electron-bot/${appVeyorJobs[job]}/build/${appVeyorResponse.version}`
console.log(`AppVeyor release build request for ${job} successful. Check build status at ${buildUrl}`)
console.log('Error calling AppVeyor:', err);
});
const buildUrl = `https://ci.appveyor.com/project/electron-bot/${appVeyorJobs[job]}/build/${appVeyorResponse.version}`;
console.log(`AppVeyor release build request for ${job} successful. Check build status at ${buildUrl}`);
}

function buildCircleCI (targetBranch, options) {
if (options.job) {
assert(circleCIJobs.includes(options.job), `Unknown CircleCI job name: ${options.job}. Valid values are: ${circleCIJobs}.`)
circleCIcall(targetBranch, options.job, options)
assert(circleCIJobs.includes(options.job), `Unknown CircleCI job name: ${options.job}. Valid values are: ${circleCIJobs}.`);
circleCIcall(targetBranch, options.job, options);
} else {
options.runningPublishWorkflows = true
circleCIPublishWorkflows.forEach((job) => circleCIcall(targetBranch, job, options))
options.runningPublishWorkflows = true;
circleCIPublishWorkflows.forEach((job) => circleCIcall(targetBranch, job, options));
}
}

async function buildVSTS (targetBranch, options) {
if (options.armTest) {
assert(vstsArmJobs.includes(options.job), `Unknown VSTS CI arm test job name: ${options.job}. Valid values are: ${vstsArmJobs}.`)
assert(vstsArmJobs.includes(options.job), `Unknown VSTS CI arm test job name: ${options.job}. Valid values are: ${vstsArmJobs}.`);
}

console.log(`Triggering VSTS to run build on branch: ${targetBranch} with release flag.`)
console.log(`Triggering VSTS to run build on branch: ${targetBranch} with release flag.`);
const environmentVariables = {
ELECTRON_RELEASE: 1
}
};

if (options.armTest) {
if (options.circleBuildNum) {
environmentVariables.CIRCLE_BUILD_NUM = options.circleBuildNum
environmentVariables.CIRCLE_BUILD_NUM = options.circleBuildNum;
} else if (options.appveyorJobId) {
environmentVariables.APPVEYOR_JOB_ID = options.appveyorJobId
environmentVariables.APPVEYOR_JOB_ID = options.appveyorJobId;
}
} else {
if (!options.ghRelease) {
environmentVariables.UPLOAD_TO_S3 = 1
environmentVariables.UPLOAD_TO_S3 = 1;
}
}

@ -278,12 +278,12 @@ async function buildVSTS (targetBranch, options) {
headers: {
'Content-Type': 'application/json'
}
}
};
const vstsResponse = await makeRequest(requestOpts, true).catch(err => {
console.log('Error calling VSTS to get build definitions:', err)
})
const buildsToRun = vstsResponse.value.filter(build => build.name === options.job)
buildsToRun.forEach((build) => callVSTSBuild(build, targetBranch, environmentVariables))
console.log('Error calling VSTS to get build definitions:', err);
});
const buildsToRun = vstsResponse.value.filter(build => build.name === options.job);
buildsToRun.forEach((build) => callVSTSBuild(build, targetBranch, environmentVariables));
}

async function callVSTSBuild (build, targetBranch, environmentVariables) {
@ -291,9 +291,9 @@ async function callVSTSBuild (build, targetBranch, environmentVariables) {
definition: build,
sourceBranch: targetBranch,
priority: 'high'
}
};
if (Object.keys(environmentVariables).length !== 0) {
buildBody.parameters = JSON.stringify(environmentVariables)
buildBody.parameters = JSON.stringify(environmentVariables);
}
const requestOpts = {
url: `${VSTS_URL}/builds?api-version=4.1`,
@ -306,54 +306,54 @@ async function callVSTSBuild (build, targetBranch, environmentVariables) {
},
body: JSON.stringify(buildBody),
method: 'POST'
}
jobRequestedCount++
};
jobRequestedCount++;
const vstsResponse = await makeRequest(requestOpts, true).catch(err => {
console.log(`Error calling VSTS for job ${build.name}`, err)
})
console.log(`VSTS release build request for ${build.name} successful. Check ${vstsResponse._links.web.href} for status.`)
console.log(`Error calling VSTS for job ${build.name}`, err);
});
console.log(`VSTS release build request for ${build.name} successful. Check ${vstsResponse._links.web.href} for status.`);
}

function runRelease (targetBranch, options) {
if (options.ci) {
switch (options.ci) {
case 'CircleCI': {
buildCircleCI(targetBranch, options)
break
buildCircleCI(targetBranch, options);
break;
}
case 'AppVeyor': {
buildAppVeyor(targetBranch, options)
break
buildAppVeyor(targetBranch, options);
break;
}
case 'VSTS': {
buildVSTS(targetBranch, options)
break
buildVSTS(targetBranch, options);
break;
}
default: {
console.log(`Error! Unknown CI: ${options.ci}.`)
process.exit(1)
console.log(`Error! Unknown CI: ${options.ci}.`);
process.exit(1);
}
}
} else {
buildCircleCI(targetBranch, options)
buildAppVeyor(targetBranch, options)
buildCircleCI(targetBranch, options);
buildAppVeyor(targetBranch, options);
}
console.log(`${jobRequestedCount} jobs were requested.`)
console.log(`${jobRequestedCount} jobs were requested.`);
}

module.exports = runRelease
module.exports = runRelease;

if (require.main === module) {
const args = require('minimist')(process.argv.slice(2), {
boolean: ['ghRelease', 'armTest']
})
const targetBranch = args._[0]
});
const targetBranch = args._[0];
if (args._.length < 1) {
console.log(`Trigger CI to build release builds of electron.
Usage: ci-release-build.js [--job=CI_JOB_NAME] [--ci=CircleCI|AppVeyor|VSTS]
[--ghRelease] [--armTest] [--circleBuildNum=xxx] [--appveyorJobId=xxx] TARGET_BRANCH
`)
process.exit(0)
`);
process.exit(0);
}
runRelease(targetBranch, args)
runRelease(targetBranch, args);
}
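
Since the file also exports runRelease, the same release trigger can be driven programmatically. A hedged sketch, assuming a local require path (the path and branch are illustrative; the option names mirror the CLI flags in the usage text above, and the job name is one of the circleCIJobs entries):

// Trigger a single CircleCI publish job for a branch, skipping the S3 upload.
const runRelease = require('./ci-release-build');
runRelease('master', { ci: 'CircleCI', job: 'linux-x64-publish', ghRelease: true });
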
@ -1,38 +1,38 @@
if (!process.env.CI) require('dotenv-safe').load()
if (!process.env.CI) require('dotenv-safe').load();

const octokit = require('@octokit/rest')({
auth: process.env.ELECTRON_GITHUB_TOKEN
})
});

if (process.argv.length < 3) {
console.log('Usage: find-release version')
process.exit(1)
console.log('Usage: find-release version');
process.exit(1);
}

const version = process.argv[2]
const version = process.argv[2];

async function findRelease () {
const releases = await octokit.repos.listReleases({
owner: 'electron',
repo: version.indexOf('nightly') > 0 ? 'nightlies' : 'electron'
})
});

const targetRelease = releases.data.find(release => release.tag_name === version)
let returnObject = {}
const targetRelease = releases.data.find(release => release.tag_name === version);
let returnObject = {};

if (targetRelease) {
returnObject = {
id: targetRelease.id,
draft: targetRelease.draft,
exists: true
}
};
} else {
returnObject = {
exists: false,
draft: false
}
};
}
console.log(JSON.stringify(returnObject))
console.log(JSON.stringify(returnObject));
}

findRelease()
findRelease();
@ -1,53 +1,53 @@
#!/usr/bin/env node

const { GitProcess } = require('dugite')
const minimist = require('minimist')
const path = require('path')
const semver = require('semver')
const { GitProcess } = require('dugite');
const minimist = require('minimist');
const path = require('path');
const semver = require('semver');

const { ELECTRON_DIR } = require('../../lib/utils')
const notesGenerator = require('./notes.js')
const { ELECTRON_DIR } = require('../../lib/utils');
const notesGenerator = require('./notes.js');

const semverify = version => version.replace(/^origin\//, '').replace('x', '0').replace(/-/g, '.')
const semverify = version => version.replace(/^origin\//, '').replace('x', '0').replace(/-/g, '.');

const runGit = async (args) => {
const response = await GitProcess.exec(args, ELECTRON_DIR)
const response = await GitProcess.exec(args, ELECTRON_DIR);
if (response.exitCode !== 0) {
throw new Error(response.stderr.trim())
throw new Error(response.stderr.trim());
}
return response.stdout.trim()
}
return response.stdout.trim();
};

const tagIsSupported = tag => tag && !tag.includes('nightly') && !tag.includes('unsupported')
const tagIsBeta = tag => tag.includes('beta')
const tagIsStable = tag => tagIsSupported(tag) && !tagIsBeta(tag)
const tagIsSupported = tag => tag && !tag.includes('nightly') && !tag.includes('unsupported');
const tagIsBeta = tag => tag.includes('beta');
const tagIsStable = tag => tagIsSupported(tag) && !tagIsBeta(tag);

const getTagsOf = async (point) => {
return (await runGit(['tag', '--merged', point]))
.split('\n')
.map(tag => tag.trim())
.filter(tag => semver.valid(tag))
.sort(semver.compare)
}
.sort(semver.compare);
};

const getTagsOnBranch = async (point) => {
const masterTags = await getTagsOf('master')
const masterTags = await getTagsOf('master');
if (point === 'master') {
return masterTags
return masterTags;
}

const masterTagsSet = new Set(masterTags)
return (await getTagsOf(point)).filter(tag => !masterTagsSet.has(tag))
}
const masterTagsSet = new Set(masterTags);
return (await getTagsOf(point)).filter(tag => !masterTagsSet.has(tag));
};

const getBranchOf = async (point) => {
const branches = (await runGit(['branch', '-a', '--contains', point]))
.split('\n')
.map(branch => branch.trim())
.filter(branch => !!branch)
const current = branches.find(branch => branch.startsWith('* '))
return current ? current.slice(2) : branches.shift()
}
.filter(branch => !!branch);
const current = branches.find(branch => branch.startsWith('* '));
return current ? current.slice(2) : branches.shift();
};

const getAllBranches = async () => {
return (await runGit(['branch', '--remote']))
@ -55,101 +55,101 @@ const getAllBranches = async () => {
.map(branch => branch.trim())
.filter(branch => !!branch)
.filter(branch => branch !== 'origin/HEAD -> origin/master')
.sort()
}
.sort();
};

const getStabilizationBranches = async () => {
return (await getAllBranches())
.filter(branch => /^origin\/\d+-\d+-x$/.test(branch))
}
.filter(branch => /^origin\/\d+-\d+-x$/.test(branch));
};

const getPreviousStabilizationBranch = async (current) => {
const stabilizationBranches = (await getStabilizationBranches())
.filter(branch => branch !== current && branch !== `origin/${current}`)
.filter(branch => branch !== current && branch !== `origin/${current}`);

if (!semver.valid(current)) {
// since we don't seem to be on a stabilization branch right now,
// pick a placeholder name that will yield the newest branch
// as a comparison point.
current = 'v999.999.999'
current = 'v999.999.999';
}

let newestMatch = null
let newestMatch = null;
for (const branch of stabilizationBranches) {
if (semver.gte(semverify(branch), semverify(current))) {
continue
continue;
}
if (newestMatch && semver.lte(semverify(branch), semverify(newestMatch))) {
continue
continue;
}
newestMatch = branch
newestMatch = branch;
}
return newestMatch
}
return newestMatch;
};

const getPreviousPoint = async (point) => {
const currentBranch = await getBranchOf(point)
const currentTag = (await getTagsOf(point)).filter(tag => tagIsSupported(tag)).pop()
const currentIsStable = tagIsStable(currentTag)
const currentBranch = await getBranchOf(point);
const currentTag = (await getTagsOf(point)).filter(tag => tagIsSupported(tag)).pop();
const currentIsStable = tagIsStable(currentTag);

try {
// First see if there's an earlier tag on the same branch
// that can serve as a reference point.
let tags = (await getTagsOnBranch(`${point}^`)).filter(tag => tagIsSupported(tag))
let tags = (await getTagsOnBranch(`${point}^`)).filter(tag => tagIsSupported(tag));
if (currentIsStable) {
tags = tags.filter(tag => tagIsStable(tag))
tags = tags.filter(tag => tagIsStable(tag));
}
if (tags.length) {
return tags.pop()
return tags.pop();
}
} catch (error) {
console.log('error', error)
console.log('error', error);
}

// Otherwise, use the newest stable release that precedes this branch.
// To reach that you may have to walk past >1 branch, e.g. to get past
// 2-1-x which never had a stable release.
let branch = currentBranch
let branch = currentBranch;
while (branch) {
const prevBranch = await getPreviousStabilizationBranch(branch)
const tags = (await getTagsOnBranch(prevBranch)).filter(tag => tagIsStable(tag))
const prevBranch = await getPreviousStabilizationBranch(branch);
const tags = (await getTagsOnBranch(prevBranch)).filter(tag => tagIsStable(tag));
if (tags.length) {
return tags.pop()
return tags.pop();
}
branch = prevBranch
branch = prevBranch;
}
}
};

async function getReleaseNotes (range, newVersion, explicitLinks) {
const rangeList = range.split('..') || ['HEAD']
const to = rangeList.pop()
const from = rangeList.pop() || (await getPreviousPoint(to))
const rangeList = range.split('..') || ['HEAD'];
const to = rangeList.pop();
const from = rangeList.pop() || (await getPreviousPoint(to));

if (!newVersion) {
newVersion = to
newVersion = to;
}

console.log(`Generating release notes between ${from} and ${to} for version ${newVersion}`)
const notes = await notesGenerator.get(from, to, newVersion)
console.log(`Generating release notes between ${from} and ${to} for version ${newVersion}`);
const notes = await notesGenerator.get(from, to, newVersion);
const ret = {
text: notesGenerator.render(notes, explicitLinks)
}
};

if (notes.unknown.length) {
ret.warning = `You have ${notes.unknown.length} unknown release notes. Please fix them before releasing.`
ret.warning = `You have ${notes.unknown.length} unknown release notes. Please fix them before releasing.`;
}

return ret
return ret;
}

async function main () {
const opts = minimist(process.argv.slice(2), {
boolean: ['explicit-links', 'help'],
string: ['version']
})
opts.range = opts._.shift()
});
opts.range = opts._.shift();
if (opts.help || !opts.range) {
const name = path.basename(process.argv[1])
const name = path.basename(process.argv[1]);
console.log(`
easy usage: ${name} version

@ -165,22 +165,22 @@ full usage: ${name} [begin..]end [--version version] [--explicit-links]
For example, these invocations are equivalent:
${process.argv[1]} v4.0.1
${process.argv[1]} v4.0.0..v4.0.1 --version v4.0.1
`)
return 0
`);
return 0;
}

const notes = await getReleaseNotes(opts.range, opts.version, opts['explicit-links'])
console.log(notes.text)
const notes = await getReleaseNotes(opts.range, opts.version, opts['explicit-links']);
console.log(notes.text);
if (notes.warning) {
throw new Error(notes.warning)
throw new Error(notes.warning);
}
}

if (process.mainModule === module) {
main().catch((err) => {
console.error('Error Occurred:', err)
process.exit(1)
})
console.error('Error Occurred:', err);
process.exit(1);
});
}

module.exports = getReleaseNotes
module.exports = getReleaseNotes;
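
Because getReleaseNotes is exported, the generator can also be called from other scripts. A hedged sketch, assuming a local require path (the path is illustrative; the range and version follow the equivalence shown in the script's own help text above):

const getReleaseNotes = require('./release-notes');

// Generate the notes for an explicit range, then print the rendered text.
getReleaseNotes('v4.0.0..v4.0.1', 'v4.0.1')
.then(notes => console.log(notes.text));
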
@ -1,75 +1,75 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
const childProcess = require('child_process')
|
||||
const fs = require('fs')
|
||||
const os = require('os')
|
||||
const path = require('path')
|
||||
const childProcess = require('child_process');
|
||||
const fs = require('fs');
|
||||
const os = require('os');
|
||||
const path = require('path');
|
||||
|
||||
const { GitProcess } = require('dugite')
|
||||
const { GitProcess } = require('dugite');
|
||||
const octokit = require('@octokit/rest')({
|
||||
auth: process.env.ELECTRON_GITHUB_TOKEN
|
||||
})
|
||||
const semver = require('semver')
|
||||
});
|
||||
const semver = require('semver');
|
||||
|
||||
const { ELECTRON_VERSION, SRC_DIR } = require('../../lib/utils')
|
||||
const { ELECTRON_VERSION, SRC_DIR } = require('../../lib/utils');
|
||||
|
||||
const MAX_FAIL_COUNT = 3
|
||||
const CHECK_INTERVAL = 5000
|
||||
const MAX_FAIL_COUNT = 3;
|
||||
const CHECK_INTERVAL = 5000;
|
||||
|
||||
const CACHE_DIR = path.resolve(__dirname, '.cache')
|
||||
const NO_NOTES = 'No notes'
|
||||
const FOLLOW_REPOS = ['electron/electron', 'electron/node']
|
||||
const CACHE_DIR = path.resolve(__dirname, '.cache');
|
||||
const NO_NOTES = 'No notes';
|
||||
const FOLLOW_REPOS = ['electron/electron', 'electron/node'];
|
||||
|
||||
const breakTypes = new Set(['breaking-change'])
|
||||
const docTypes = new Set(['doc', 'docs'])
|
||||
const featTypes = new Set(['feat', 'feature'])
|
||||
const fixTypes = new Set(['fix'])
|
||||
const otherTypes = new Set(['spec', 'build', 'test', 'chore', 'deps', 'refactor', 'tools', 'vendor', 'perf', 'style', 'ci'])
|
||||
const knownTypes = new Set([...breakTypes.keys(), ...docTypes.keys(), ...featTypes.keys(), ...fixTypes.keys(), ...otherTypes.keys()])
|
||||
const breakTypes = new Set(['breaking-change']);
|
||||
const docTypes = new Set(['doc', 'docs']);
|
||||
const featTypes = new Set(['feat', 'feature']);
|
||||
const fixTypes = new Set(['fix']);
|
||||
const otherTypes = new Set(['spec', 'build', 'test', 'chore', 'deps', 'refactor', 'tools', 'vendor', 'perf', 'style', 'ci']);
|
||||
const knownTypes = new Set([...breakTypes.keys(), ...docTypes.keys(), ...featTypes.keys(), ...fixTypes.keys(), ...otherTypes.keys()]);
|
||||
|
||||
const runGit = async (dir, args) => {
|
||||
const response = await GitProcess.exec(args, dir)
|
||||
const response = await GitProcess.exec(args, dir);
|
||||
if (response.exitCode !== 0) {
|
||||
throw new Error(response.stderr.trim())
|
||||
throw new Error(response.stderr.trim());
|
||||
}
|
||||
return response.stdout.trim()
|
||||
}
|
||||
return response.stdout.trim();
|
||||
};
|
||||
|
||||
const getCommonAncestor = async (dir, point1, point2) => {
|
||||
return runGit(dir, ['merge-base', point1, point2])
|
||||
}
|
||||
return runGit(dir, ['merge-base', point1, point2]);
|
||||
};
|
||||
|
||||
const setPullRequest = (commit, owner, repo, number) => {
|
||||
if (!owner || !repo || !number) {
|
||||
throw new Error(JSON.stringify({ owner, repo, number }, null, 2))
|
||||
throw new Error(JSON.stringify({ owner, repo, number }, null, 2));
|
||||
}
|
||||
|
||||
if (!commit.originalPr) {
|
||||
commit.originalPr = commit.pr
|
||||
commit.originalPr = commit.pr;
|
||||
}
|
||||
|
||||
commit.pr = { owner, repo, number }
|
||||
commit.pr = { owner, repo, number };
|
||||
|
||||
if (!commit.originalPr) {
|
||||
commit.originalPr = commit.pr
|
||||
commit.originalPr = commit.pr;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const getNoteFromClerk = async (number, owner, repo) => {
|
||||
const comments = await getComments(number, owner, repo)
|
||||
if (!comments || !comments.data) return
|
||||
const comments = await getComments(number, owner, repo);
|
||||
if (!comments || !comments.data) return;
|
||||
|
||||
const CLERK_LOGIN = 'release-clerk[bot]'
|
||||
const CLERK_NO_NOTES = '**No Release Notes**'
|
||||
const PERSIST_LEAD = '**Release Notes Persisted**\n\n'
|
||||
const QUOTE_LEAD = '> '
|
||||
const CLERK_LOGIN = 'release-clerk[bot]';
|
||||
const CLERK_NO_NOTES = '**No Release Notes**';
|
||||
const PERSIST_LEAD = '**Release Notes Persisted**\n\n';
|
||||
const QUOTE_LEAD = '> ';
|
||||
|
||||
for (const comment of comments.data.reverse()) {
|
||||
if (comment.user.login !== CLERK_LOGIN) {
|
||||
continue
|
||||
continue;
|
||||
}
|
||||
if (comment.body === CLERK_NO_NOTES) {
|
||||
return NO_NOTES
|
||||
return NO_NOTES;
|
||||
}
|
||||
if (comment.body.startsWith(PERSIST_LEAD)) {
|
||||
return comment.body
|
||||
|
@ -79,10 +79,10 @@ const getNoteFromClerk = async (number, owner, repo) => {
|
|||
.filter(line => line.startsWith(QUOTE_LEAD)) // notes are quoted
|
||||
.map(line => line.slice(QUOTE_LEAD.length)) // unquote the lines
|
||||
.join(' ') // join the note lines
|
||||
.trim()
|
||||
.trim();
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// copied from https://github.com/electron/clerk/blob/master/src/index.ts#L4-L13
|
||||
const OMIT_FROM_RELEASE_NOTES_KEYS = [
|
||||
|
@ -94,36 +94,36 @@ const OMIT_FROM_RELEASE_NOTES_KEYS = [
|
|||
'nothing',
|
||||
'empty',
|
||||
'blank'
|
||||
]
|
||||
];
|
||||
|
||||
const getNoteFromBody = body => {
|
||||
if (!body) {
|
||||
return null
|
||||
return null;
|
||||
}
|
||||
|
||||
const NOTE_PREFIX = 'Notes: '
|
||||
const NOTE_HEADER = '#### Release Notes'
|
||||
const NOTE_PREFIX = 'Notes: ';
|
||||
const NOTE_HEADER = '#### Release Notes';
|
||||
|
||||
let note = body
|
||||
.split(/\r?\n\r?\n/) // split into paragraphs
|
||||
.map(paragraph => paragraph.trim())
|
||||
.map(paragraph => paragraph.startsWith(NOTE_HEADER) ? paragraph.slice(NOTE_HEADER.length).trim() : paragraph)
|
||||
.find(paragraph => paragraph.startsWith(NOTE_PREFIX))
|
||||
.find(paragraph => paragraph.startsWith(NOTE_PREFIX));
|
||||
|
||||
if (note) {
|
||||
note = note
|
||||
.slice(NOTE_PREFIX.length)
|
||||
.replace(/<!--.*-->/, '') // '<!-- change summary here-->'
|
||||
.replace(/\r?\n/, ' ') // remove newlines
|
||||
.trim()
|
||||
.trim();
|
||||
}
|
||||
|
||||
if (note && OMIT_FROM_RELEASE_NOTES_KEYS.includes(note.toLowerCase())) {
|
||||
return NO_NOTES
|
||||
return NO_NOTES;
|
||||
}
|
||||
|
||||
return note
|
||||
}
|
||||
return note;
|
||||
};
|
||||
|
||||
/**
|
||||
* Looks for our project's conventions in the commit message:
|
||||
|
@ -138,71 +138,71 @@ const getNoteFromBody = body => {
|
|||
*/
|
||||
const parseCommitMessage = (commitMessage, owner, repo, commit = {}) => {
|
||||
// split commitMessage into subject & body
|
||||
let subject = commitMessage
|
||||
let body = ''
|
||||
const pos = subject.indexOf('\n')
|
||||
let subject = commitMessage;
|
||||
let body = '';
|
||||
const pos = subject.indexOf('\n');
|
||||
if (pos !== -1) {
|
||||
body = subject.slice(pos).trim()
|
||||
subject = subject.slice(0, pos).trim()
|
||||
body = subject.slice(pos).trim();
|
||||
subject = subject.slice(0, pos).trim();
|
||||
}
|
||||
|
||||
if (!commit.originalSubject) {
|
||||
commit.originalSubject = subject
|
||||
commit.originalSubject = subject;
|
||||
}
|
||||
|
||||
if (body) {
|
||||
commit.body = body
|
||||
commit.body = body;
|
||||
|
||||
const note = getNoteFromBody(body)
|
||||
if (note) { commit.note = note }
|
||||
const note = getNoteFromBody(body);
|
||||
if (note) { commit.note = note; }
|
||||
}
|
||||
|
||||
// if the subject ends in ' (#dddd)', treat it as a pull request id
|
||||
let match
|
||||
let match;
|
||||
if ((match = subject.match(/^(.*)\s\(#(\d+)\)$/))) {
|
||||
setPullRequest(commit, owner, repo, parseInt(match[2]))
|
||||
subject = match[1]
|
||||
setPullRequest(commit, owner, repo, parseInt(match[2]));
|
||||
subject = match[1];
|
||||
}
|
||||
|
||||
// if the subject begins with 'word:', treat it as a semantic commit
|
||||
if ((match = subject.match(/^(\w+):\s(.*)$/))) {
|
||||
const type = match[1].toLocaleLowerCase()
|
||||
const type = match[1].toLocaleLowerCase();
|
||||
if (knownTypes.has(type)) {
|
||||
commit.type = type
|
||||
subject = match[2]
|
||||
commit.type = type;
|
||||
subject = match[2];
|
||||
}
|
||||
}
|
||||
|
||||
// Check for GitHub commit message that indicates a PR
|
||||
if ((match = subject.match(/^Merge pull request #(\d+) from (.*)$/))) {
|
||||
setPullRequest(commit, owner, repo, parseInt(match[1]))
|
||||
commit.pr.branch = match[2].trim()
|
||||
setPullRequest(commit, owner, repo, parseInt(match[1]));
|
||||
commit.pr.branch = match[2].trim();
|
||||
}
|
||||
|
||||
// Check for a trop comment that indicates a PR
|
||||
if ((match = commitMessage.match(/\bBackport of #(\d+)\b/))) {
|
||||
setPullRequest(commit, owner, repo, parseInt(match[1]))
|
||||
setPullRequest(commit, owner, repo, parseInt(match[1]));
|
||||
}
|
||||
|
||||
// https://help.github.com/articles/closing-issues-using-keywords/
|
||||
if ((match = subject.match(/\b(?:close|closes|closed|fix|fixes|fixed|resolve|resolves|resolved|for)\s#(\d+)\b/))) {
|
||||
commit.issueNumber = parseInt(match[1])
|
||||
commit.issueNumber = parseInt(match[1]);
|
||||
if (!commit.type) {
|
||||
commit.type = 'fix'
|
||||
commit.type = 'fix';
|
||||
}
|
||||
}
|
||||
|
||||
// look for 'fixes' in markdown; e.g. 'Fixes [#8952](https://github.com/electron/electron/issues/8952)'
|
||||
if (!commit.issueNumber && ((match = commitMessage.match(/Fixes \[#(\d+)\]\(https:\/\/github.com\/(\w+)\/(\w+)\/issues\/(\d+)\)/)))) {
|
||||
commit.issueNumber = parseInt(match[1])
|
||||
commit.issueNumber = parseInt(match[1]);
|
||||
if (commit.pr && commit.pr.number === commit.issueNumber) {
|
||||
commit.pr = null
|
||||
commit.pr = null;
|
||||
}
|
||||
if (commit.originalPr && commit.originalPr.number === commit.issueNumber) {
|
||||
commit.originalPr = null
|
||||
commit.originalPr = null;
|
||||
}
|
||||
if (!commit.type) {
|
||||
commit.type = 'fix'
|
||||
commit.type = 'fix';
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -211,55 +211,55 @@ const parseCommitMessage = (commitMessage, owner, repo, commit = {}) => {
|
|||
.split(/\r?\n/) // split into lines
|
||||
.map(line => line.trim())
|
||||
.some(line => line.startsWith('BREAKING CHANGE'))) {
|
||||
commit.type = 'breaking-change'
|
||||
commit.type = 'breaking-change';
|
||||
}
|
||||
|
||||
// Check for a reversion commit
|
||||
if ((match = body.match(/This reverts commit ([a-f0-9]{40})\./))) {
|
||||
commit.revertHash = match[1]
|
||||
commit.revertHash = match[1];
|
||||
}
|
||||
|
||||
// Edge case: manual backport where commit has `owner/repo#pull` notation
|
||||
if (commitMessage.toLowerCase().includes('backport') &&
|
||||
((match = commitMessage.match(/\b(\w+)\/(\w+)#(\d+)\b/)))) {
|
||||
const [, owner, repo, number] = match
|
||||
const [, owner, repo, number] = match;
|
||||
if (FOLLOW_REPOS.includes(`${owner}/${repo}`)) {
|
||||
setPullRequest(commit, owner, repo, number)
|
||||
setPullRequest(commit, owner, repo, number);
|
||||
}
|
||||
}
|
||||
|
||||
// Edge case: manual backport where commit has a link to the backport PR
|
||||
if (commitMessage.includes('ackport') &&
|
||||
((match = commitMessage.match(/https:\/\/github\.com\/(\w+)\/(\w+)\/pull\/(\d+)/)))) {
|
||||
const [, owner, repo, number] = match
|
||||
const [, owner, repo, number] = match;
|
||||
if (FOLLOW_REPOS.includes(`${owner}/${repo}`)) {
|
||||
setPullRequest(commit, owner, repo, number)
|
||||
setPullRequest(commit, owner, repo, number);
|
||||
}
|
||||
}
|
||||
|
||||
// Legacy commits: pre-semantic commits
|
||||
if (!commit.type || commit.type === 'chore') {
|
||||
const commitMessageLC = commitMessage.toLocaleLowerCase()
|
||||
const commitMessageLC = commitMessage.toLocaleLowerCase();
|
||||
if ((match = commitMessageLC.match(/\bchore\((\w+)\):/))) {
|
||||
// example: 'Chore(docs): description'
|
||||
commit.type = knownTypes.has(match[1]) ? match[1] : 'chore'
|
||||
commit.type = knownTypes.has(match[1]) ? match[1] : 'chore';
|
||||
} else if (commitMessageLC.match(/\b(?:fix|fixes|fixed)/)) {
|
||||
// example: 'fix a bug'
|
||||
commit.type = 'fix'
|
||||
commit.type = 'fix';
|
||||
} else if (commitMessageLC.match(/\[(?:docs|doc)\]/)) {
|
||||
// example: '[docs]
|
||||
commit.type = 'doc'
|
||||
commit.type = 'doc';
|
||||
}
|
||||
}
|
||||
|
||||
commit.subject = subject.trim()
|
||||
return commit
|
||||
}
|
||||
commit.subject = subject.trim();
|
||||
return commit;
|
||||
};
|
||||
|
||||
const getLocalCommitHashes = async (dir, ref) => {
|
||||
const args = ['log', '-z', '--format=%H', ref]
|
||||
return (await runGit(dir, args)).split('\0').map(hash => hash.trim())
|
||||
}
|
||||
const args = ['log', '-z', '--format=%H', ref];
|
||||
return (await runGit(dir, args)).split('\0').map(hash => hash.trim());
|
||||
};
|
||||
|
||||
/*
|
||||
* possible properties:
|
||||
|
@ -267,75 +267,75 @@ const getLocalCommitHashes = async (dir, ref) => {
|
|||
* pr { owner, repo, number, branch }, revertHash, subject, type
|
||||
*/
|
||||
const getLocalCommitDetails = async (module, point1, point2) => {
|
||||
const { owner, repo, dir } = module
|
||||
const { owner, repo, dir } = module;
|
||||
|
||||
const fieldSep = '||'
|
||||
const format = ['%H', '%P', '%aE', '%B'].join(fieldSep)
|
||||
const args = ['log', '-z', '--cherry-pick', '--right-only', '--first-parent', `--format=${format}`, `${point1}..${point2}`]
|
||||
const commits = (await runGit(dir, args)).split('\0').map(field => field.trim())
|
||||
const details = []
|
||||
const fieldSep = '||';
|
||||
const format = ['%H', '%P', '%aE', '%B'].join(fieldSep);
|
||||
const args = ['log', '-z', '--cherry-pick', '--right-only', '--first-parent', `--format=${format}`, `${point1}..${point2}`];
|
||||
const commits = (await runGit(dir, args)).split('\0').map(field => field.trim());
|
||||
const details = [];
|
||||
for (const commit of commits) {
|
||||
if (!commit) {
|
||||
continue
|
||||
continue;
|
||||
}
|
||||
const [hash, parentHashes, email, commitMessage] = commit.split(fieldSep, 4).map(field => field.trim())
|
||||
const [hash, parentHashes, email, commitMessage] = commit.split(fieldSep, 4).map(field => field.trim());
|
||||
details.push(parseCommitMessage(commitMessage, owner, repo, {
|
||||
email,
|
||||
hash,
|
||||
owner,
|
||||
repo,
|
||||
parentHashes: parentHashes.split()
|
||||
}))
|
||||
}));
|
||||
}
|
||||
return details
|
||||
}
|
||||
return details;
|
||||
};
|
||||
|
||||
const checkCache = async (name, operation) => {
|
||||
const filename = path.resolve(CACHE_DIR, name)
|
||||
const filename = path.resolve(CACHE_DIR, name);
|
||||
if (fs.existsSync(filename)) {
|
||||
return JSON.parse(fs.readFileSync(filename, 'utf8'))
|
||||
return JSON.parse(fs.readFileSync(filename, 'utf8'));
|
||||
}
|
||||
const response = await operation()
|
||||
const response = await operation();
|
||||
if (response) {
|
||||
fs.writeFileSync(filename, JSON.stringify(response))
|
||||
fs.writeFileSync(filename, JSON.stringify(response));
|
||||
}
|
||||
return response
|
||||
}
|
||||
return response;
|
||||
};
|
||||
|
||||
// helper function to add some resiliency to volatile GH api endpoints
|
||||
async function runRetryable (fn, maxRetries) {
|
||||
let lastError
|
||||
let lastError;
|
||||
for (let i = 0; i < maxRetries; i++) {
|
||||
try {
|
||||
return await fn()
|
||||
return await fn();
|
||||
} catch (error) {
|
||||
await new Promise((resolve, reject) => setTimeout(resolve, CHECK_INTERVAL))
|
||||
lastError = error
|
||||
await new Promise((resolve, reject) => setTimeout(resolve, CHECK_INTERVAL));
|
||||
lastError = error;
|
||||
}
|
||||
}
|
||||
// Silently eat 404s.
|
||||
if (lastError.status !== 404) throw lastError
|
||||
if (lastError.status !== 404) throw lastError;
|
||||
}
|
||||
|
||||
const getPullRequest = async (number, owner, repo) => {
|
||||
const name = `${owner}-${repo}-pull-${number}`
|
||||
const retryableFunc = () => octokit.pulls.get({ pull_number: number, owner, repo })
|
||||
return checkCache(name, () => runRetryable(retryableFunc, MAX_FAIL_COUNT))
|
||||
}
|
||||
const name = `${owner}-${repo}-pull-${number}`;
|
||||
const retryableFunc = () => octokit.pulls.get({ pull_number: number, owner, repo });
|
||||
return checkCache(name, () => runRetryable(retryableFunc, MAX_FAIL_COUNT));
|
||||
};
|
||||
|
||||
const getComments = async (number, owner, repo) => {
|
||||
const name = `${owner}-${repo}-issue-${number}-comments`
|
||||
const retryableFunc = () => octokit.issues.listComments({ issue_number: number, owner, repo, per_page: 100 })
|
||||
return checkCache(name, () => runRetryable(retryableFunc, MAX_FAIL_COUNT))
|
||||
}
|
||||
const name = `${owner}-${repo}-issue-${number}-comments`;
|
||||
const retryableFunc = () => octokit.issues.listComments({ issue_number: number, owner, repo, per_page: 100 });
|
||||
return checkCache(name, () => runRetryable(retryableFunc, MAX_FAIL_COUNT));
|
||||
};
|
||||
|
||||
const addRepoToPool = async (pool, repo, from, to) => {
|
||||
const commonAncestor = await getCommonAncestor(repo.dir, from, to)
|
||||
const oldHashes = await getLocalCommitHashes(repo.dir, from)
|
||||
oldHashes.forEach(hash => { pool.processedHashes.add(hash) })
|
||||
const commits = await getLocalCommitDetails(repo, commonAncestor, to)
|
||||
pool.commits.push(...commits)
|
||||
}
|
||||
const commonAncestor = await getCommonAncestor(repo.dir, from, to);
|
||||
const oldHashes = await getLocalCommitHashes(repo.dir, from);
|
||||
oldHashes.forEach(hash => { pool.processedHashes.add(hash); });
|
||||
const commits = await getLocalCommitDetails(repo, commonAncestor, to);
|
||||
pool.commits.push(...commits);
|
||||
};
|
||||
|
||||
/***
|
||||
**** Other Repos
|
||||
|
@ -345,21 +345,21 @@ const addRepoToPool = async (pool, repo, from, to) => {
|
|||
|
||||
const getDepsVariable = async (ref, key) => {
|
||||
// get a copy of that reference point's DEPS file
|
||||
const deps = await runGit(ELECTRON_VERSION, ['show', `${ref}:DEPS`])
|
||||
const filename = path.resolve(os.tmpdir(), 'DEPS')
|
||||
fs.writeFileSync(filename, deps)
|
||||
const deps = await runGit(ELECTRON_VERSION, ['show', `${ref}:DEPS`]);
|
||||
const filename = path.resolve(os.tmpdir(), 'DEPS');
|
||||
fs.writeFileSync(filename, deps);
|
||||
|
||||
// query the DEPS file
|
||||
const response = childProcess.spawnSync(
|
||||
'gclient',
|
||||
['getdep', '--deps-file', filename, '--var', key],
|
||||
{ encoding: 'utf8' }
|
||||
)
|
||||
);
|
||||
|
||||
// cleanup
|
||||
fs.unlinkSync(filename)
|
||||
return response.stdout.trim()
|
||||
}
|
||||
fs.unlinkSync(filename);
|
||||
return response.stdout.trim();
|
||||
};
|
||||
|
||||
const getDependencyCommitsGN = async (pool, fromRef, toRef) => {
|
||||
const repos = [{ // just node
|
||||
|
@ -367,16 +367,16 @@ const getDependencyCommitsGN = async (pool, fromRef, toRef) => {
|
|||
repo: 'node',
|
||||
dir: path.resolve(SRC_DIR, 'third_party', 'electron_node'),
|
||||
deps_variable_name: 'node_version'
|
||||
}]
|
||||
}];
|
||||
|
||||
for (const repo of repos) {
|
||||
// the 'DEPS' file holds the dependency reference point
|
||||
const key = repo.deps_variable_name
|
||||
const from = await getDepsVariable(fromRef, key)
|
||||
const to = await getDepsVariable(toRef, key)
|
||||
await addRepoToPool(pool, repo, from, to)
|
||||
const key = repo.deps_variable_name;
|
||||
const from = await getDepsVariable(fromRef, key);
|
||||
const to = await getDepsVariable(toRef, key);
|
||||
await addRepoToPool(pool, repo, from, to);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Changes are interesting if they make a change relative to a previous
|
||||
// release in the same series. For example if you fix a Y.0.0 bug, that
|
||||
|
@ -388,17 +388,17 @@ const getDependencyCommitsGN = async (pool, fromRef, toRef) => {
|
|||
// branches' changes. Otherwise we will have an overwhelmingly long
|
||||
// list of mostly-irrelevant changes.
|
||||
const shouldIncludeMultibranchChanges = (version) => {
|
||||
let show = true
|
||||
let show = true;
|
||||
|
||||
if (semver.valid(version)) {
|
||||
const prerelease = semver.prerelease(version)
|
||||
const prerelease = semver.prerelease(version);
|
||||
show = prerelease
|
||||
? parseInt(prerelease.pop()) > 1
|
||||
: semver.patch(version) > 0
|
||||
: semver.patch(version) > 0;
|
||||
}
|
||||
|
||||
return show
|
||||
}
|
||||
return show;
|
||||
};
|
||||
|
||||
/***
|
||||
**** Main
|
||||
|
@ -406,131 +406,131 @@ const shouldIncludeMultibranchChanges = (version) => {
|
|||
|
||||
const getNotes = async (fromRef, toRef, newVersion) => {
|
||||
if (!fs.existsSync(CACHE_DIR)) {
|
||||
fs.mkdirSync(CACHE_DIR)
|
||||
fs.mkdirSync(CACHE_DIR);
|
||||
}
|
||||
|
||||
const pool = {
|
||||
processedHashes: new Set(),
|
||||
commits: []
|
||||
}
|
||||
};
|
||||
|
||||
// get the electron/electron commits
|
||||
const electron = { owner: 'electron', repo: 'electron', dir: ELECTRON_VERSION }
|
||||
await addRepoToPool(pool, electron, fromRef, toRef)
|
||||
const electron = { owner: 'electron', repo: 'electron', dir: ELECTRON_VERSION };
|
||||
await addRepoToPool(pool, electron, fromRef, toRef);
|
||||
|
||||
// Don't include submodules if comparing across major versions;
|
||||
// there's just too much churn otherwise.
|
||||
const includeDeps = semver.valid(fromRef) &&
|
||||
semver.valid(toRef) &&
|
||||
semver.major(fromRef) === semver.major(toRef)
|
||||
semver.major(fromRef) === semver.major(toRef);
|
||||
|
||||
if (includeDeps) {
|
||||
await getDependencyCommitsGN(pool, fromRef, toRef)
|
||||
await getDependencyCommitsGN(pool, fromRef, toRef);
|
||||
}
|
||||
|
||||
// remove any old commits
|
||||
pool.commits = pool.commits.filter(commit => !pool.processedHashes.has(commit.hash))
|
||||
pool.commits = pool.commits.filter(commit => !pool.processedHashes.has(commit.hash));
|
||||
|
||||
// if a commmit _and_ revert occurred in the unprocessed set, skip them both
for (const commit of pool.commits) {
const revertHash = commit.revertHash
const revertHash = commit.revertHash;
if (!revertHash) {
continue
continue;
}

const revert = pool.commits.find(commit => commit.hash === revertHash)
const revert = pool.commits.find(commit => commit.hash === revertHash);
if (!revert) {
continue
continue;
}

commit.note = NO_NOTES
revert.note = NO_NOTES
pool.processedHashes.add(commit.hash)
pool.processedHashes.add(revertHash)
commit.note = NO_NOTES;
revert.note = NO_NOTES;
pool.processedHashes.add(commit.hash);
pool.processedHashes.add(revertHash);
}

// scrape PRs for release note 'Notes:' comments
for (const commit of pool.commits) {
let pr = commit.pr
let pr = commit.pr;

let prSubject
let prSubject;
while (pr && !commit.note) {
const note = await getNoteFromClerk(pr.number, pr.owner, pr.repo)
const note = await getNoteFromClerk(pr.number, pr.owner, pr.repo);
if (note) {
commit.note = note
commit.note = note;
}

// if we already have all the data we need, stop scraping the PRs
if (commit.note && commit.type && prSubject) {
break
break;
}

const prData = await getPullRequest(pr.number, pr.owner, pr.repo)
const prData = await getPullRequest(pr.number, pr.owner, pr.repo);
if (!prData || !prData.data) {
break
break;
}

// try to pull a release note from the pull comment
const prParsed = parseCommitMessage(`${prData.data.title}\n\n${prData.data.body}`, pr.owner, pr.repo)
const prParsed = parseCommitMessage(`${prData.data.title}\n\n${prData.data.body}`, pr.owner, pr.repo);
if (!commit.note) {
commit.note = prParsed.note
commit.note = prParsed.note;
}
if (!commit.type || prParsed.type === 'breaking-change') {
commit.type = prParsed.type
commit.type = prParsed.type;
}
prSubject = prSubject || prParsed.subject
prSubject = prSubject || prParsed.subject;

pr = prParsed.pr && (prParsed.pr.number !== pr.number) ? prParsed.pr : null
pr = prParsed.pr && (prParsed.pr.number !== pr.number) ? prParsed.pr : null;
}

// if we still don't have a note, it's because someone missed a 'Notes:
// comment in a PR somewhere... use the PR subject as a fallback.
commit.note = commit.note || prSubject
commit.note = commit.note || prSubject;
}

// remove non-user-facing commits
pool.commits = pool.commits
.filter(commit => commit.note !== NO_NOTES)
.filter(commit => !((commit.note || commit.subject).match(/^[Bb]ump v\d+\.\d+\.\d+/)))
.filter(commit => !((commit.note || commit.subject).match(/^[Bb]ump v\d+\.\d+\.\d+/)));

if (!shouldIncludeMultibranchChanges(newVersion)) {
// load all the prDatas
await Promise.all(
pool.commits.map(commit => (async () => {
const { pr } = commit
const { pr } = commit;
if (typeof pr === 'object') {
const prData = await getPullRequest(pr.number, pr.owner, pr.repo)
const prData = await getPullRequest(pr.number, pr.owner, pr.repo);
if (prData) {
commit.prData = prData
commit.prData = prData;
}
}
})())
)
);

// remove items that already landed in a previous major/minor series
pool.commits = pool.commits
.filter(commit => {
if (!commit.prData) {
return true
return true;
}
const reducer = (accumulator, current) => {
if (!semver.valid(accumulator)) { return current }
if (!semver.valid(current)) { return accumulator }
return semver.lt(accumulator, current) ? accumulator : current
}
if (!semver.valid(accumulator)) { return current; }
if (!semver.valid(current)) { return accumulator; }
return semver.lt(accumulator, current) ? accumulator : current;
};
const earliestRelease = commit.prData.data.labels
.map(label => label.name.match(/merged\/(\d+)-(\d+)-x/))
.filter(label => !!label)
.map(label => `${label[1]}.${label[2]}.0`)
.reduce(reducer, null)
.reduce(reducer, null);
if (!semver.valid(earliestRelease)) {
return true
return true;
}
return semver.diff(earliestRelease, newVersion).includes('patch')
})
return semver.diff(earliestRelease, newVersion).includes('patch');
});
}

pool.commits = removeSupercededChromiumUpdates(pool.commits)
pool.commits = removeSupercededChromiumUpdates(pool.commits);

const notes = {
breaking: [],

@ -540,78 +540,78 @@ const getNotes = async (fromRef, toRef, newVersion) => {
other: [],
unknown: [],
name: newVersion
}
};

pool.commits.forEach(commit => {
const str = commit.type
const str = commit.type;
if (!str) {
notes.unknown.push(commit)
notes.unknown.push(commit);
} else if (breakTypes.has(str)) {
notes.breaking.push(commit)
notes.breaking.push(commit);
} else if (docTypes.has(str)) {
notes.docs.push(commit)
notes.docs.push(commit);
} else if (featTypes.has(str)) {
notes.feat.push(commit)
notes.feat.push(commit);
} else if (fixTypes.has(str)) {
notes.fix.push(commit)
notes.fix.push(commit);
} else if (otherTypes.has(str)) {
notes.other.push(commit)
notes.other.push(commit);
} else {
notes.unknown.push(commit)
notes.unknown.push(commit);
}
})
});

return notes
}
return notes;
};

const removeSupercededChromiumUpdates = (commits) => {
const chromiumRegex = /^Updated Chromium to \d+\.\d+\.\d+\.\d+/
const updates = commits.filter(commit => (commit.note || commit.subject).match(chromiumRegex))
const keepers = commits.filter(commit => !updates.includes(commit))
const chromiumRegex = /^Updated Chromium to \d+\.\d+\.\d+\.\d+/;
const updates = commits.filter(commit => (commit.note || commit.subject).match(chromiumRegex));
const keepers = commits.filter(commit => !updates.includes(commit));

// keep the newest update.
if (updates.length) {
updates.sort((a, b) => a.originalPr.number - b.originalPr.number)
keepers.push(updates.pop())
updates.sort((a, b) => a.originalPr.number - b.originalPr.number);
keepers.push(updates.pop());
}

return keepers
}
return keepers;
};

/***
**** Render
***/

const renderLink = (commit, explicitLinks) => {
let link
const pr = commit.originalPr
let link;
const pr = commit.originalPr;
if (pr) {
const { owner, repo, number } = pr
const url = `https://github.com/${owner}/${repo}/pull/${number}`
const { owner, repo, number } = pr;
const url = `https://github.com/${owner}/${repo}/pull/${number}`;
const text = owner === 'electron' && repo === 'electron'
? `#${number}`
: `${owner}/${repo}#${number}`
link = explicitLinks ? `[${text}](${url})` : text
: `${owner}/${repo}#${number}`;
link = explicitLinks ? `[${text}](${url})` : text;
} else {
const { owner, repo, hash } = commit
const url = `https://github.com/${owner}/${repo}/commit/${hash}`
const { owner, repo, hash } = commit;
const url = `https://github.com/${owner}/${repo}/commit/${hash}`;
const text = owner === 'electron' && repo === 'electron'
? `${hash.slice(0, 8)}`
: `${owner}/${repo}@${hash.slice(0, 8)}`
link = explicitLinks ? `[${text}](${url})` : text
: `${owner}/${repo}@${hash.slice(0, 8)}`;
link = explicitLinks ? `[${text}](${url})` : text;
}
return link
}
return link;
};

const renderCommit = (commit, explicitLinks) => {
// clean up the note
let note = commit.note || commit.subject
note = note.trim()
let note = commit.note || commit.subject;
note = note.trim();
if (note.length !== 0) {
note = note[0].toUpperCase() + note.substr(1)
note = note[0].toUpperCase() + note.substr(1);

if (!note.endsWith('.')) {
note = note + '.'
note = note + '.';
}

const commonVerbs = {

@ -631,57 +631,57 @@ const renderCommit = (commit, explicitLinks) => {
Stopped: ['Stop'],
Updated: ['Update'],
Upgraded: ['Upgrade']
}
};
for (const [key, values] of Object.entries(commonVerbs)) {
for (const value of values) {
const start = `${value} `
const start = `${value} `;
if (note.startsWith(start)) {
note = `${key} ${note.slice(start.length)}`
note = `${key} ${note.slice(start.length)}`;
}
}
}
}

const link = renderLink(commit, explicitLinks)
const link = renderLink(commit, explicitLinks);

return { note, link }
}
return { note, link };
};

const renderNotes = (notes, explicitLinks) => {
const rendered = [`# Release Notes for ${notes.name}\n\n`]
const rendered = [`# Release Notes for ${notes.name}\n\n`];

const renderSection = (title, commits) => {
if (commits.length === 0) {
return
return;
}
const notes = new Map()
const notes = new Map();
for (const note of commits.map(commit => renderCommit(commit, explicitLinks))) {
if (!notes.has(note.note)) {
notes.set(note.note, [note.link])
notes.set(note.note, [note.link]);
} else {
notes.get(note.note).push(note.link)
notes.get(note.note).push(note.link);
}
}
rendered.push(`## ${title}\n\n`)
const lines = []
notes.forEach((links, key) => lines.push(` * ${key} ${links.map(link => link.toString()).sort().join(', ')}\n`))
rendered.push(...lines.sort(), '\n')
}
rendered.push(`## ${title}\n\n`);
const lines = [];
notes.forEach((links, key) => lines.push(` * ${key} ${links.map(link => link.toString()).sort().join(', ')}\n`));
rendered.push(...lines.sort(), '\n');
};

renderSection('Breaking Changes', notes.breaking)
renderSection('Features', notes.feat)
renderSection('Fixes', notes.fix)
renderSection('Other Changes', notes.other)
renderSection('Breaking Changes', notes.breaking);
renderSection('Features', notes.feat);
renderSection('Fixes', notes.fix);
renderSection('Other Changes', notes.other);

if (notes.docs.length) {
const docs = notes.docs.map(commit => renderLink(commit, explicitLinks)).sort()
rendered.push('## Documentation\n\n', ` * Documentation changes: ${docs.join(', ')}\n`, '\n')
const docs = notes.docs.map(commit => renderLink(commit, explicitLinks)).sort();
rendered.push('## Documentation\n\n', ` * Documentation changes: ${docs.join(', ')}\n`, '\n');
}

renderSection('Unknown', notes.unknown)
renderSection('Unknown', notes.unknown);

return rendered.join('')
}
return rendered.join('');
};

/***
**** Module

@ -690,4 +690,4 @@ const renderNotes = (notes, explicitLinks) => {
module.exports = {
get: getNotes,
render: renderNotes
}
};

@ -1,105 +1,105 @@
#!/usr/bin/env node

if (!process.env.CI) require('dotenv-safe').load()
if (!process.env.CI) require('dotenv-safe').load();
const args = require('minimist')(process.argv.slice(2), {
boolean: ['automaticRelease', 'notesOnly', 'stable']
})
const ciReleaseBuild = require('./ci-release-build')
});
const ciReleaseBuild = require('./ci-release-build');
const octokit = require('@octokit/rest')({
auth: process.env.ELECTRON_GITHUB_TOKEN
})
const { execSync } = require('child_process')
const { GitProcess } = require('dugite')
});
const { execSync } = require('child_process');
const { GitProcess } = require('dugite');

const path = require('path')
const readline = require('readline')
const releaseNotesGenerator = require('./notes/index.js')
const { getCurrentBranch, ELECTRON_DIR } = require('../lib/utils.js')
const bumpType = args._[0]
const targetRepo = bumpType === 'nightly' ? 'nightlies' : 'electron'
const path = require('path');
const readline = require('readline');
const releaseNotesGenerator = require('./notes/index.js');
const { getCurrentBranch, ELECTRON_DIR } = require('../lib/utils.js');
const bumpType = args._[0];
const targetRepo = bumpType === 'nightly' ? 'nightlies' : 'electron';

require('colors')
const pass = '✓'.green
const fail = '✗'.red
require('colors');
const pass = '✓'.green;
const fail = '✗'.red;

if (!bumpType && !args.notesOnly) {
console.log('Usage: prepare-release [stable | minor | beta | nightly]' +
' (--stable) (--notesOnly) (--automaticRelease) (--branch)')
process.exit(1)
' (--stable) (--notesOnly) (--automaticRelease) (--branch)');
process.exit(1);
}

async function getNewVersion (dryRun) {
if (!dryRun) {
console.log(`Bumping for new "${bumpType}" version.`)
console.log(`Bumping for new "${bumpType}" version.`);
}
const bumpScript = path.join(__dirname, 'version-bumper.js')
const scriptArgs = ['node', bumpScript, `--bump=${bumpType}`]
if (dryRun) scriptArgs.push('--dryRun')
const bumpScript = path.join(__dirname, 'version-bumper.js');
const scriptArgs = ['node', bumpScript, `--bump=${bumpType}`];
if (dryRun) scriptArgs.push('--dryRun');
try {
let bumpVersion = execSync(scriptArgs.join(' '), { encoding: 'UTF-8' })
bumpVersion = bumpVersion.substr(bumpVersion.indexOf(':') + 1).trim()
const newVersion = `v${bumpVersion}`
let bumpVersion = execSync(scriptArgs.join(' '), { encoding: 'UTF-8' });
bumpVersion = bumpVersion.substr(bumpVersion.indexOf(':') + 1).trim();
const newVersion = `v${bumpVersion}`;
if (!dryRun) {
console.log(`${pass} Successfully bumped version to ${newVersion}`)
console.log(`${pass} Successfully bumped version to ${newVersion}`);
}
return newVersion
return newVersion;
} catch (err) {
console.log(`${fail} Could not bump version, error was:`, err)
throw err
console.log(`${fail} Could not bump version, error was:`, err);
throw err;
}
}

async function getReleaseNotes (currentBranch, newVersion) {
if (bumpType === 'nightly') {
return { text: 'Nightlies do not get release notes, please compare tags for info.' }
return { text: 'Nightlies do not get release notes, please compare tags for info.' };
}
console.log(`Generating release notes for ${currentBranch}.`)
const releaseNotes = await releaseNotesGenerator(currentBranch, newVersion)
console.log(`Generating release notes for ${currentBranch}.`);
const releaseNotes = await releaseNotesGenerator(currentBranch, newVersion);
if (releaseNotes.warning) {
console.warn(releaseNotes.warning)
console.warn(releaseNotes.warning);
}
return releaseNotes
return releaseNotes;
}

async function createRelease (branchToTarget, isBeta) {
const newVersion = await getNewVersion()
const releaseNotes = await getReleaseNotes(branchToTarget, newVersion)
await tagRelease(newVersion)
const newVersion = await getNewVersion();
const releaseNotes = await getReleaseNotes(branchToTarget, newVersion);
await tagRelease(newVersion);

console.log('Checking for existing draft release.')
console.log('Checking for existing draft release.');
const releases = await octokit.repos.listReleases({
owner: 'electron',
repo: targetRepo
}).catch(err => {
console.log(`${fail} Could not get releases. Error was: `, err)
})
console.log(`${fail} Could not get releases. Error was: `, err);
});

const drafts = releases.data.filter(release => release.draft &&
release.tag_name === newVersion)
release.tag_name === newVersion);
if (drafts.length > 0) {
console.log(`${fail} Aborting because draft release for
${drafts[0].tag_name} already exists.`)
process.exit(1)
${drafts[0].tag_name} already exists.`);
process.exit(1);
}
console.log(`${pass} A draft release does not exist; creating one.`)
console.log(`${pass} A draft release does not exist; creating one.`);

let releaseBody
let releaseIsPrelease = false
let releaseBody;
let releaseIsPrelease = false;
if (isBeta) {
if (newVersion.indexOf('nightly') > 0) {
releaseBody = 'Note: This is a nightly release. Please file new issues ' +
'for any bugs you find in it.\n \n This release is published to npm ' +
'under the nightly tag and can be installed via npm install electron@nightly, ' +
`or npm i electron-nightly@${newVersion.substr(1)}.\n \n ${releaseNotes.text}`
`or npm i electron-nightly@${newVersion.substr(1)}.\n \n ${releaseNotes.text}`;
} else {
releaseBody = 'Note: This is a beta release. Please file new issues ' +
'for any bugs you find in it.\n \n This release is published to npm ' +
'under the beta tag and can be installed via npm install electron@beta, ' +
`or npm i electron@${newVersion.substr(1)}.\n \n ${releaseNotes.text}`
`or npm i electron@${newVersion.substr(1)}.\n \n ${releaseNotes.text}`;
}
releaseIsPrelease = true
releaseIsPrelease = true;
} else {
releaseBody = releaseNotes.text
releaseBody = releaseNotes.text;
}

const release = await octokit.repos.createRelease({

@ -112,22 +112,22 @@ async function createRelease (branchToTarget, isBeta) {
prerelease: releaseIsPrelease,
target_commitish: newVersion.indexOf('nightly') !== -1 ? 'master' : branchToTarget
}).catch(err => {
console.log(`${fail} Error creating new release: `, err)
process.exit(1)
})
console.log(`${fail} Error creating new release: `, err);
process.exit(1);
});

console.log(`Release has been created with id: ${release.data.id}.`)
console.log(`${pass} Draft release for ${newVersion} successful.`)
console.log(`Release has been created with id: ${release.data.id}.`);
console.log(`${pass} Draft release for ${newVersion} successful.`);
}

async function pushRelease (branch) {
const pushDetails = await GitProcess.exec(['push', 'origin', `HEAD:${branch}`, '--follow-tags'], ELECTRON_DIR)
const pushDetails = await GitProcess.exec(['push', 'origin', `HEAD:${branch}`, '--follow-tags'], ELECTRON_DIR);
if (pushDetails.exitCode === 0) {
console.log(`${pass} Successfully pushed the release. Wait for ` +
'release builds to finish before running "npm run release".')
'release builds to finish before running "npm run release".');
} else {
console.log(`${fail} Error pushing the release: ${pushDetails.stderr}`)
process.exit(1)
console.log(`${fail} Error pushing the release: ${pushDetails.stderr}`);
process.exit(1);
}
}

@ -135,34 +135,34 @@ async function runReleaseBuilds (branch) {
await ciReleaseBuild(branch, {
ghRelease: true,
automaticRelease: args.automaticRelease
})
});
}

async function tagRelease (version) {
console.log(`Tagging release ${version}.`)
const checkoutDetails = await GitProcess.exec(['tag', '-a', '-m', version, version], ELECTRON_DIR)
console.log(`Tagging release ${version}.`);
const checkoutDetails = await GitProcess.exec(['tag', '-a', '-m', version, version], ELECTRON_DIR);
if (checkoutDetails.exitCode === 0) {
console.log(`${pass} Successfully tagged ${version}.`)
console.log(`${pass} Successfully tagged ${version}.`);
} else {
console.log(`${fail} Error tagging ${version}: ` +
`${checkoutDetails.stderr}`)
process.exit(1)
`${checkoutDetails.stderr}`);
process.exit(1);
}
}

async function verifyNewVersion () {
const newVersion = await getNewVersion(true)
let response
const newVersion = await getNewVersion(true);
let response;
if (args.automaticRelease) {
response = 'y'
response = 'y';
} else {
response = await promptForVersion(newVersion)
response = await promptForVersion(newVersion);
}
if (response.match(/^y/i)) {
console.log(`${pass} Starting release of ${newVersion}`)
console.log(`${pass} Starting release of ${newVersion}`);
} else {
console.log(`${fail} Aborting release of ${newVersion}`)
process.exit()
console.log(`${fail} Aborting release of ${newVersion}`);
process.exit();
}
}

@ -171,44 +171,44 @@ async function promptForVersion (version) {
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout
})
});
rl.question(`Do you want to create the release ${version.green} (y/N)? `, (answer) => {
rl.close()
resolve(answer)
})
})
rl.close();
resolve(answer);
});
});
}

// function to determine if there have been commits to master since the last release
async function changesToRelease () {
const lastCommitWasRelease = new RegExp('^Bump v[0-9.]*(-beta[0-9.]*)?(-nightly[0-9.]*)?$', 'g')
const lastCommit = await GitProcess.exec(['log', '-n', '1', '--pretty=format:\'%s\''], ELECTRON_DIR)
return !lastCommitWasRelease.test(lastCommit.stdout)
const lastCommitWasRelease = new RegExp('^Bump v[0-9.]*(-beta[0-9.]*)?(-nightly[0-9.]*)?$', 'g');
const lastCommit = await GitProcess.exec(['log', '-n', '1', '--pretty=format:\'%s\''], ELECTRON_DIR);
return !lastCommitWasRelease.test(lastCommit.stdout);
}

async function prepareRelease (isBeta, notesOnly) {
if (args.dryRun) {
const newVersion = await getNewVersion(true)
console.log(newVersion)
const newVersion = await getNewVersion(true);
console.log(newVersion);
} else {
const currentBranch = (args.branch) ? args.branch : await getCurrentBranch(ELECTRON_DIR)
const currentBranch = (args.branch) ? args.branch : await getCurrentBranch(ELECTRON_DIR);
if (notesOnly) {
const newVersion = await getNewVersion(true)
const releaseNotes = await getReleaseNotes(currentBranch, newVersion)
console.log(`Draft release notes are: \n${releaseNotes.text}`)
const newVersion = await getNewVersion(true);
const releaseNotes = await getReleaseNotes(currentBranch, newVersion);
console.log(`Draft release notes are: \n${releaseNotes.text}`);
} else {
const changes = await changesToRelease(currentBranch)
const changes = await changesToRelease(currentBranch);
if (changes) {
await verifyNewVersion()
await createRelease(currentBranch, isBeta)
await pushRelease(currentBranch)
await runReleaseBuilds(currentBranch)
await verifyNewVersion();
await createRelease(currentBranch, isBeta);
await pushRelease(currentBranch);
await runReleaseBuilds(currentBranch);
} else {
console.log('There are no new changes to this branch since the last release, aborting release.')
process.exit(1)
console.log('There are no new changes to this branch since the last release, aborting release.');
process.exit(1);
}
}
}
}

prepareRelease(!args.stable, args.notesOnly)
prepareRelease(!args.stable, args.notesOnly);

@ -1,22 +1,22 @@
const temp = require('temp')
const fs = require('fs')
const path = require('path')
const childProcess = require('child_process')
const { getCurrentBranch, ELECTRON_DIR } = require('../lib/utils')
const request = require('request')
const semver = require('semver')
const rootPackageJson = require('../../package.json')
const temp = require('temp');
const fs = require('fs');
const path = require('path');
const childProcess = require('child_process');
const { getCurrentBranch, ELECTRON_DIR } = require('../lib/utils');
const request = require('request');
const semver = require('semver');
const rootPackageJson = require('../../package.json');
const octokit = require('@octokit/rest')({
headers: { 'User-Agent': 'electron-npm-publisher' }
})
});

if (!process.env.ELECTRON_NPM_OTP) {
console.error('Please set ELECTRON_NPM_OTP')
process.exit(1)
console.error('Please set ELECTRON_NPM_OTP');
process.exit(1);
}

let tempDir
temp.track() // track and cleanup files at exit
let tempDir;
temp.track(); // track and cleanup files at exit

const files = [
'cli.js',

@ -25,7 +25,7 @@ const files = [
'package.json',
'README.md',
'LICENSE'
]
];

const jsonFields = [
'name',

@ -35,58 +35,58 @@ const jsonFields = [
'license',
'author',
'keywords'
]
];

let npmTag = ''
let npmTag = '';

new Promise((resolve, reject) => {
temp.mkdir('electron-npm', (err, dirPath) => {
if (err) {
reject(err)
reject(err);
} else {
resolve(dirPath)
resolve(dirPath);
}
})
});
})
.then((dirPath) => {
tempDir = dirPath
tempDir = dirPath;
// copy files from `/npm` to temp directory
files.forEach((name) => {
const noThirdSegment = name === 'README.md' || name === 'LICENSE'
const noThirdSegment = name === 'README.md' || name === 'LICENSE';
fs.writeFileSync(
path.join(tempDir, name),
fs.readFileSync(path.join(ELECTRON_DIR, noThirdSegment ? '' : 'npm', name))
)
})
);
});
// copy from root package.json to temp/package.json
const packageJson = require(path.join(tempDir, 'package.json'))
const packageJson = require(path.join(tempDir, 'package.json'));
jsonFields.forEach((fieldName) => {
packageJson[fieldName] = rootPackageJson[fieldName]
})
packageJson[fieldName] = rootPackageJson[fieldName];
});
fs.writeFileSync(
path.join(tempDir, 'package.json'),
JSON.stringify(packageJson, null, 2)
)
);

return octokit.repos.listReleases({
owner: 'electron',
repo: rootPackageJson.version.indexOf('nightly') > 0 ? 'nightlies' : 'electron'
})
});
})
.then((releases) => {
// download electron.d.ts from release
const release = releases.data.find(
(release) => release.tag_name === `v${rootPackageJson.version}`
)
);
if (!release) {
throw new Error(`cannot find release with tag v${rootPackageJson.version}`)
throw new Error(`cannot find release with tag v${rootPackageJson.version}`);
}
return release
return release;
})
.then((release) => {
const tsdAsset = release.assets.find((asset) => asset.name === 'electron.d.ts')
const tsdAsset = release.assets.find((asset) => asset.name === 'electron.d.ts');
if (!tsdAsset) {
throw new Error(`cannot find electron.d.ts from v${rootPackageJson.version} release assets`)
throw new Error(`cannot find electron.d.ts from v${rootPackageJson.version} release assets`);
}
return new Promise((resolve, reject) => {
request.get({

@ -97,78 +97,78 @@ new Promise((resolve, reject) => {
}
}, (err, response, body) => {
if (err || response.statusCode !== 200) {
reject(err || new Error('Cannot download electron.d.ts'))
reject(err || new Error('Cannot download electron.d.ts'));
} else {
fs.writeFileSync(path.join(tempDir, 'electron.d.ts'), body)
resolve(release)
fs.writeFileSync(path.join(tempDir, 'electron.d.ts'), body);
resolve(release);
}
})
})
});
});
})
.then(async (release) => {
const currentBranch = await getCurrentBranch()
const currentBranch = await getCurrentBranch();

if (release.tag_name.indexOf('nightly') > 0) {
if (currentBranch === 'master') {
// Nightlies get published to their own module, so master nightlies should be tagged as latest
npmTag = 'latest'
npmTag = 'latest';
} else {
npmTag = `nightly-${currentBranch}`
npmTag = `nightly-${currentBranch}`;
}

const currentJson = JSON.parse(fs.readFileSync(path.join(tempDir, 'package.json'), 'utf8'))
currentJson.name = 'electron-nightly'
rootPackageJson.name = 'electron-nightly'
const currentJson = JSON.parse(fs.readFileSync(path.join(tempDir, 'package.json'), 'utf8'));
currentJson.name = 'electron-nightly';
rootPackageJson.name = 'electron-nightly';

fs.writeFileSync(
path.join(tempDir, 'package.json'),
JSON.stringify(currentJson, null, 2)
)
);
} else {
if (currentBranch === 'master') {
// This should never happen, master releases should be nightly releases
// this is here just-in-case
npmTag = 'master'
npmTag = 'master';
} else if (!release.prerelease) {
// Tag the release with a `2-0-x` style tag
npmTag = currentBranch
npmTag = currentBranch;
} else {
// Tag the release with a `beta-3-0-x` style tag
npmTag = `beta-${currentBranch}`
npmTag = `beta-${currentBranch}`;
}
}
})
.then(() => childProcess.execSync('npm pack', { cwd: tempDir }))
.then(() => {
// test that the package can install electron prebuilt from github release
const tarballPath = path.join(tempDir, `${rootPackageJson.name}-${rootPackageJson.version}.tgz`)
const tarballPath = path.join(tempDir, `${rootPackageJson.name}-${rootPackageJson.version}.tgz`);
return new Promise((resolve, reject) => {
childProcess.execSync(`npm install ${tarballPath} --force --silent`, {
env: Object.assign({}, process.env, { electron_config_cache: tempDir }),
cwd: tempDir
})
resolve(tarballPath)
})
});
resolve(tarballPath);
});
})
.then((tarballPath) => childProcess.execSync(`npm publish ${tarballPath} --tag ${npmTag} --otp=${process.env.ELECTRON_NPM_OTP}`))
.then(() => {
const currentTags = JSON.parse(childProcess.execSync('npm show electron dist-tags --json').toString())
const localVersion = rootPackageJson.version
const parsedLocalVersion = semver.parse(localVersion)
const currentTags = JSON.parse(childProcess.execSync('npm show electron dist-tags --json').toString());
const localVersion = rootPackageJson.version;
const parsedLocalVersion = semver.parse(localVersion);
if (rootPackageJson.name === 'electron') {
// We should only manually add dist tags for non-nightly releases where the package name is still
// "electron"
if (parsedLocalVersion.prerelease.length === 0 &&
semver.gt(localVersion, currentTags.latest)) {
childProcess.execSync(`npm dist-tag add electron@${localVersion} latest --otp=${process.env.ELECTRON_NPM_OTP}`)
childProcess.execSync(`npm dist-tag add electron@${localVersion} latest --otp=${process.env.ELECTRON_NPM_OTP}`);
}
if (parsedLocalVersion.prerelease[0] === 'beta' &&
semver.gt(localVersion, currentTags.beta)) {
childProcess.execSync(`npm dist-tag add electron@${localVersion} beta --otp=${process.env.ELECTRON_NPM_OTP}`)
childProcess.execSync(`npm dist-tag add electron@${localVersion} beta --otp=${process.env.ELECTRON_NPM_OTP}`);
}
}
})
.catch((err) => {
console.error(`Error: ${err}`)
process.exit(1)
})
console.error(`Error: ${err}`);
process.exit(1);
});

@ -1,39 +1,39 @@
#!/usr/bin/env node

if (!process.env.CI) require('dotenv-safe').load()
if (!process.env.CI) require('dotenv-safe').load();
const args = require('minimist')(process.argv.slice(2), {
string: ['tag', 'releaseID'],
default: { releaseID: '' }
})
const path = require('path')
const { execSync } = require('child_process')
const { GitProcess } = require('dugite')
const { getCurrentBranch, ELECTRON_DIR } = require('../lib/utils.js')
});
const path = require('path');
const { execSync } = require('child_process');
const { GitProcess } = require('dugite');
const { getCurrentBranch, ELECTRON_DIR } = require('../lib/utils.js');

const octokit = require('@octokit/rest')({
auth: process.env.ELECTRON_GITHUB_TOKEN
})
});

require('colors')
const pass = '✓'.green
const fail = '✗'.red
require('colors');
const pass = '✓'.green;
const fail = '✗'.red;

function getLastBumpCommit (tag) {
const data = execSync(`git log -n1 --grep "Bump ${tag}" --format='format:{"hash": "%H", "message": "%s"}'`).toString()
return JSON.parse(data)
const data = execSync(`git log -n1 --grep "Bump ${tag}" --format='format:{"hash": "%H", "message": "%s"}'`).toString();
return JSON.parse(data);
}

async function revertBumpCommit (tag) {
const branch = await getCurrentBranch()
const commitToRevert = getLastBumpCommit(tag).hash
await GitProcess.exec(['revert', commitToRevert], ELECTRON_DIR)
const pushDetails = await GitProcess.exec(['push', 'origin', `HEAD:${branch}`, '--follow-tags'], ELECTRON_DIR)
const branch = await getCurrentBranch();
const commitToRevert = getLastBumpCommit(tag).hash;
await GitProcess.exec(['revert', commitToRevert], ELECTRON_DIR);
const pushDetails = await GitProcess.exec(['push', 'origin', `HEAD:${branch}`, '--follow-tags'], ELECTRON_DIR);
if (pushDetails.exitCode === 0) {
console.log(`${pass} successfully reverted release commit.`)
console.log(`${pass} successfully reverted release commit.`);
} else {
const error = GitProcess.parseError(pushDetails.stderr)
console.error(`${fail} could not push release commit: `, error)
process.exit(1)
const error = GitProcess.parseError(pushDetails.stderr);
console.error(`${fail} could not push release commit: `, error);
process.exit(1);
}
}

@ -43,22 +43,22 @@ async function deleteDraft (releaseId, targetRepo) {
owner: 'electron',
repo: targetRepo,
release_id: parseInt(releaseId, 10)
})
});
if (!result.data.draft) {
console.log(`${fail} published releases cannot be deleted.`)
return false
console.log(`${fail} published releases cannot be deleted.`);
return false;
} else {
await octokit.repos.deleteRelease({
owner: 'electron',
repo: targetRepo,
release_id: result.data.id
})
});
}
console.log(`${pass} successfully deleted draft with id ${releaseId} from ${targetRepo}`)
return true
console.log(`${pass} successfully deleted draft with id ${releaseId} from ${targetRepo}`);
return true;
} catch (err) {
console.error(`${fail} couldn't delete draft with id ${releaseId} from ${targetRepo}: `, err)
return false
console.error(`${fail} couldn't delete draft with id ${releaseId} from ${targetRepo}: `, err);
return false;
}
}

@ -68,42 +68,42 @@ async function deleteTag (tag, targetRepo) {
owner: 'electron',
repo: targetRepo,
ref: `tags/${tag}`
})
console.log(`${pass} successfully deleted tag ${tag} from ${targetRepo}`)
});
console.log(`${pass} successfully deleted tag ${tag} from ${targetRepo}`);
} catch (err) {
console.log(`${fail} couldn't delete tag ${tag} from ${targetRepo}: `, err)
console.log(`${fail} couldn't delete tag ${tag} from ${targetRepo}: `, err);
}
}

async function cleanReleaseArtifacts () {
const releaseId = args.releaseID.length > 0 ? args.releaseID : null
const isNightly = args.tag.includes('nightly')
const releaseId = args.releaseID.length > 0 ? args.releaseID : null;
const isNightly = args.tag.includes('nightly');

// try to revert commit regardless of tag and draft deletion status
await revertBumpCommit(args.tag)
await revertBumpCommit(args.tag);

if (releaseId) {
if (isNightly) {
await deleteDraft(releaseId, 'nightlies')
await deleteDraft(releaseId, 'nightlies');

// We only need to delete the Electron tag since the
// nightly tag is only created at publish-time.
await deleteTag(args.tag, 'electron')
await deleteTag(args.tag, 'electron');
} else {
const deletedElectronDraft = await deleteDraft(releaseId, 'electron')
const deletedElectronDraft = await deleteDraft(releaseId, 'electron');
// don't delete tag unless draft deleted successfully
if (deletedElectronDraft) {
await deleteTag(args.tag, 'electron')
await deleteTag(args.tag, 'electron');
}
}
} else {
await Promise.all([
deleteTag(args.tag, 'electron'),
deleteTag(args.tag, 'nightlies')
])
]);
}

console.log(`${pass} failed release artifact cleanup complete`)
console.log(`${pass} failed release artifact cleanup complete`);
}

cleanReleaseArtifacts()
cleanReleaseArtifacts();

@ -1,6 +1,6 @@
#!/usr/bin/env node

if (!process.env.CI) require('dotenv-safe').load()
if (!process.env.CI) require('dotenv-safe').load();

const args = require('minimist')(process.argv.slice(2), {
boolean: [

@ -10,87 +10,87 @@ const args = require('minimist')(process.argv.slice(2), {
'verboseNugget'
],
default: { verboseNugget: false }
})
const fs = require('fs')
const { execSync } = require('child_process')
const nugget = require('nugget')
const got = require('got')
const pkg = require('../../package.json')
const pkgVersion = `v${pkg.version}`
const path = require('path')
const sumchecker = require('sumchecker')
const temp = require('temp').track()
const { URL } = require('url')
});
const fs = require('fs');
const { execSync } = require('child_process');
const nugget = require('nugget');
const got = require('got');
const pkg = require('../../package.json');
const pkgVersion = `v${pkg.version}`;
const path = require('path');
const sumchecker = require('sumchecker');
const temp = require('temp').track();
const { URL } = require('url');

require('colors')
const pass = '✓'.green
const fail = '✗'.red
require('colors');
const pass = '✓'.green;
const fail = '✗'.red;

const { ELECTRON_DIR } = require('../lib/utils')
const { ELECTRON_DIR } = require('../lib/utils');

const octokit = require('@octokit/rest')({
auth: process.env.ELECTRON_GITHUB_TOKEN
})
});

const targetRepo = pkgVersion.indexOf('nightly') > 0 ? 'nightlies' : 'electron'
let failureCount = 0
const targetRepo = pkgVersion.indexOf('nightly') > 0 ? 'nightlies' : 'electron';
let failureCount = 0;

async function getDraftRelease (version, skipValidation) {
const releaseInfo = await octokit.repos.listReleases({
owner: 'electron',
repo: targetRepo
})
});

const versionToCheck = version || pkgVersion
const versionToCheck = version || pkgVersion;
const drafts = releaseInfo.data.filter(release => {
return release.tag_name === versionToCheck && release.draft === true
})
return release.tag_name === versionToCheck && release.draft === true;
});

const draft = drafts[0]
const draft = drafts[0];
if (!skipValidation) {
failureCount = 0
check(drafts.length === 1, 'one draft exists', true)
failureCount = 0;
check(drafts.length === 1, 'one draft exists', true);
if (versionToCheck.indexOf('beta') > -1) {
check(draft.prerelease, 'draft is a prerelease')
check(draft.prerelease, 'draft is a prerelease');
}
check(draft.body.length > 50 && !draft.body.includes('(placeholder)'), 'draft has release notes')
check((failureCount === 0), 'Draft release looks good to go.', true)
check(draft.body.length > 50 && !draft.body.includes('(placeholder)'), 'draft has release notes');
check((failureCount === 0), 'Draft release looks good to go.', true);
}
return draft
return draft;
}

async function validateReleaseAssets (release, validatingRelease) {
const requiredAssets = assetsForVersion(release.tag_name, validatingRelease).sort()
const extantAssets = release.assets.map(asset => asset.name).sort()
const downloadUrls = release.assets.map(asset => asset.browser_download_url).sort()
const requiredAssets = assetsForVersion(release.tag_name, validatingRelease).sort();
const extantAssets = release.assets.map(asset => asset.name).sort();
const downloadUrls = release.assets.map(asset => asset.browser_download_url).sort();

failureCount = 0
failureCount = 0;
requiredAssets.forEach(asset => {
check(extantAssets.includes(asset), asset)
})
check((failureCount === 0), 'All required GitHub assets exist for release', true)
check(extantAssets.includes(asset), asset);
});
check((failureCount === 0), 'All required GitHub assets exist for release', true);

if (!validatingRelease || !release.draft) {
if (release.draft) {
await verifyAssets(release)
await verifyAssets(release);
} else {
await verifyShasums(downloadUrls)
.catch(err => {
console.log(`${fail} error verifyingShasums`, err)
})
console.log(`${fail} error verifyingShasums`, err);
});
}
const s3Urls = s3UrlsForVersion(release.tag_name)
await verifyShasums(s3Urls, true)
const s3Urls = s3UrlsForVersion(release.tag_name);
await verifyShasums(s3Urls, true);
}
}

function check (condition, statement, exitIfFail = false) {
if (condition) {
console.log(`${pass} ${statement}`)
console.log(`${pass} ${statement}`);
} else {
failureCount++
console.log(`${fail} ${statement}`)
if (exitIfFail) process.exit(1)
failureCount++;
console.log(`${fail} ${statement}`);
if (exitIfFail) process.exit(1);
}
}

@ -153,15 +153,15 @@ function assetsForVersion (version, validatingRelease) {
`electron-${version}-win32-ia32-toolchain-profile.zip`,
`electron-${version}-win32-x64-toolchain-profile.zip`,
`electron-${version}-win32-arm64-toolchain-profile.zip`
]
];
if (!validatingRelease) {
patterns.push('SHASUMS256.txt')
patterns.push('SHASUMS256.txt');
}
return patterns
return patterns;
}

function s3UrlsForVersion (version) {
const bucket = 'https://gh-contractor-zcbenz.s3.amazonaws.com/'
const bucket = 'https://gh-contractor-zcbenz.s3.amazonaws.com/';
const patterns = [
`${bucket}atom-shell/dist/${version}/iojs-${version}-headers.tar.gz`,
`${bucket}atom-shell/dist/${version}/iojs-${version}.tar.gz`,

@ -173,66 +173,66 @@ function s3UrlsForVersion (version) {
`${bucket}atom-shell/dist/${version}/SHASUMS.txt`,
`${bucket}atom-shell/dist/${version}/SHASUMS256.txt`,
`${bucket}atom-shell/dist/index.json`
]
return patterns
];
return patterns;
}

function runScript (scriptName, scriptArgs, cwd) {
const scriptCommand = `${scriptName} ${scriptArgs.join(' ')}`
const scriptCommand = `${scriptName} ${scriptArgs.join(' ')}`;
const scriptOptions = {
encoding: 'UTF-8'
}
if (cwd) scriptOptions.cwd = cwd
};
if (cwd) scriptOptions.cwd = cwd;
try {
return execSync(scriptCommand, scriptOptions)
return execSync(scriptCommand, scriptOptions);
} catch (err) {
console.log(`${fail} Error running ${scriptName}`, err)
process.exit(1)
console.log(`${fail} Error running ${scriptName}`, err);
process.exit(1);
}
}

function uploadNodeShasums () {
console.log('Uploading Node SHASUMS file to S3.')
const scriptPath = path.join(ELECTRON_DIR, 'script', 'release', 'uploaders', 'upload-node-checksums.py')
runScript(scriptPath, ['-v', pkgVersion])
console.log(`${pass} Done uploading Node SHASUMS file to S3.`)
console.log('Uploading Node SHASUMS file to S3.');
const scriptPath = path.join(ELECTRON_DIR, 'script', 'release', 'uploaders', 'upload-node-checksums.py');
runScript(scriptPath, ['-v', pkgVersion]);
console.log(`${pass} Done uploading Node SHASUMS file to S3.`);
}

function uploadIndexJson () {
console.log('Uploading index.json to S3.')
const scriptPath = path.join(ELECTRON_DIR, 'script', 'release', 'uploaders', 'upload-index-json.py')
runScript(scriptPath, [pkgVersion])
console.log(`${pass} Done uploading index.json to S3.`)
console.log('Uploading index.json to S3.');
const scriptPath = path.join(ELECTRON_DIR, 'script', 'release', 'uploaders', 'upload-index-json.py');
runScript(scriptPath, [pkgVersion]);
console.log(`${pass} Done uploading index.json to S3.`);
}

async function createReleaseShasums (release) {
const fileName = 'SHASUMS256.txt'
const existingAssets = release.assets.filter(asset => asset.name === fileName)
const fileName = 'SHASUMS256.txt';
const existingAssets = release.assets.filter(asset => asset.name === fileName);
if (existingAssets.length > 0) {
console.log(`${fileName} already exists on GitHub; deleting before creating new file.`)
console.log(`${fileName} already exists on GitHub; deleting before creating new file.`);
await octokit.repos.deleteReleaseAsset({
owner: 'electron',
repo: targetRepo,
asset_id: existingAssets[0].id
}).catch(err => {
console.log(`${fail} Error deleting ${fileName} on GitHub:`, err)
})
console.log(`${fail} Error deleting ${fileName} on GitHub:`, err);
});
}
console.log(`Creating and uploading the release ${fileName}.`)
const scriptPath = path.join(ELECTRON_DIR, 'script', 'release', 'merge-electron-checksums.py')
const checksums = runScript(scriptPath, ['-v', pkgVersion])
console.log(`Creating and uploading the release ${fileName}.`);
const scriptPath = path.join(ELECTRON_DIR, 'script', 'release', 'merge-electron-checksums.py');
const checksums = runScript(scriptPath, ['-v', pkgVersion]);

console.log(`${pass} Generated release SHASUMS.`)
const filePath = await saveShaSumFile(checksums, fileName)
console.log(`${pass} Generated release SHASUMS.`);
const filePath = await saveShaSumFile(checksums, fileName);

console.log(`${pass} Created ${fileName} file.`)
await uploadShasumFile(filePath, fileName, release.id)
console.log(`${pass} Created ${fileName} file.`);
await uploadShasumFile(filePath, fileName, release.id);

console.log(`${pass} Successfully uploaded ${fileName} to GitHub.`)
console.log(`${pass} Successfully uploaded ${fileName} to GitHub.`);
}

async function uploadShasumFile (filePath, fileName, releaseId) {
const uploadUrl = `https://uploads.github.com/repos/electron/${targetRepo}/releases/${releaseId}/assets{?name,label}`
const uploadUrl = `https://uploads.github.com/repos/electron/${targetRepo}/releases/${releaseId}/assets{?name,label}`;
return octokit.repos.uploadReleaseAsset({
url: uploadUrl,
headers: {

@ -242,29 +242,29 @@ async function uploadShasumFile (filePath, fileName, releaseId) {
file: fs.createReadStream(filePath),
name: fileName
}).catch(err => {
console.log(`${fail} Error uploading ${filePath} to GitHub:`, err)
process.exit(1)
})
console.log(`${fail} Error uploading ${filePath} to GitHub:`, err);
process.exit(1);
});
}

function saveShaSumFile (checksums, fileName) {
return new Promise((resolve, reject) => {
temp.open(fileName, (err, info) => {
if (err) {
console.log(`${fail} Could not create ${fileName} file`)
process.exit(1)
console.log(`${fail} Could not create ${fileName} file`);
process.exit(1);
} else {
fs.writeFileSync(info.fd, checksums)
fs.writeFileSync(info.fd, checksums);
fs.close(info.fd, (err) => {
if (err) {
console.log(`${fail} Could not close ${fileName} file`)
process.exit(1)
console.log(`${fail} Could not close ${fileName} file`);
process.exit(1);
}
resolve(info.path)
})
resolve(info.path);
});
}
})
})
});
});
}

async function publishRelease (release) {

@ -275,34 +275,34 @@ async function publishRelease (release) {
tag_name: release.tag_name,
draft: false
}).catch(err => {
console.log(`${fail} Error publishing release:`, err)
process.exit(1)
})
console.log(`${fail} Error publishing release:`, err);
process.exit(1);
});
}

async function makeRelease (releaseToValidate) {
if (releaseToValidate) {
if (releaseToValidate === true) {
releaseToValidate = pkgVersion
releaseToValidate = pkgVersion;
} else {
console.log('Release to validate !=== true')
console.log('Release to validate !=== true');
}
console.log(`Validating release ${releaseToValidate}`)
const release = await getDraftRelease(releaseToValidate)
await validateReleaseAssets(release, true)
console.log(`Validating release ${releaseToValidate}`);
const release = await getDraftRelease(releaseToValidate);
await validateReleaseAssets(release, true);
} else {
let draftRelease = await getDraftRelease()
uploadNodeShasums()
uploadIndexJson()
let draftRelease = await getDraftRelease();
uploadNodeShasums();
uploadIndexJson();

await createReleaseShasums(draftRelease)
await createReleaseShasums(draftRelease);

// Fetch latest version of release before verifying
draftRelease = await getDraftRelease(pkgVersion, true)
await validateReleaseAssets(draftRelease)
await publishRelease(draftRelease)
draftRelease = await getDraftRelease(pkgVersion, true);
await validateReleaseAssets(draftRelease);
await publishRelease(draftRelease);
console.log(`${pass} SUCCESS!!! Release has been published. Please run ` +
'"npm run publish-to-npm" to publish release to npm.')
'"npm run publish-to-npm" to publish release to npm.');
}
}

@ -310,19 +310,19 @@ async function makeTempDir () {
return new Promise((resolve, reject) => {
temp.mkdir('electron-publish', (err, dirPath) => {
if (err) {
reject(err)
reject(err);
} else {
resolve(dirPath)
resolve(dirPath);
}
})
})
});
});
}

async function verifyAssets (release) {
const downloadDir = await makeTempDir()
const downloadDir = await makeTempDir();

console.log('Downloading files from GitHub to verify shasums')
const shaSumFile = 'SHASUMS256.txt'
console.log('Downloading files from GitHub to verify shasums');
const shaSumFile = 'SHASUMS256.txt';

let filesToCheck = await Promise.all(release.assets.map(async asset => {
const requestOptions = await octokit.repos.getReleaseAsset.endpoint({

@ -332,26 +332,26 @@ async function verifyAssets (release) {
headers: {
Accept: 'application/octet-stream'
}
})
});

const { url, headers } = requestOptions
headers.authorization = `token ${process.env.ELECTRON_GITHUB_TOKEN}`
const { url, headers } = requestOptions;
headers.authorization = `token ${process.env.ELECTRON_GITHUB_TOKEN}`;

const response = await got(url, {
followRedirect: false,
method: 'HEAD',
headers
})
});

await downloadFiles(response.headers.location, downloadDir, asset.name)
return asset.name
await downloadFiles(response.headers.location, downloadDir, asset.name);
return asset.name;
})).catch(err => {
console.log(`${fail} Error downloading files from GitHub`, err)
process.exit(1)
})
console.log(`${fail} Error downloading files from GitHub`, err);
process.exit(1);
});

filesToCheck = filesToCheck.filter(fileName => fileName !== shaSumFile)
let checkerOpts
filesToCheck = filesToCheck.filter(fileName => fileName !== shaSumFile);
let checkerOpts;
await validateChecksums({
algorithm: 'sha256',
filesToCheck,

@ -359,71 +359,71 @@ async function verifyAssets (release) {
shaSumFile,
checkerOpts,
fileSource: 'GitHub'
})
});
}

function downloadFiles (urls, directory, targetName) {
return new Promise((resolve, reject) => {
const nuggetOpts = { dir: directory }
nuggetOpts.quiet = !args.verboseNugget
if (targetName) nuggetOpts.target = targetName
const nuggetOpts = { dir: directory };
nuggetOpts.quiet = !args.verboseNugget;
if (targetName) nuggetOpts.target = targetName;

nugget(urls, nuggetOpts, (err) => {
if (err) {
reject(err)
reject(err);
} else {
console.log(`${pass} all files downloaded successfully!`)
resolve()
console.log(`${pass} all files downloaded successfully!`);
resolve();
}
})
})
});
});
}

async function verifyShasums (urls, isS3) {
const fileSource = isS3 ? 'S3' : 'GitHub'
console.log(`Downloading files from ${fileSource} to verify shasums`)
const downloadDir = await makeTempDir()
let filesToCheck = []
const fileSource = isS3 ? 'S3' : 'GitHub';
console.log(`Downloading files from ${fileSource} to verify shasums`);
const downloadDir = await makeTempDir();
let filesToCheck = [];
try {
if (!isS3) {
await downloadFiles(urls, downloadDir)
await downloadFiles(urls, downloadDir);
filesToCheck = urls.map(url => {
const currentUrl = new URL(url)
return path.basename(currentUrl.pathname)
}).filter(file => file.indexOf('SHASUMS') === -1)
const currentUrl = new URL(url);
return path.basename(currentUrl.pathname);
}).filter(file => file.indexOf('SHASUMS') === -1);
} else {
const s3VersionPath = `/atom-shell/dist/${pkgVersion}/`
const s3VersionPath = `/atom-shell/dist/${pkgVersion}/`;
await Promise.all(urls.map(async (url) => {
const currentUrl = new URL(url)
const dirname = path.dirname(currentUrl.pathname)
const filename = path.basename(currentUrl.pathname)
const s3VersionPathIdx = dirname.indexOf(s3VersionPath)
const currentUrl = new URL(url);
const dirname = path.dirname(currentUrl.pathname);
const filename = path.basename(currentUrl.pathname);
const s3VersionPathIdx = dirname.indexOf(s3VersionPath);
if (s3VersionPathIdx === -1 || dirname === s3VersionPath) {
if (s3VersionPathIdx !== -1 && filename.indexOf('SHASUMS') === -1) {
            filesToCheck.push(filename)
            filesToCheck.push(filename);
          }
          await downloadFiles(url, downloadDir)
          await downloadFiles(url, downloadDir);
        } else {
          const subDirectory = dirname.substr(s3VersionPathIdx + s3VersionPath.length)
          const fileDirectory = path.join(downloadDir, subDirectory)
          const subDirectory = dirname.substr(s3VersionPathIdx + s3VersionPath.length);
          const fileDirectory = path.join(downloadDir, subDirectory);
          try {
            fs.statSync(fileDirectory)
            fs.statSync(fileDirectory);
          } catch (err) {
            fs.mkdirSync(fileDirectory)
            fs.mkdirSync(fileDirectory);
          }
          filesToCheck.push(path.join(subDirectory, filename))
          await downloadFiles(url, fileDirectory)
          filesToCheck.push(path.join(subDirectory, filename));
          await downloadFiles(url, fileDirectory);
        }
      }))
      }));
    }
  } catch (err) {
    console.log(`${fail} Error downloading files from ${fileSource}`, err)
    process.exit(1)
    console.log(`${fail} Error downloading files from ${fileSource}`, err);
    process.exit(1);
  }
  console.log(`${pass} Successfully downloaded the files from ${fileSource}.`)
  let checkerOpts
  console.log(`${pass} Successfully downloaded the files from ${fileSource}.`);
  let checkerOpts;
  if (isS3) {
    checkerOpts = { defaultTextEncoding: 'binary' }
    checkerOpts = { defaultTextEncoding: 'binary' };
  }

  await validateChecksums({

@@ -433,7 +433,7 @@ async function verifyShasums (urls, isS3) {
    shaSumFile: 'SHASUMS256.txt',
    checkerOpts,
    fileSource
  })
  });

  if (isS3) {
    await validateChecksums({

@@ -443,37 +443,37 @@ async function verifyShasums (urls, isS3) {
      shaSumFile: 'SHASUMS.txt',
      checkerOpts,
      fileSource
    })
    });
  }
}

async function validateChecksums (validationArgs) {
  console.log(`Validating checksums for files from ${validationArgs.fileSource} ` +
    `against ${validationArgs.shaSumFile}.`)
  const shaSumFilePath = path.join(validationArgs.fileDirectory, validationArgs.shaSumFile)
    `against ${validationArgs.shaSumFile}.`);
  const shaSumFilePath = path.join(validationArgs.fileDirectory, validationArgs.shaSumFile);
  const checker = new sumchecker.ChecksumValidator(validationArgs.algorithm,
    shaSumFilePath, validationArgs.checkerOpts)
    shaSumFilePath, validationArgs.checkerOpts);
  await checker.validate(validationArgs.fileDirectory, validationArgs.filesToCheck)
    .catch(err => {
      if (err instanceof sumchecker.ChecksumMismatchError) {
        console.error(`${fail} The checksum of ${err.filename} from ` +
          `${validationArgs.fileSource} did not match the shasum in ` +
          `${validationArgs.shaSumFile}`)
          `${validationArgs.shaSumFile}`);
      } else if (err instanceof sumchecker.ChecksumParseError) {
        console.error(`${fail} The checksum file ${validationArgs.shaSumFile} ` +
          `from ${validationArgs.fileSource} could not be parsed.`, err)
          `from ${validationArgs.fileSource} could not be parsed.`, err);
      } else if (err instanceof sumchecker.NoChecksumFoundError) {
        console.error(`${fail} The file ${err.filename} from ` +
          `${validationArgs.fileSource} was not in the shasum file ` +
          `${validationArgs.shaSumFile}.`)
          `${validationArgs.shaSumFile}.`);
      } else {
        console.error(`${fail} Error matching files from ` +
          `${validationArgs.fileSource} shasums in ${validationArgs.shaSumFile}.`, err)
          `${validationArgs.fileSource} shasums in ${validationArgs.shaSumFile}.`, err);
      }
      process.exit(1)
    })
      process.exit(1);
    });
  console.log(`${pass} All files from ${validationArgs.fileSource} match ` +
    `shasums defined in ${validationArgs.shaSumFile}.`)
    `shasums defined in ${validationArgs.shaSumFile}.`);
}

makeRelease(args.validateRelease)
makeRelease(args.validateRelease);
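
The S3 branch of verifyShasums above derives a subdirectory for nested assets from the URL path, so the later shasum check can resolve the same relative path. A minimal sketch of that derivation with only Node built-ins; the version and URL below are hypothetical examples:

const path = require('path');

const pkgVersion = 'v9.0.0'; // hypothetical version
const s3VersionPath = `/atom-shell/dist/${pkgVersion}/`;
// hypothetical nested asset URL
const url = `https://s3.amazonaws.com/atom-shell/dist/${pkgVersion}/chromedriver/chromedriver.zip`;

const currentUrl = new URL(url);
const dirname = path.dirname(currentUrl.pathname);   // '/atom-shell/dist/v9.0.0/chromedriver'
const filename = path.basename(currentUrl.pathname); // 'chromedriver.zip'
const s3VersionPathIdx = dirname.indexOf(s3VersionPath);

// Everything after the version directory is the asset's subdirectory.
const subDirectory = dirname.substr(s3VersionPathIdx + s3VersionPath.length);
console.log(path.join(subDirectory, filename)); // 'chromedriver/chromedriver.zip'
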
@@ -1,40 +1,40 @@
if (!process.env.CI) require('dotenv-safe').load()
if (!process.env.CI) require('dotenv-safe').load();

const fs = require('fs')
const fs = require('fs');

const octokit = require('@octokit/rest')({
  auth: process.env.ELECTRON_GITHUB_TOKEN
})
});

if (process.argv.length < 6) {
  console.log('Usage: upload-to-github filePath fileName releaseId')
  process.exit(1)
  console.log('Usage: upload-to-github filePath fileName releaseId');
  process.exit(1);
}

const filePath = process.argv[2]
const fileName = process.argv[3]
const releaseId = process.argv[4]
const releaseVersion = process.argv[5]
const filePath = process.argv[2];
const fileName = process.argv[3];
const releaseId = process.argv[4];
const releaseVersion = process.argv[5];

const getHeaders = (filePath, fileName) => {
  const extension = fileName.split('.').pop()
  const size = fs.statSync(filePath).size
  const extension = fileName.split('.').pop();
  const size = fs.statSync(filePath).size;
  const options = {
    json: 'text/json',
    zip: 'application/zip',
    txt: 'text/plain',
    ts: 'application/typescript'
  }
  };

  return {
    'content-type': options[extension],
    'content-length': size
  }
}
  };
};

const targetRepo = releaseVersion.indexOf('nightly') > 0 ? 'nightlies' : 'electron'
const uploadUrl = `https://uploads.github.com/repos/electron/${targetRepo}/releases/${releaseId}/assets{?name,label}`
let retry = 0
const targetRepo = releaseVersion.indexOf('nightly') > 0 ? 'nightlies' : 'electron';
const uploadUrl = `https://uploads.github.com/repos/electron/${targetRepo}/releases/${releaseId}/assets{?name,label}`;
let retry = 0;

function uploadToGitHub () {
  octokit.repos.uploadReleaseAsset({

@@ -43,12 +43,12 @@ function uploadToGitHub () {
    file: fs.createReadStream(filePath),
    name: fileName
  }).then(() => {
    console.log(`Successfully uploaded ${fileName} to GitHub.`)
    process.exit()
    console.log(`Successfully uploaded ${fileName} to GitHub.`);
    process.exit();
  }).catch((err) => {
    if (retry < 4) {
      console.log(`Error uploading ${fileName} to GitHub, will retry. Error was:`, err)
      retry++
      console.log(`Error uploading ${fileName} to GitHub, will retry. Error was:`, err);
      retry++;

      octokit.repos.listAssetsForRelease({
        owner: 'electron',

@@ -56,31 +56,31 @@ function uploadToGitHub () {
        release_id: releaseId,
        per_page: 100
      }).then(assets => {
        console.log('Got list of assets for existing release:')
        console.log(JSON.stringify(assets.data, null, ' '))
        const existingAssets = assets.data.filter(asset => asset.name === fileName)
        console.log('Got list of assets for existing release:');
        console.log(JSON.stringify(assets.data, null, ' '));
        const existingAssets = assets.data.filter(asset => asset.name === fileName);

        if (existingAssets.length > 0) {
          console.log(`${fileName} already exists; will delete before retrying upload.`)
          console.log(`${fileName} already exists; will delete before retrying upload.`);
          octokit.repos.deleteReleaseAsset({
            owner: 'electron',
            repo: targetRepo,
            asset_id: existingAssets[0].id
          }).catch((deleteErr) => {
            console.log(`Failed to delete existing asset ${fileName}. Error was:`, deleteErr)
          }).then(uploadToGitHub)
            console.log(`Failed to delete existing asset ${fileName}. Error was:`, deleteErr);
          }).then(uploadToGitHub);
        } else {
          console.log(`Current asset ${fileName} not found in existing assets; retrying upload.`)
          uploadToGitHub()
          console.log(`Current asset ${fileName} not found in existing assets; retrying upload.`);
          uploadToGitHub();
        }
      }).catch((getReleaseErr) => {
        console.log('Fatal: Unable to get current release assets via getRelease! Error was:', getReleaseErr)
      })
        console.log('Fatal: Unable to get current release assets via getRelease! Error was:', getReleaseErr);
      });
    } else {
      console.log(`Error retrying uploading ${fileName} to GitHub:`, err)
      process.exitCode = 1
      console.log(`Error retrying uploading ${fileName} to GitHub:`, err);
      process.exitCode = 1;
    }
  })
  });
}

uploadToGitHub()
uploadToGitHub();
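
The getHeaders helper above resolves the upload content-type from a fixed extension map. A small sketch of that lookup; the asset names are hypothetical, and note that an extension outside the map (such as dmg) yields an undefined content-type, since the lookup has no fallback:

const options = {
  json: 'text/json',
  zip: 'application/zip',
  txt: 'text/plain',
  ts: 'application/typescript'
};

// mapped extension: 'SHASUMS256.txt' -> 'txt' -> 'text/plain'
console.log(options['SHASUMS256.txt'.split('.').pop()]);
// unmapped extension: 'electron.dmg' -> 'dmg' -> undefined
console.log(options['electron.dmg'.split('.').pop()]);
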
@@ -1,26 +1,26 @@
#!/usr/bin/env node

const { GitProcess } = require('dugite')
const fs = require('fs')
const semver = require('semver')
const path = require('path')
const { promisify } = require('util')
const minimist = require('minimist')
const { GitProcess } = require('dugite');
const fs = require('fs');
const semver = require('semver');
const path = require('path');
const { promisify } = require('util');
const minimist = require('minimist');

const { ELECTRON_DIR } = require('../lib/utils')
const versionUtils = require('./version-utils')
const { ELECTRON_DIR } = require('../lib/utils');
const versionUtils = require('./version-utils');

const writeFile = promisify(fs.writeFile)
const readFile = promisify(fs.readFile)
const writeFile = promisify(fs.writeFile);
const readFile = promisify(fs.readFile);

function parseCommandLine () {
  let help
  let help;
  const opts = minimist(process.argv.slice(2), {
    string: ['bump', 'version'],
    boolean: ['dryRun', 'help'],
    alias: { version: ['v'] },
    unknown: arg => { help = true }
  })
    unknown: arg => { help = true; }
  });
  if (help || opts.help || !opts.bump) {
    console.log(`
      Bump release version number. Possible arguments:\n

@@ -28,30 +28,30 @@ function parseCommandLine () {
      --version={version} to set version number directly\n
      --dryRun to print the next version without updating files
      Note that you can use both --bump and --stable simultaneously.
    `)
    process.exit(0)
    `);
    process.exit(0);
  }
  return opts
  return opts;
}

// run the script
async function main () {
  const opts = parseCommandLine()
  const currentVersion = await versionUtils.getElectronVersion()
  const version = await nextVersion(opts.bump, currentVersion)
  const opts = parseCommandLine();
  const currentVersion = await versionUtils.getElectronVersion();
  const version = await nextVersion(opts.bump, currentVersion);

  const parsed = semver.parse(version)
  const parsed = semver.parse(version);
  const components = {
    major: parsed.major,
    minor: parsed.minor,
    patch: parsed.patch,
    pre: parsed.prerelease
  }
  };

  // print would-be new version and exit early
  if (opts.dryRun) {
    console.log(`new version number would be: ${version}\n`)
    return 0
    console.log(`new version number would be: ${version}\n`);
    return 0;
  }

  // update all version-related files

@@ -59,12 +59,12 @@ async function main () {
    updateVersion(version),
    updatePackageJSON(version),
    updateWinRC(components)
  ])
  ]);

  // commit all updated version-related files
  await commitVersionBump(version)
  await commitVersionBump(version);

  console.log(`Bumped to version: ${version}`)
  console.log(`Bumped to version: ${version}`);
}

// get next version for release based on [nightly, beta, stable]

@@ -72,81 +72,81 @@ async function nextVersion (bumpType, version) {
  if (versionUtils.isNightly(version) || versionUtils.isBeta(version)) {
    switch (bumpType) {
      case 'nightly':
        version = await versionUtils.nextNightly(version)
        break
        version = await versionUtils.nextNightly(version);
        break;
      case 'beta':
        version = await versionUtils.nextBeta(version)
        break
        version = await versionUtils.nextBeta(version);
        break;
      case 'stable':
        version = semver.valid(semver.coerce(version))
        break
        version = semver.valid(semver.coerce(version));
        break;
      default:
        throw new Error('Invalid bump type.')
        throw new Error('Invalid bump type.');
    }
  } else if (versionUtils.isStable(version)) {
    switch (bumpType) {
      case 'nightly':
        version = versionUtils.nextNightly(version)
        break
        version = versionUtils.nextNightly(version);
        break;
      case 'beta':
        throw new Error('Cannot bump to beta from stable.')
        throw new Error('Cannot bump to beta from stable.');
      case 'minor':
        version = semver.inc(version, 'minor')
        break
        version = semver.inc(version, 'minor');
        break;
      case 'stable':
        version = semver.inc(version, 'patch')
        break
        version = semver.inc(version, 'patch');
        break;
      default:
        throw new Error('Invalid bump type.')
        throw new Error('Invalid bump type.');
    }
  } else {
    throw new Error(`Invalid current version: ${version}`)
    throw new Error(`Invalid current version: ${version}`);
  }
  return version
  return version;
}

// update VERSION file with latest release info
async function updateVersion (version) {
  const versionPath = path.resolve(ELECTRON_DIR, 'ELECTRON_VERSION')
  await writeFile(versionPath, version, 'utf8')
  const versionPath = path.resolve(ELECTRON_DIR, 'ELECTRON_VERSION');
  await writeFile(versionPath, version, 'utf8');
}

// update package metadata files with new version
async function updatePackageJSON (version) {
  const filePath = path.resolve(ELECTRON_DIR, 'package.json')
  const file = require(filePath)
  file.version = version
  await writeFile(filePath, JSON.stringify(file, null, 2))
  const filePath = path.resolve(ELECTRON_DIR, 'package.json');
  const file = require(filePath);
  file.version = version;
  await writeFile(filePath, JSON.stringify(file, null, 2));
}

// push bump commit to release branch
async function commitVersionBump (version) {
  const gitArgs = ['commit', '-a', '-m', `Bump v${version}`, '-n']
  await GitProcess.exec(gitArgs, ELECTRON_DIR)
  const gitArgs = ['commit', '-a', '-m', `Bump v${version}`, '-n'];
  await GitProcess.exec(gitArgs, ELECTRON_DIR);
}

// updates atom.rc file with new semver values
async function updateWinRC (components) {
  const filePath = path.resolve(ELECTRON_DIR, 'shell', 'browser', 'resources', 'win', 'atom.rc')
  const data = await readFile(filePath, 'utf8')
  const arr = data.split('\n')
  const filePath = path.resolve(ELECTRON_DIR, 'shell', 'browser', 'resources', 'win', 'atom.rc');
  const data = await readFile(filePath, 'utf8');
  const arr = data.split('\n');
  arr.forEach((line, idx) => {
    if (line.includes('FILEVERSION')) {
      arr[idx] = ` FILEVERSION ${versionUtils.makeVersion(components, ',', versionUtils.preType.PARTIAL)}`
      arr[idx + 1] = ` PRODUCTVERSION ${versionUtils.makeVersion(components, ',', versionUtils.preType.PARTIAL)}`
      arr[idx] = ` FILEVERSION ${versionUtils.makeVersion(components, ',', versionUtils.preType.PARTIAL)}`;
      arr[idx + 1] = ` PRODUCTVERSION ${versionUtils.makeVersion(components, ',', versionUtils.preType.PARTIAL)}`;
    } else if (line.includes('FileVersion')) {
      arr[idx] = ` VALUE "FileVersion", "${versionUtils.makeVersion(components, '.')}"`
      arr[idx + 5] = ` VALUE "ProductVersion", "${versionUtils.makeVersion(components, '.')}"`
      arr[idx] = ` VALUE "FileVersion", "${versionUtils.makeVersion(components, '.')}"`;
      arr[idx + 5] = ` VALUE "ProductVersion", "${versionUtils.makeVersion(components, '.')}"`;
    }
  })
  await writeFile(filePath, arr.join('\n'))
  });
  await writeFile(filePath, arr.join('\n'));
}

if (process.mainModule === module) {
  main().catch((error) => {
    console.error(error)
    process.exit(1)
  })
    console.error(error);
    process.exit(1);
  });
}

module.exports = { nextVersion }
module.exports = { nextVersion };
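
nextVersion above delegates the actual version arithmetic to semver. A sketch of the calls behind each branch, using hypothetical input versions:

const semver = require('semver');

console.log(semver.valid(semver.coerce('9.0.0-beta.5'))); // '9.0.0' (beta -> stable)
console.log(semver.inc('9.0.0', 'patch'));                // '9.0.1' (stable -> stable)
console.log(semver.inc('9.0.0', 'minor'));                // '9.1.0' (stable -> minor)
console.log(semver.inc('9.0.0-beta.5', 'prerelease'));    // '9.0.0-beta.6' (next beta, as in nextBeta)
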
@@ -1,90 +1,90 @@
const path = require('path')
const fs = require('fs')
const semver = require('semver')
const { GitProcess } = require('dugite')
const { promisify } = require('util')
const path = require('path');
const fs = require('fs');
const semver = require('semver');
const { GitProcess } = require('dugite');
const { promisify } = require('util');

const { ELECTRON_DIR } = require('../lib/utils')
const { ELECTRON_DIR } = require('../lib/utils');

const readFile = promisify(fs.readFile)
const readFile = promisify(fs.readFile);

const preType = {
  NONE: 'none',
  PARTIAL: 'partial',
  FULL: 'full'
}
};

const getCurrentDate = () => {
  const d = new Date()
  const dd = `${d.getDate()}`.padStart(2, '0')
  const mm = `${d.getMonth() + 1}`.padStart(2, '0')
  const yyyy = d.getFullYear()
  return `${yyyy}${mm}${dd}`
}
  const d = new Date();
  const dd = `${d.getDate()}`.padStart(2, '0');
  const mm = `${d.getMonth() + 1}`.padStart(2, '0');
  const yyyy = d.getFullYear();
  return `${yyyy}${mm}${dd}`;
};

const isNightly = v => v.includes('nightly')
const isBeta = v => v.includes('beta')
const isNightly = v => v.includes('nightly');
const isBeta = v => v.includes('beta');
const isStable = v => {
  const parsed = semver.parse(v)
  return !!(parsed && parsed.prerelease.length === 0)
}
  const parsed = semver.parse(v);
  return !!(parsed && parsed.prerelease.length === 0);
};

const makeVersion = (components, delim, pre = preType.NONE) => {
  let version = [components.major, components.minor, components.patch].join(delim)
  let version = [components.major, components.minor, components.patch].join(delim);
  if (pre === preType.PARTIAL) {
    version += `${delim}${components.pre[1] || 0}`
    version += `${delim}${components.pre[1] || 0}`;
  } else if (pre === preType.FULL) {
    version += `-${components.pre[0]}${delim}${components.pre[1]}`
    version += `-${components.pre[0]}${delim}${components.pre[1]}`;
  }
  return version
}
  return version;
};

async function nextBeta (v) {
  const next = semver.coerce(semver.clean(v))
  const next = semver.coerce(semver.clean(v));

  const tagBlob = await GitProcess.exec(['tag', '--list', '-l', `v${next}-beta.*`], ELECTRON_DIR)
  const tags = tagBlob.stdout.split('\n').filter(e => e !== '')
  tags.sort((t1, t2) => semver.gt(t1, t2))
  const tagBlob = await GitProcess.exec(['tag', '--list', '-l', `v${next}-beta.*`], ELECTRON_DIR);
  const tags = tagBlob.stdout.split('\n').filter(e => e !== '');
  tags.sort((t1, t2) => semver.gt(t1, t2));

  // increment the latest existing beta tag or start at beta.1 if it's a new beta line
  return tags.length === 0 ? `${next}-beta.1` : semver.inc(tags.pop(), 'prerelease')
  return tags.length === 0 ? `${next}-beta.1` : semver.inc(tags.pop(), 'prerelease');
}

async function getElectronVersion () {
  const versionPath = path.resolve(ELECTRON_DIR, 'ELECTRON_VERSION')
  const version = await readFile(versionPath, 'utf8')
  return version.trim()
  const versionPath = path.resolve(ELECTRON_DIR, 'ELECTRON_VERSION');
  const version = await readFile(versionPath, 'utf8');
  return version.trim();
}

async function nextNightly (v) {
  let next = semver.valid(semver.coerce(v))
  const pre = `nightly.${getCurrentDate()}`
  let next = semver.valid(semver.coerce(v));
  const pre = `nightly.${getCurrentDate()}`;

  const branch = (await GitProcess.exec(['rev-parse', '--abbrev-ref', 'HEAD'], ELECTRON_DIR)).stdout.trim()
  const branch = (await GitProcess.exec(['rev-parse', '--abbrev-ref', 'HEAD'], ELECTRON_DIR)).stdout.trim();
  if (branch === 'master') {
    next = semver.inc(await getLastMajorForMaster(), 'major')
    next = semver.inc(await getLastMajorForMaster(), 'major');
  } else if (isStable(v)) {
    next = semver.inc(next, 'patch')
    next = semver.inc(next, 'patch');
  }

  return `${next}-${pre}`
  return `${next}-${pre}`;
}

async function getLastMajorForMaster () {
  let branchNames
  const result = await GitProcess.exec(['branch', '-a', '--remote', '--list', 'origin/[0-9]*-x-y'], ELECTRON_DIR)
  let branchNames;
  const result = await GitProcess.exec(['branch', '-a', '--remote', '--list', 'origin/[0-9]*-x-y'], ELECTRON_DIR);
  if (result.exitCode === 0) {
    branchNames = result.stdout.trim().split('\n')
    const filtered = branchNames.map(b => b.replace('origin/', ''))
    return getNextReleaseBranch(filtered)
    branchNames = result.stdout.trim().split('\n');
    const filtered = branchNames.map(b => b.replace('origin/', ''));
    return getNextReleaseBranch(filtered);
  } else {
    throw new Error('Release branches could not be fetched.')
    throw new Error('Release branches could not be fetched.');
  }
}

function getNextReleaseBranch (branches) {
  const converted = branches.map(b => b.replace(/-/g, '.').replace('x', '0').replace('y', '0'))
  return converted.reduce((v1, v2) => semver.gt(v1, v2) ? v1 : v2)
  const converted = branches.map(b => b.replace(/-/g, '.').replace('x', '0').replace('y', '0'));
  return converted.reduce((v1, v2) => semver.gt(v1, v2) ? v1 : v2);
}

module.exports = {

@@ -96,4 +96,4 @@ module.exports = {
  getElectronVersion,
  nextNightly,
  preType
}
};
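
makeVersion above formats the same parsed components three ways depending on preType. A sketch with components from a hypothetical '9.0.0-beta.5', assuming makeVersion and preType are among this module's exports:

const { makeVersion, preType } = require('./version-utils'); // assumed exports

const components = { major: 9, minor: 0, patch: 0, pre: ['beta', 5] };

console.log(makeVersion(components, '.'));                  // '9.0.0'
console.log(makeVersion(components, ',', preType.PARTIAL)); // '9,0,0,5' (the atom.rc FILEVERSION form)
console.log(makeVersion(components, '.', preType.FULL));    // '9.0.0-beta.5'
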
@@ -1,76 +1,76 @@
#!/usr/bin/env node

const childProcess = require('child_process')
const crypto = require('crypto')
const fs = require('fs')
const { hashElement } = require('folder-hash')
const path = require('path')
const unknownFlags = []
const childProcess = require('child_process');
const crypto = require('crypto');
const fs = require('fs');
const { hashElement } = require('folder-hash');
const path = require('path');
const unknownFlags = [];

require('colors')
const pass = '✓'.green
const fail = '✗'.red
require('colors');
const pass = '✓'.green;
const fail = '✗'.red;

const args = require('minimist')(process.argv, {
  string: ['runners', 'target'],
  boolean: ['buildNativeTests'],
  unknown: arg => unknownFlags.push(arg)
})
});

const unknownArgs = []
const unknownArgs = [];
for (const flag of unknownFlags) {
  unknownArgs.push(flag)
  const onlyFlag = flag.replace(/^-+/, '')
  unknownArgs.push(flag);
  const onlyFlag = flag.replace(/^-+/, '');
  if (args[onlyFlag]) {
    unknownArgs.push(args[onlyFlag])
    unknownArgs.push(args[onlyFlag]);
  }
}

const utils = require('./lib/utils')
const { YARN_VERSION } = require('./yarn')
const utils = require('./lib/utils');
const { YARN_VERSION } = require('./yarn');

const BASE = path.resolve(__dirname, '../..')
const NPM_CMD = process.platform === 'win32' ? 'npm.cmd' : 'npm'
const NPX_CMD = process.platform === 'win32' ? 'npx.cmd' : 'npx'
const BASE = path.resolve(__dirname, '../..');
const NPM_CMD = process.platform === 'win32' ? 'npm.cmd' : 'npm';
const NPX_CMD = process.platform === 'win32' ? 'npx.cmd' : 'npx';

const runners = new Map([
  ['main', { description: 'Main process specs', run: runMainProcessElectronTests }],
  ['remote', { description: 'Remote based specs', run: runRemoteBasedElectronTests }],
  ['native', { description: 'Native specs', run: runNativeElectronTests }]
])
]);

const specHashPath = path.resolve(__dirname, '../spec/.hash')
const specHashPath = path.resolve(__dirname, '../spec/.hash');

let runnersToRun = null
let runnersToRun = null;
if (args.runners) {
  runnersToRun = args.runners.split(',')
  runnersToRun = args.runners.split(',');
  if (!runnersToRun.every(r => [...runners.keys()].includes(r))) {
    console.log(`${fail} ${runnersToRun} must be a subset of [${[...runners.keys()].join(' | ')}]`)
    process.exit(1)
    console.log(`${fail} ${runnersToRun} must be a subset of [${[...runners.keys()].join(' | ')}]`);
    process.exit(1);
  }
  console.log('Only running:', runnersToRun)
  console.log('Only running:', runnersToRun);
} else {
  console.log(`Triggering runners: ${[...runners.keys()].join(', ')}`)
  console.log(`Triggering runners: ${[...runners.keys()].join(', ')}`);
}

async function main () {
  const [lastSpecHash, lastSpecInstallHash] = loadLastSpecHash()
  const [currentSpecHash, currentSpecInstallHash] = await getSpecHash()
  const [lastSpecHash, lastSpecInstallHash] = loadLastSpecHash();
  const [currentSpecHash, currentSpecInstallHash] = await getSpecHash();
  const somethingChanged = (currentSpecHash !== lastSpecHash) ||
    (lastSpecInstallHash !== currentSpecInstallHash)
    (lastSpecInstallHash !== currentSpecInstallHash);

  if (somethingChanged) {
    await installSpecModules(path.resolve(__dirname, '..', 'spec'))
    await installSpecModules(path.resolve(__dirname, '..', 'spec-main'))
    await getSpecHash().then(saveSpecHash)
    await installSpecModules(path.resolve(__dirname, '..', 'spec'));
    await installSpecModules(path.resolve(__dirname, '..', 'spec-main'));
    await getSpecHash().then(saveSpecHash);
  }

  if (!fs.existsSync(path.resolve(__dirname, '../electron.d.ts'))) {
    console.log('Generating electron.d.ts as it is missing')
    generateTypeDefinitions()
    console.log('Generating electron.d.ts as it is missing');
    generateTypeDefinitions();
  }

  await runElectronTests()
  await runElectronTests();
}

function generateTypeDefinitions () {

@@ -78,80 +78,80 @@ function generateTypeDefinitions () {
    cwd: path.resolve(__dirname, '..'),
    stdio: 'inherit',
    shell: true
  })
  });
  if (status !== 0) {
    throw new Error(`Electron typescript definition generation failed with exit code: ${status}.`)
    throw new Error(`Electron typescript definition generation failed with exit code: ${status}.`);
  }
}

function loadLastSpecHash () {
  return fs.existsSync(specHashPath)
    ? fs.readFileSync(specHashPath, 'utf8').split('\n')
    : [null, null]
    : [null, null];
}

function saveSpecHash ([newSpecHash, newSpecInstallHash]) {
  fs.writeFileSync(specHashPath, `${newSpecHash}\n${newSpecInstallHash}`)
  fs.writeFileSync(specHashPath, `${newSpecHash}\n${newSpecInstallHash}`);
}

async function runElectronTests () {
  const errors = []
  const errors = [];

  const testResultsDir = process.env.ELECTRON_TEST_RESULTS_DIR
  const testResultsDir = process.env.ELECTRON_TEST_RESULTS_DIR;
  for (const [runnerId, { description, run }] of runners) {
    if (runnersToRun && !runnersToRun.includes(runnerId)) {
      console.info('\nSkipping:', description)
      continue
      console.info('\nSkipping:', description);
      continue;
    }
    try {
      console.info('\nRunning:', description)
      console.info('\nRunning:', description);
      if (testResultsDir) {
        process.env.MOCHA_FILE = path.join(testResultsDir, `test-results-${runnerId}.xml`)
        process.env.MOCHA_FILE = path.join(testResultsDir, `test-results-${runnerId}.xml`);
      }
      await run()
      await run();
    } catch (err) {
      errors.push([runnerId, err])
      errors.push([runnerId, err]);
    }
  }

  if (errors.length !== 0) {
    for (const err of errors) {
      console.error('\n\nRunner Failed:', err[0])
      console.error(err[1])
      console.error('\n\nRunner Failed:', err[0]);
      console.error(err[1]);
    }
    console.log(`${fail} Electron test runners have failed`)
    process.exit(1)
    console.log(`${fail} Electron test runners have failed`);
    process.exit(1);
  }
}

async function runRemoteBasedElectronTests () {
  let exe = path.resolve(BASE, utils.getElectronExec())
  const runnerArgs = ['electron/spec', ...unknownArgs.slice(2)]
  let exe = path.resolve(BASE, utils.getElectronExec());
  const runnerArgs = ['electron/spec', ...unknownArgs.slice(2)];
  if (process.platform === 'linux') {
    runnerArgs.unshift(path.resolve(__dirname, 'dbus_mock.py'), exe)
    exe = 'python'
    runnerArgs.unshift(path.resolve(__dirname, 'dbus_mock.py'), exe);
    exe = 'python';
  }

  const { status } = childProcess.spawnSync(exe, runnerArgs, {
    cwd: path.resolve(__dirname, '../..'),
    stdio: 'inherit'
  })
  });
  if (status !== 0) {
    const textStatus = process.platform === 'win32' ? `0x${status.toString(16)}` : status.toString()
    console.log(`${fail} Electron tests failed with code ${textStatus}.`)
    process.exit(1)
    const textStatus = process.platform === 'win32' ? `0x${status.toString(16)}` : status.toString();
    console.log(`${fail} Electron tests failed with code ${textStatus}.`);
    process.exit(1);
  }
  console.log(`${pass} Electron remote process tests passed.`)
  console.log(`${pass} Electron remote process tests passed.`);
}

async function runNativeElectronTests () {
  let testTargets = require('./native-test-targets.json')
  const outDir = `out/${utils.getOutDir()}`
  let testTargets = require('./native-test-targets.json');
  const outDir = `out/${utils.getOutDir()}`;

  // If native tests are being run, only one arg would be relevant
  if (args.target && !testTargets.includes(args.target)) {
    console.log(`${fail} ${args.target} must be a subset of [${[testTargets].join(', ')}]`)
    process.exit(1)
    console.log(`${fail} ${args.target} must be a subset of [${[testTargets].join(', ')}]`);
    process.exit(1);
  }

  // Optionally build all native test targets

@@ -160,108 +160,108 @@ async function runNativeElectronTests () {
      const build = childProcess.spawnSync('ninja', ['-C', outDir, target], {
        cwd: path.resolve(__dirname, '../..'),
        stdio: 'inherit'
      })
      });

      // Exit if test target failed to build
      if (build.status !== 0) {
        console.log(`${fail} ${target} failed to build.`)
        process.exit(1)
        console.log(`${fail} ${target} failed to build.`);
        process.exit(1);
      }
    }
  }

  // If a specific target was passed, only build and run that target
  if (args.target) testTargets = [args.target]
  if (args.target) testTargets = [args.target];

  // Run test targets
  const failures = []
  const failures = [];
  for (const target of testTargets) {
    console.info('\nRunning native test for target:', target)
    console.info('\nRunning native test for target:', target);
    const testRun = childProcess.spawnSync(`./${outDir}/${target}`, {
      cwd: path.resolve(__dirname, '../..'),
      stdio: 'inherit'
    })
    });

    // Collect failures and log at end
    if (testRun.status !== 0) failures.push({ target })
    if (testRun.status !== 0) failures.push({ target });
  }

  // Exit if any failures
  if (failures.length > 0) {
    console.log(`${fail} Electron native tests failed for the following targets: `, failures)
    process.exit(1)
    console.log(`${fail} Electron native tests failed for the following targets: `, failures);
    process.exit(1);
  }

  console.log(`${pass} Electron native tests passed.`)
  console.log(`${pass} Electron native tests passed.`);
}

async function runMainProcessElectronTests () {
  let exe = path.resolve(BASE, utils.getElectronExec())
  const runnerArgs = ['electron/spec-main', ...unknownArgs.slice(2)]
  let exe = path.resolve(BASE, utils.getElectronExec());
  const runnerArgs = ['electron/spec-main', ...unknownArgs.slice(2)];
  if (process.platform === 'linux') {
    runnerArgs.unshift(path.resolve(__dirname, 'dbus_mock.py'), exe)
    exe = 'python'
    runnerArgs.unshift(path.resolve(__dirname, 'dbus_mock.py'), exe);
    exe = 'python';
  }

  const { status, signal } = childProcess.spawnSync(exe, runnerArgs, {
    cwd: path.resolve(__dirname, '../..'),
    stdio: 'inherit'
  })
  });
  if (status !== 0) {
    if (status) {
      const textStatus = process.platform === 'win32' ? `0x${status.toString(16)}` : status.toString()
      console.log(`${fail} Electron tests failed with code ${textStatus}.`)
      const textStatus = process.platform === 'win32' ? `0x${status.toString(16)}` : status.toString();
      console.log(`${fail} Electron tests failed with code ${textStatus}.`);
    } else {
      console.log(`${fail} Electron tests failed with kill signal ${signal}.`)
      console.log(`${fail} Electron tests failed with kill signal ${signal}.`);
    }
    process.exit(1)
    process.exit(1);
  }
  console.log(`${pass} Electron main process tests passed.`)
  console.log(`${pass} Electron main process tests passed.`);
}

async function installSpecModules (dir) {
  const nodeDir = path.resolve(BASE, `out/${utils.getOutDir({ shouldLog: true })}/gen/node_headers`)
  const nodeDir = path.resolve(BASE, `out/${utils.getOutDir({ shouldLog: true })}/gen/node_headers`);
  const env = Object.assign({}, process.env, {
    npm_config_nodedir: nodeDir,
    npm_config_msvs_version: '2019'
  })
  });
  const { status } = childProcess.spawnSync(NPX_CMD, [`yarn@${YARN_VERSION}`, 'install', '--frozen-lockfile'], {
    env,
    cwd: dir,
    stdio: 'inherit'
  })
  });
  if (status !== 0 && !process.env.IGNORE_YARN_INSTALL_ERROR) {
    console.log(`${fail} Failed to yarn install in '${dir}'`)
    process.exit(1)
    console.log(`${fail} Failed to yarn install in '${dir}'`);
    process.exit(1);
  }
}

function getSpecHash () {
  return Promise.all([
    (async () => {
      const hasher = crypto.createHash('SHA256')
      hasher.update(fs.readFileSync(path.resolve(__dirname, '../spec/package.json')))
      hasher.update(fs.readFileSync(path.resolve(__dirname, '../spec-main/package.json')))
      hasher.update(fs.readFileSync(path.resolve(__dirname, '../spec/yarn.lock')))
      hasher.update(fs.readFileSync(path.resolve(__dirname, '../spec-main/yarn.lock')))
      return hasher.digest('hex')
      const hasher = crypto.createHash('SHA256');
      hasher.update(fs.readFileSync(path.resolve(__dirname, '../spec/package.json')));
      hasher.update(fs.readFileSync(path.resolve(__dirname, '../spec-main/package.json')));
      hasher.update(fs.readFileSync(path.resolve(__dirname, '../spec/yarn.lock')));
      hasher.update(fs.readFileSync(path.resolve(__dirname, '../spec-main/yarn.lock')));
      return hasher.digest('hex');
    })(),
    (async () => {
      const specNodeModulesPath = path.resolve(__dirname, '../spec/node_modules')
      const specNodeModulesPath = path.resolve(__dirname, '../spec/node_modules');
      if (!fs.existsSync(specNodeModulesPath)) {
        return null
        return null;
      }
      const { hash } = await hashElement(specNodeModulesPath, {
        folders: {
          exclude: ['.bin']
        }
      })
      return hash
      });
      return hash;
    })()
  ])
  ]);
}

main().catch((error) => {
  console.error('An error occurred inside the spec runner:', error)
  process.exit(1)
})
  console.error('An error occurred inside the spec runner:', error);
  process.exit(1);
});
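
getSpecHash and loadLastSpecHash above gate yarn installs on a digest of the dependency manifests, so modules are only reinstalled when the manifests change. A condensed sketch of that caching idea; the file list is abbreviated and the paths are illustrative:

const crypto = require('crypto');
const fs = require('fs');

const hasher = crypto.createHash('sha256');
for (const f of ['spec/package.json', 'spec/yarn.lock']) { // illustrative subset
  hasher.update(fs.readFileSync(f));
}
const currentHash = hasher.digest('hex');

const hashFile = 'spec/.hash'; // illustrative path
const lastHash = fs.existsSync(hashFile) ? fs.readFileSync(hashFile, 'utf8') : null;
if (currentHash !== lastHash) {
  // reinstall spec modules here, then persist the new digest
  fs.writeFileSync(hashFile, currentHash);
}
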
@@ -1,16 +1,16 @@
const cp = require('child_process')
const utils = require('./lib/utils')
const electronPath = utils.getAbsoluteElectronExec()
const cp = require('child_process');
const utils = require('./lib/utils');
const electronPath = utils.getAbsoluteElectronExec();

const child = cp.spawn(electronPath, process.argv.slice(2), { stdio: 'inherit' })
child.on('close', (code) => process.exit(code))
const child = cp.spawn(electronPath, process.argv.slice(2), { stdio: 'inherit' });
child.on('close', (code) => process.exit(code));

const handleTerminationSignal = (signal) =>
  process.on(signal, () => {
    if (!child.killed) {
      child.kill(signal)
      child.kill(signal);
    }
  })
  });

handleTerminationSignal('SIGINT')
handleTerminationSignal('SIGTERM')
handleTerminationSignal('SIGINT');
handleTerminationSignal('SIGTERM');
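
The wrapper above forwards SIGINT and SIGTERM to the spawned Electron binary so an interrupt reaches the child before the wrapper exits. The same pattern as a runnable sketch against a stand-in child (a Node process that just sleeps):

const cp = require('child_process');

// stand-in child: a Node process that sleeps until killed
const child = cp.spawn(process.execPath, ['-e', 'setInterval(() => {}, 1000)'], { stdio: 'inherit' });
child.on('close', (code) => process.exit(code));

for (const signal of ['SIGINT', 'SIGTERM']) {
  process.on(signal, () => {
    if (!child.killed) child.kill(signal);
  });
}
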
@@ -1,18 +1,18 @@
const cp = require('child_process')
const fs = require('fs')
const path = require('path')
const cp = require('child_process');
const fs = require('fs');
const path = require('path');

const YARN_VERSION = /'yarn_version': '(.+?)'/.exec(fs.readFileSync(path.resolve(__dirname, '../DEPS'), 'utf8'))[1]
const YARN_VERSION = /'yarn_version': '(.+?)'/.exec(fs.readFileSync(path.resolve(__dirname, '../DEPS'), 'utf8'))[1];

exports.YARN_VERSION = YARN_VERSION
exports.YARN_VERSION = YARN_VERSION;

// If we are running "node script/yarn" run as the yarn CLI
if (process.mainModule === module) {
  const NPX_CMD = process.platform === 'win32' ? 'npx.cmd' : 'npx'
  const NPX_CMD = process.platform === 'win32' ? 'npx.cmd' : 'npx';

  const child = cp.spawn(NPX_CMD, [`yarn@${YARN_VERSION}`, ...process.argv.slice(2)], {
    stdio: 'inherit'
  })
  });

  child.on('exit', code => process.exit(code))
  child.on('exit', code => process.exit(code));
}
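
The YARN_VERSION regex above pins the yarn CLI to whatever version the DEPS file declares. How that extraction behaves against a DEPS-style entry; the snippet is a hypothetical stand-in for the real file:

const deps = "  'yarn_version': '1.15.2',"; // hypothetical DEPS line
const version = /'yarn_version': '(.+?)'/.exec(deps)[1];
console.log(version); // '1.15.2'
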