build: enable JS semicolons (#22783)
parent 24e21467b9
commit 5d657dece4
354 changed files with 21512 additions and 21510 deletions
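The rewrite itself is mechanical: every statement in the affected scripts gains a trailing semicolon, and the closing braces of statement-level object literals and arrow-function assignments become `};`. A change like this is normally driven by the lint configuration rather than done by hand; the commit's own config change is not captured in this excerpt, so the snippet below is only a hedged sketch of the kind of ESLint rule that produces the per-file hunks that follow (`semi` and `no-extra-semi` are real ESLint rules, but the file name and exact values here are assumptions):

// .eslintrc.js (hypothetical sketch; the commit's actual config diff is not shown in this excerpt)
module.exports = {
  rules: {
    semi: ['error', 'always'], // require a semicolon after every statement
    'no-extra-semi': 'error' // reject stray doubled semicolons left by the rewrite
  }
};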
@@ -1,18 +1,18 @@
-if (!process.env.CI) require('dotenv-safe').load()
+if (!process.env.CI) require('dotenv-safe').load();

-const assert = require('assert')
-const request = require('request')
+const assert = require('assert');
+const request = require('request');

-const BUILD_APPVEYOR_URL = 'https://ci.appveyor.com/api/builds'
-const CIRCLECI_PIPELINE_URL = 'https://circleci.com/api/v2/project/gh/electron/electron/pipeline'
-const VSTS_URL = 'https://github.visualstudio.com/electron/_apis/build'
-const CIRCLECI_WAIT_TIME = process.env.CIRCLECI_WAIT_TIME || 30000
+const BUILD_APPVEYOR_URL = 'https://ci.appveyor.com/api/builds';
+const CIRCLECI_PIPELINE_URL = 'https://circleci.com/api/v2/project/gh/electron/electron/pipeline';
+const VSTS_URL = 'https://github.visualstudio.com/electron/_apis/build';
+const CIRCLECI_WAIT_TIME = process.env.CIRCLECI_WAIT_TIME || 30000;

 const appVeyorJobs = {
   'electron-x64': 'electron-x64-release',
   'electron-ia32': 'electron-ia32-release',
   'electron-woa': 'electron-woa-release'
-}
+};

 const circleCIJobs = [
   'linux-arm-publish',
@@ -21,50 +21,50 @@ const circleCIJobs = [
   'linux-x64-publish',
   'mas-publish',
   'osx-publish'
-]
+];

 const circleCIPublishWorkflows = [
   'linux-publish',
   'macos-publish'
-]
+];

 const vstsArmJobs = [
   'electron-arm-testing',
   'electron-arm64-testing',
   'electron-woa-testing'
-]
+];

-let jobRequestedCount = 0
+let jobRequestedCount = 0;

 async function makeRequest (requestOptions, parseResponse) {
   return new Promise((resolve, reject) => {
     request(requestOptions, (err, res, body) => {
       if (!err && res.statusCode >= 200 && res.statusCode < 300) {
         if (parseResponse) {
-          const build = JSON.parse(body)
-          resolve(build)
+          const build = JSON.parse(body);
+          resolve(build);
         } else {
-          resolve(body)
+          resolve(body);
         }
       } else {
-        console.error('Error occurred while requesting:', requestOptions.url)
+        console.error('Error occurred while requesting:', requestOptions.url);
         if (parseResponse) {
           try {
-            console.log('Error: ', `(status ${res.statusCode})`, err || JSON.parse(res.body))
+            console.log('Error: ', `(status ${res.statusCode})`, err || JSON.parse(res.body));
           } catch (err) {
-            console.log('Error: ', `(status ${res.statusCode})`, res.body)
+            console.log('Error: ', `(status ${res.statusCode})`, res.body);
           }
         } else {
-          console.log('Error: ', `(status ${res.statusCode})`, err || res.body)
+          console.log('Error: ', `(status ${res.statusCode})`, err || res.body);
         }
-        reject(err)
+        reject(err);
       }
-    })
-  })
+    });
+  });
 }

 async function circleCIcall (targetBranch, job, options) {
-  console.log(`Triggering CircleCI to run build job: ${job} on branch: ${targetBranch} with release flag.`)
+  console.log(`Triggering CircleCI to run build job: ${job} on branch: ${targetBranch} with release flag.`);
   const buildRequest = {
     branch: targetBranch,
     parameters: {
@@ -72,82 +72,82 @@ async function circleCIcall (targetBranch, job, options) {
       'run-build-linux': false,
       'run-build-mac': false
     }
-  }
+  };
   if (options.ghRelease) {
-    buildRequest.parameters['upload-to-s3'] = '0'
+    buildRequest.parameters['upload-to-s3'] = '0';
   } else {
-    buildRequest.parameters['upload-to-s3'] = '1'
+    buildRequest.parameters['upload-to-s3'] = '1';
   }
-  buildRequest.parameters[`run-${job}`] = true
-  jobRequestedCount++
+  buildRequest.parameters[`run-${job}`] = true;
+  jobRequestedCount++;
   // The logic below expects that the CircleCI workflows for releases each
   // contain only one job in order to maintain compatibility with sudowoodo.
   // If the workflows are changed in the CircleCI config.yml, this logic will
   // also need to be changed as well as possibly changing sudowoodo.
   try {
-    const circleResponse = await circleCIRequest(CIRCLECI_PIPELINE_URL, 'POST', buildRequest)
-    console.log(`CircleCI release build pipeline ${circleResponse.id} for ${job} triggered.`)
-    const pipelineInfoUrl = `https://circleci.com/api/v2/pipeline/${circleResponse.id}`
-    const workflowId = await getCircleCIWorkflowId(circleResponse.id)
+    const circleResponse = await circleCIRequest(CIRCLECI_PIPELINE_URL, 'POST', buildRequest);
+    console.log(`CircleCI release build pipeline ${circleResponse.id} for ${job} triggered.`);
+    const pipelineInfoUrl = `https://circleci.com/api/v2/pipeline/${circleResponse.id}`;
+    const workflowId = await getCircleCIWorkflowId(circleResponse.id);
     if (workflowId === -1) {
-      return
+      return;
     }
-    const workFlowUrl = `https://circleci.com/workflow-run/${workflowId}`
+    const workFlowUrl = `https://circleci.com/workflow-run/${workflowId}`;
     if (options.runningPublishWorkflows) {
-      console.log(`CircleCI release workflow request for ${job} successful. Check ${workFlowUrl} for status.`)
+      console.log(`CircleCI release workflow request for ${job} successful. Check ${workFlowUrl} for status.`);
     } else {
-      console.log(`CircleCI release build workflow running at https://circleci.com/workflow-run/${workflowId} for ${job}.`)
-      const jobNumber = await getCircleCIJobNumber(workflowId)
+      console.log(`CircleCI release build workflow running at https://circleci.com/workflow-run/${workflowId} for ${job}.`);
+      const jobNumber = await getCircleCIJobNumber(workflowId);
       if (jobNumber === -1) {
-        return
+        return;
       }
-      const jobUrl = `https://circleci.com/gh/electron/electron/${jobNumber}`
-      console.log(`CircleCI release build request for ${job} successful. Check ${jobUrl} for status.`)
+      const jobUrl = `https://circleci.com/gh/electron/electron/${jobNumber}`;
+      console.log(`CircleCI release build request for ${job} successful. Check ${jobUrl} for status.`);
     }
   } catch (err) {
-    console.log('Error calling CircleCI: ', err)
+    console.log('Error calling CircleCI: ', err);
   }
 }

 async function getCircleCIWorkflowId (pipelineId) {
-  const pipelineInfoUrl = `https://circleci.com/api/v2/pipeline/${pipelineId}`
-  let workflowId = 0
+  const pipelineInfoUrl = `https://circleci.com/api/v2/pipeline/${pipelineId}`;
+  let workflowId = 0;
   while (workflowId === 0) {
-    const pipelineInfo = await circleCIRequest(pipelineInfoUrl, 'GET')
+    const pipelineInfo = await circleCIRequest(pipelineInfoUrl, 'GET');
     switch (pipelineInfo.state) {
       case 'created': {
-        const workflows = await circleCIRequest(`${pipelineInfoUrl}/workflow`, 'GET')
+        const workflows = await circleCIRequest(`${pipelineInfoUrl}/workflow`, 'GET');
         if (workflows.items.length === 1) {
-          workflowId = workflows.items[0].id
-          break
+          workflowId = workflows.items[0].id;
+          break;
         }
-        console.log('Unexpected number of workflows, response was:', pipelineInfo)
-        workflowId = -1
-        break
+        console.log('Unexpected number of workflows, response was:', pipelineInfo);
+        workflowId = -1;
+        break;
       }
       case 'error': {
-        console.log('Error retrieving workflows, response was:', pipelineInfo)
-        workflowId = -1
-        break
+        console.log('Error retrieving workflows, response was:', pipelineInfo);
+        workflowId = -1;
+        break;
       }
     }
-    await new Promise(resolve => setTimeout(resolve, CIRCLECI_WAIT_TIME))
+    await new Promise(resolve => setTimeout(resolve, CIRCLECI_WAIT_TIME));
   }
-  return workflowId
+  return workflowId;
 }

 async function getCircleCIJobNumber (workflowId) {
-  const jobInfoUrl = `https://circleci.com/api/v2/workflow/${workflowId}/job`
-  let jobNumber = 0
+  const jobInfoUrl = `https://circleci.com/api/v2/workflow/${workflowId}/job`;
+  let jobNumber = 0;
   while (jobNumber === 0) {
-    const jobInfo = await circleCIRequest(jobInfoUrl, 'GET')
+    const jobInfo = await circleCIRequest(jobInfoUrl, 'GET');
     if (!jobInfo.items) {
-      continue
+      continue;
     }
     if (jobInfo.items.length !== 1) {
-      console.log('Unexpected number of jobs, response was:', jobInfo)
-      jobNumber = -1
-      break
+      console.log('Unexpected number of jobs, response was:', jobInfo);
+      jobNumber = -1;
+      break;
     }

     switch (jobInfo.items[0].status) {
@@ -155,9 +155,9 @@ async function getCircleCIJobNumber (workflowId) {
       case 'queued':
       case 'running': {
         if (jobInfo.items[0].job_number && !isNaN(jobInfo.items[0].job_number)) {
-          jobNumber = jobInfo.items[0].job_number
+          jobNumber = jobInfo.items[0].job_number;
         }
-        break
+        break;
       }
       case 'canceled':
       case 'error':
@@ -165,14 +165,14 @@ async function getCircleCIJobNumber (workflowId) {
       case 'timedout':
       case 'not_run':
       case 'failed': {
-        console.log(`Error job returned a status of ${jobInfo.items[0].status}, response was:`, jobInfo)
-        jobNumber = -1
-        break
+        console.log(`Error job returned a status of ${jobInfo.items[0].status}, response was:`, jobInfo);
+        jobNumber = -1;
+        break;
       }
     }
-    await new Promise(resolve => setTimeout(resolve, CIRCLECI_WAIT_TIME))
+    await new Promise(resolve => setTimeout(resolve, CIRCLECI_WAIT_TIME));
   }
-  return jobNumber
+  return jobNumber;
 }

 async function circleCIRequest (url, method, requestBody) {
@@ -189,28 +189,28 @@ async function circleCIRequest (url, method, requestBody) {
     },
     body: requestBody ? JSON.stringify(requestBody) : null
   }, true).catch(err => {
-    console.log('Error calling CircleCI:', err)
-  })
+    console.log('Error calling CircleCI:', err);
+  });
 }

 function buildAppVeyor (targetBranch, options) {
-  const validJobs = Object.keys(appVeyorJobs)
+  const validJobs = Object.keys(appVeyorJobs);
   if (options.job) {
-    assert(validJobs.includes(options.job), `Unknown AppVeyor CI job name: ${options.job}. Valid values are: ${validJobs}.`)
-    callAppVeyor(targetBranch, options.job, options)
+    assert(validJobs.includes(options.job), `Unknown AppVeyor CI job name: ${options.job}. Valid values are: ${validJobs}.`);
+    callAppVeyor(targetBranch, options.job, options);
   } else {
-    validJobs.forEach((job) => callAppVeyor(targetBranch, job, options))
+    validJobs.forEach((job) => callAppVeyor(targetBranch, job, options));
   }
 }

 async function callAppVeyor (targetBranch, job, options) {
-  console.log(`Triggering AppVeyor to run build job: ${job} on branch: ${targetBranch} with release flag.`)
+  console.log(`Triggering AppVeyor to run build job: ${job} on branch: ${targetBranch} with release flag.`);
   const environmentVariables = {
     ELECTRON_RELEASE: 1
-  }
+  };

   if (!options.ghRelease) {
-    environmentVariables.UPLOAD_TO_S3 = 1
+    environmentVariables.UPLOAD_TO_S3 = 1;
   }

   const requestOpts = {
@@ -228,44 +228,44 @@ async function callAppVeyor (targetBranch, job, options) {
       environmentVariables
     }),
     method: 'POST'
-  }
-  jobRequestedCount++
+  };
+  jobRequestedCount++;
   const appVeyorResponse = await makeRequest(requestOpts, true).catch(err => {
-    console.log('Error calling AppVeyor:', err)
-  })
-  const buildUrl = `https://ci.appveyor.com/project/electron-bot/${appVeyorJobs[job]}/build/${appVeyorResponse.version}`
-  console.log(`AppVeyor release build request for ${job} successful. Check build status at ${buildUrl}`)
+    console.log('Error calling AppVeyor:', err);
+  });
+  const buildUrl = `https://ci.appveyor.com/project/electron-bot/${appVeyorJobs[job]}/build/${appVeyorResponse.version}`;
+  console.log(`AppVeyor release build request for ${job} successful. Check build status at ${buildUrl}`);
 }

 function buildCircleCI (targetBranch, options) {
   if (options.job) {
-    assert(circleCIJobs.includes(options.job), `Unknown CircleCI job name: ${options.job}. Valid values are: ${circleCIJobs}.`)
-    circleCIcall(targetBranch, options.job, options)
+    assert(circleCIJobs.includes(options.job), `Unknown CircleCI job name: ${options.job}. Valid values are: ${circleCIJobs}.`);
+    circleCIcall(targetBranch, options.job, options);
   } else {
-    options.runningPublishWorkflows = true
-    circleCIPublishWorkflows.forEach((job) => circleCIcall(targetBranch, job, options))
+    options.runningPublishWorkflows = true;
+    circleCIPublishWorkflows.forEach((job) => circleCIcall(targetBranch, job, options));
   }
 }

 async function buildVSTS (targetBranch, options) {
   if (options.armTest) {
-    assert(vstsArmJobs.includes(options.job), `Unknown VSTS CI arm test job name: ${options.job}. Valid values are: ${vstsArmJobs}.`)
+    assert(vstsArmJobs.includes(options.job), `Unknown VSTS CI arm test job name: ${options.job}. Valid values are: ${vstsArmJobs}.`);
   }

-  console.log(`Triggering VSTS to run build on branch: ${targetBranch} with release flag.`)
+  console.log(`Triggering VSTS to run build on branch: ${targetBranch} with release flag.`);
   const environmentVariables = {
     ELECTRON_RELEASE: 1
-  }
+  };

   if (options.armTest) {
     if (options.circleBuildNum) {
-      environmentVariables.CIRCLE_BUILD_NUM = options.circleBuildNum
+      environmentVariables.CIRCLE_BUILD_NUM = options.circleBuildNum;
     } else if (options.appveyorJobId) {
-      environmentVariables.APPVEYOR_JOB_ID = options.appveyorJobId
+      environmentVariables.APPVEYOR_JOB_ID = options.appveyorJobId;
     }
   } else {
     if (!options.ghRelease) {
-      environmentVariables.UPLOAD_TO_S3 = 1
+      environmentVariables.UPLOAD_TO_S3 = 1;
     }
   }

@@ -278,12 +278,12 @@ async function buildVSTS (targetBranch, options) {
     headers: {
       'Content-Type': 'application/json'
     }
-  }
+  };
   const vstsResponse = await makeRequest(requestOpts, true).catch(err => {
-    console.log('Error calling VSTS to get build definitions:', err)
-  })
-  const buildsToRun = vstsResponse.value.filter(build => build.name === options.job)
-  buildsToRun.forEach((build) => callVSTSBuild(build, targetBranch, environmentVariables))
+    console.log('Error calling VSTS to get build definitions:', err);
+  });
+  const buildsToRun = vstsResponse.value.filter(build => build.name === options.job);
+  buildsToRun.forEach((build) => callVSTSBuild(build, targetBranch, environmentVariables));
 }

 async function callVSTSBuild (build, targetBranch, environmentVariables) {
@@ -291,9 +291,9 @@ async function callVSTSBuild (build, targetBranch, environmentVariables) {
     definition: build,
     sourceBranch: targetBranch,
     priority: 'high'
-  }
+  };
   if (Object.keys(environmentVariables).length !== 0) {
-    buildBody.parameters = JSON.stringify(environmentVariables)
+    buildBody.parameters = JSON.stringify(environmentVariables);
   }
   const requestOpts = {
     url: `${VSTS_URL}/builds?api-version=4.1`,
@@ -306,54 +306,54 @@ async function callVSTSBuild (build, targetBranch, environmentVariables) {
     },
     body: JSON.stringify(buildBody),
     method: 'POST'
-  }
-  jobRequestedCount++
+  };
+  jobRequestedCount++;
   const vstsResponse = await makeRequest(requestOpts, true).catch(err => {
-    console.log(`Error calling VSTS for job ${build.name}`, err)
-  })
-  console.log(`VSTS release build request for ${build.name} successful. Check ${vstsResponse._links.web.href} for status.`)
+    console.log(`Error calling VSTS for job ${build.name}`, err);
+  });
+  console.log(`VSTS release build request for ${build.name} successful. Check ${vstsResponse._links.web.href} for status.`);
 }

 function runRelease (targetBranch, options) {
   if (options.ci) {
     switch (options.ci) {
       case 'CircleCI': {
-        buildCircleCI(targetBranch, options)
-        break
+        buildCircleCI(targetBranch, options);
+        break;
       }
       case 'AppVeyor': {
-        buildAppVeyor(targetBranch, options)
-        break
+        buildAppVeyor(targetBranch, options);
+        break;
       }
       case 'VSTS': {
-        buildVSTS(targetBranch, options)
-        break
+        buildVSTS(targetBranch, options);
+        break;
       }
       default: {
-        console.log(`Error! Unknown CI: ${options.ci}.`)
-        process.exit(1)
+        console.log(`Error! Unknown CI: ${options.ci}.`);
+        process.exit(1);
       }
     }
   } else {
-    buildCircleCI(targetBranch, options)
-    buildAppVeyor(targetBranch, options)
+    buildCircleCI(targetBranch, options);
+    buildAppVeyor(targetBranch, options);
   }
-  console.log(`${jobRequestedCount} jobs were requested.`)
+  console.log(`${jobRequestedCount} jobs were requested.`);
 }

-module.exports = runRelease
+module.exports = runRelease;

 if (require.main === module) {
   const args = require('minimist')(process.argv.slice(2), {
     boolean: ['ghRelease', 'armTest']
-  })
-  const targetBranch = args._[0]
+  });
+  const targetBranch = args._[0];
   if (args._.length < 1) {
     console.log(`Trigger CI to build release builds of electron.
     Usage: ci-release-build.js [--job=CI_JOB_NAME] [--ci=CircleCI|AppVeyor|VSTS]
     [--ghRelease] [--armTest] [--circleBuildNum=xxx] [--appveyorJobId=xxx] TARGET_BRANCH
-    `)
-    process.exit(0)
+    `);
+    process.exit(0);
   }
-  runRelease(targetBranch, args)
+  runRelease(targetBranch, args);
 }

@@ -1,38 +1,38 @@
-if (!process.env.CI) require('dotenv-safe').load()
+if (!process.env.CI) require('dotenv-safe').load();

 const octokit = require('@octokit/rest')({
   auth: process.env.ELECTRON_GITHUB_TOKEN
-})
+});

 if (process.argv.length < 3) {
-  console.log('Usage: find-release version')
-  process.exit(1)
+  console.log('Usage: find-release version');
+  process.exit(1);
 }

-const version = process.argv[2]
+const version = process.argv[2];

 async function findRelease () {
   const releases = await octokit.repos.listReleases({
     owner: 'electron',
     repo: version.indexOf('nightly') > 0 ? 'nightlies' : 'electron'
-  })
+  });

-  const targetRelease = releases.data.find(release => release.tag_name === version)
-  let returnObject = {}
+  const targetRelease = releases.data.find(release => release.tag_name === version);
+  let returnObject = {};

   if (targetRelease) {
     returnObject = {
       id: targetRelease.id,
       draft: targetRelease.draft,
       exists: true
-    }
+    };
   } else {
     returnObject = {
       exists: false,
       draft: false
-    }
+    };
   }
-  console.log(JSON.stringify(returnObject))
+  console.log(JSON.stringify(returnObject));
 }

-findRelease()
+findRelease();

@@ -1,53 +1,53 @@
 #!/usr/bin/env node

-const { GitProcess } = require('dugite')
-const minimist = require('minimist')
-const path = require('path')
-const semver = require('semver')
+const { GitProcess } = require('dugite');
+const minimist = require('minimist');
+const path = require('path');
+const semver = require('semver');

-const { ELECTRON_DIR } = require('../../lib/utils')
-const notesGenerator = require('./notes.js')
+const { ELECTRON_DIR } = require('../../lib/utils');
+const notesGenerator = require('./notes.js');

-const semverify = version => version.replace(/^origin\//, '').replace('x', '0').replace(/-/g, '.')
+const semverify = version => version.replace(/^origin\//, '').replace('x', '0').replace(/-/g, '.');

 const runGit = async (args) => {
-  const response = await GitProcess.exec(args, ELECTRON_DIR)
+  const response = await GitProcess.exec(args, ELECTRON_DIR);
   if (response.exitCode !== 0) {
-    throw new Error(response.stderr.trim())
+    throw new Error(response.stderr.trim());
   }
-  return response.stdout.trim()
-}
+  return response.stdout.trim();
+};

-const tagIsSupported = tag => tag && !tag.includes('nightly') && !tag.includes('unsupported')
-const tagIsBeta = tag => tag.includes('beta')
-const tagIsStable = tag => tagIsSupported(tag) && !tagIsBeta(tag)
+const tagIsSupported = tag => tag && !tag.includes('nightly') && !tag.includes('unsupported');
+const tagIsBeta = tag => tag.includes('beta');
+const tagIsStable = tag => tagIsSupported(tag) && !tagIsBeta(tag);

 const getTagsOf = async (point) => {
   return (await runGit(['tag', '--merged', point]))
     .split('\n')
     .map(tag => tag.trim())
     .filter(tag => semver.valid(tag))
-    .sort(semver.compare)
-}
+    .sort(semver.compare);
+};

 const getTagsOnBranch = async (point) => {
-  const masterTags = await getTagsOf('master')
+  const masterTags = await getTagsOf('master');
   if (point === 'master') {
-    return masterTags
+    return masterTags;
   }

-  const masterTagsSet = new Set(masterTags)
-  return (await getTagsOf(point)).filter(tag => !masterTagsSet.has(tag))
-}
+  const masterTagsSet = new Set(masterTags);
+  return (await getTagsOf(point)).filter(tag => !masterTagsSet.has(tag));
+};

 const getBranchOf = async (point) => {
   const branches = (await runGit(['branch', '-a', '--contains', point]))
     .split('\n')
     .map(branch => branch.trim())
-    .filter(branch => !!branch)
-  const current = branches.find(branch => branch.startsWith('* '))
-  return current ? current.slice(2) : branches.shift()
-}
+    .filter(branch => !!branch);
+  const current = branches.find(branch => branch.startsWith('* '));
+  return current ? current.slice(2) : branches.shift();
+};

 const getAllBranches = async () => {
   return (await runGit(['branch', '--remote']))
@@ -55,101 +55,101 @@ const getAllBranches = async () => {
     .map(branch => branch.trim())
     .filter(branch => !!branch)
     .filter(branch => branch !== 'origin/HEAD -> origin/master')
-    .sort()
-}
+    .sort();
+};

 const getStabilizationBranches = async () => {
   return (await getAllBranches())
-    .filter(branch => /^origin\/\d+-\d+-x$/.test(branch))
-}
+    .filter(branch => /^origin\/\d+-\d+-x$/.test(branch));
+};

 const getPreviousStabilizationBranch = async (current) => {
   const stabilizationBranches = (await getStabilizationBranches())
-    .filter(branch => branch !== current && branch !== `origin/${current}`)
+    .filter(branch => branch !== current && branch !== `origin/${current}`);

   if (!semver.valid(current)) {
     // since we don't seem to be on a stabilization branch right now,
     // pick a placeholder name that will yield the newest branch
     // as a comparison point.
-    current = 'v999.999.999'
+    current = 'v999.999.999';
   }

-  let newestMatch = null
+  let newestMatch = null;
   for (const branch of stabilizationBranches) {
     if (semver.gte(semverify(branch), semverify(current))) {
-      continue
+      continue;
     }
     if (newestMatch && semver.lte(semverify(branch), semverify(newestMatch))) {
-      continue
+      continue;
     }
-    newestMatch = branch
+    newestMatch = branch;
   }
-  return newestMatch
-}
+  return newestMatch;
+};

 const getPreviousPoint = async (point) => {
-  const currentBranch = await getBranchOf(point)
-  const currentTag = (await getTagsOf(point)).filter(tag => tagIsSupported(tag)).pop()
-  const currentIsStable = tagIsStable(currentTag)
+  const currentBranch = await getBranchOf(point);
+  const currentTag = (await getTagsOf(point)).filter(tag => tagIsSupported(tag)).pop();
+  const currentIsStable = tagIsStable(currentTag);

   try {
     // First see if there's an earlier tag on the same branch
     // that can serve as a reference point.
-    let tags = (await getTagsOnBranch(`${point}^`)).filter(tag => tagIsSupported(tag))
+    let tags = (await getTagsOnBranch(`${point}^`)).filter(tag => tagIsSupported(tag));
     if (currentIsStable) {
-      tags = tags.filter(tag => tagIsStable(tag))
+      tags = tags.filter(tag => tagIsStable(tag));
     }
     if (tags.length) {
-      return tags.pop()
+      return tags.pop();
     }
   } catch (error) {
-    console.log('error', error)
+    console.log('error', error);
   }

   // Otherwise, use the newest stable release that precedes this branch.
   // To reach that you may have to walk past >1 branch, e.g. to get past
   // 2-1-x which never had a stable release.
-  let branch = currentBranch
+  let branch = currentBranch;
   while (branch) {
-    const prevBranch = await getPreviousStabilizationBranch(branch)
-    const tags = (await getTagsOnBranch(prevBranch)).filter(tag => tagIsStable(tag))
+    const prevBranch = await getPreviousStabilizationBranch(branch);
+    const tags = (await getTagsOnBranch(prevBranch)).filter(tag => tagIsStable(tag));
     if (tags.length) {
-      return tags.pop()
+      return tags.pop();
     }
-    branch = prevBranch
+    branch = prevBranch;
   }
-}
+};

 async function getReleaseNotes (range, newVersion, explicitLinks) {
-  const rangeList = range.split('..') || ['HEAD']
-  const to = rangeList.pop()
-  const from = rangeList.pop() || (await getPreviousPoint(to))
+  const rangeList = range.split('..') || ['HEAD'];
+  const to = rangeList.pop();
+  const from = rangeList.pop() || (await getPreviousPoint(to));

   if (!newVersion) {
-    newVersion = to
+    newVersion = to;
   }

-  console.log(`Generating release notes between ${from} and ${to} for version ${newVersion}`)
-  const notes = await notesGenerator.get(from, to, newVersion)
+  console.log(`Generating release notes between ${from} and ${to} for version ${newVersion}`);
+  const notes = await notesGenerator.get(from, to, newVersion);
   const ret = {
     text: notesGenerator.render(notes, explicitLinks)
-  }
+  };

   if (notes.unknown.length) {
-    ret.warning = `You have ${notes.unknown.length} unknown release notes. Please fix them before releasing.`
+    ret.warning = `You have ${notes.unknown.length} unknown release notes. Please fix them before releasing.`;
   }

-  return ret
+  return ret;
 }

 async function main () {
   const opts = minimist(process.argv.slice(2), {
     boolean: ['explicit-links', 'help'],
     string: ['version']
-  })
-  opts.range = opts._.shift()
+  });
+  opts.range = opts._.shift();
   if (opts.help || !opts.range) {
-    const name = path.basename(process.argv[1])
+    const name = path.basename(process.argv[1]);
     console.log(`
 easy usage: ${name} version

@@ -165,22 +165,22 @@ full usage: ${name} [begin..]end [--version version] [--explicit-links]
 For example, these invocations are equivalent:
   ${process.argv[1]} v4.0.1
   ${process.argv[1]} v4.0.0..v4.0.1 --version v4.0.1
-`)
-    return 0
+`);
+    return 0;
   }

-  const notes = await getReleaseNotes(opts.range, opts.version, opts['explicit-links'])
-  console.log(notes.text)
+  const notes = await getReleaseNotes(opts.range, opts.version, opts['explicit-links']);
+  console.log(notes.text);
   if (notes.warning) {
-    throw new Error(notes.warning)
+    throw new Error(notes.warning);
   }
 }

 if (process.mainModule === module) {
   main().catch((err) => {
-    console.error('Error Occurred:', err)
-    process.exit(1)
-  })
+    console.error('Error Occurred:', err);
+    process.exit(1);
+  });
 }

-module.exports = getReleaseNotes
+module.exports = getReleaseNotes;

@ -1,75 +1,75 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
const childProcess = require('child_process')
|
||||
const fs = require('fs')
|
||||
const os = require('os')
|
||||
const path = require('path')
|
||||
const childProcess = require('child_process');
|
||||
const fs = require('fs');
|
||||
const os = require('os');
|
||||
const path = require('path');
|
||||
|
||||
const { GitProcess } = require('dugite')
|
||||
const { GitProcess } = require('dugite');
|
||||
const octokit = require('@octokit/rest')({
|
||||
auth: process.env.ELECTRON_GITHUB_TOKEN
|
||||
})
|
||||
const semver = require('semver')
|
||||
});
|
||||
const semver = require('semver');
|
||||
|
||||
const { ELECTRON_VERSION, SRC_DIR } = require('../../lib/utils')
|
||||
const { ELECTRON_VERSION, SRC_DIR } = require('../../lib/utils');
|
||||
|
||||
const MAX_FAIL_COUNT = 3
|
||||
const CHECK_INTERVAL = 5000
|
||||
const MAX_FAIL_COUNT = 3;
|
||||
const CHECK_INTERVAL = 5000;
|
||||
|
||||
const CACHE_DIR = path.resolve(__dirname, '.cache')
|
||||
const NO_NOTES = 'No notes'
|
||||
const FOLLOW_REPOS = ['electron/electron', 'electron/node']
|
||||
const CACHE_DIR = path.resolve(__dirname, '.cache');
|
||||
const NO_NOTES = 'No notes';
|
||||
const FOLLOW_REPOS = ['electron/electron', 'electron/node'];
|
||||
|
||||
const breakTypes = new Set(['breaking-change'])
|
||||
const docTypes = new Set(['doc', 'docs'])
|
||||
const featTypes = new Set(['feat', 'feature'])
|
||||
const fixTypes = new Set(['fix'])
|
||||
const otherTypes = new Set(['spec', 'build', 'test', 'chore', 'deps', 'refactor', 'tools', 'vendor', 'perf', 'style', 'ci'])
|
||||
const knownTypes = new Set([...breakTypes.keys(), ...docTypes.keys(), ...featTypes.keys(), ...fixTypes.keys(), ...otherTypes.keys()])
|
||||
const breakTypes = new Set(['breaking-change']);
|
||||
const docTypes = new Set(['doc', 'docs']);
|
||||
const featTypes = new Set(['feat', 'feature']);
|
||||
const fixTypes = new Set(['fix']);
|
||||
const otherTypes = new Set(['spec', 'build', 'test', 'chore', 'deps', 'refactor', 'tools', 'vendor', 'perf', 'style', 'ci']);
|
||||
const knownTypes = new Set([...breakTypes.keys(), ...docTypes.keys(), ...featTypes.keys(), ...fixTypes.keys(), ...otherTypes.keys()]);
|
||||
|
||||
const runGit = async (dir, args) => {
|
||||
const response = await GitProcess.exec(args, dir)
|
||||
const response = await GitProcess.exec(args, dir);
|
||||
if (response.exitCode !== 0) {
|
||||
throw new Error(response.stderr.trim())
|
||||
throw new Error(response.stderr.trim());
|
||||
}
|
||||
return response.stdout.trim()
|
||||
}
|
||||
return response.stdout.trim();
|
||||
};
|
||||
|
||||
const getCommonAncestor = async (dir, point1, point2) => {
|
||||
return runGit(dir, ['merge-base', point1, point2])
|
||||
}
|
||||
return runGit(dir, ['merge-base', point1, point2]);
|
||||
};
|
||||
|
||||
const setPullRequest = (commit, owner, repo, number) => {
|
||||
if (!owner || !repo || !number) {
|
||||
throw new Error(JSON.stringify({ owner, repo, number }, null, 2))
|
||||
throw new Error(JSON.stringify({ owner, repo, number }, null, 2));
|
||||
}
|
||||
|
||||
if (!commit.originalPr) {
|
||||
commit.originalPr = commit.pr
|
||||
commit.originalPr = commit.pr;
|
||||
}
|
||||
|
||||
commit.pr = { owner, repo, number }
|
||||
commit.pr = { owner, repo, number };
|
||||
|
||||
if (!commit.originalPr) {
|
||||
commit.originalPr = commit.pr
|
||||
commit.originalPr = commit.pr;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const getNoteFromClerk = async (number, owner, repo) => {
|
||||
const comments = await getComments(number, owner, repo)
|
||||
if (!comments || !comments.data) return
|
||||
const comments = await getComments(number, owner, repo);
|
||||
if (!comments || !comments.data) return;
|
||||
|
||||
const CLERK_LOGIN = 'release-clerk[bot]'
|
||||
const CLERK_NO_NOTES = '**No Release Notes**'
|
||||
const PERSIST_LEAD = '**Release Notes Persisted**\n\n'
|
||||
const QUOTE_LEAD = '> '
|
||||
const CLERK_LOGIN = 'release-clerk[bot]';
|
||||
const CLERK_NO_NOTES = '**No Release Notes**';
|
||||
const PERSIST_LEAD = '**Release Notes Persisted**\n\n';
|
||||
const QUOTE_LEAD = '> ';
|
||||
|
||||
for (const comment of comments.data.reverse()) {
|
||||
if (comment.user.login !== CLERK_LOGIN) {
|
||||
continue
|
||||
continue;
|
||||
}
|
||||
if (comment.body === CLERK_NO_NOTES) {
|
||||
return NO_NOTES
|
||||
return NO_NOTES;
|
||||
}
|
||||
if (comment.body.startsWith(PERSIST_LEAD)) {
|
||||
return comment.body
|
||||
|
@ -79,10 +79,10 @@ const getNoteFromClerk = async (number, owner, repo) => {
|
|||
.filter(line => line.startsWith(QUOTE_LEAD)) // notes are quoted
|
||||
.map(line => line.slice(QUOTE_LEAD.length)) // unquote the lines
|
||||
.join(' ') // join the note lines
|
||||
.trim()
|
||||
.trim();
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// copied from https://github.com/electron/clerk/blob/master/src/index.ts#L4-L13
|
||||
const OMIT_FROM_RELEASE_NOTES_KEYS = [
|
||||
|
@ -94,36 +94,36 @@ const OMIT_FROM_RELEASE_NOTES_KEYS = [
|
|||
'nothing',
|
||||
'empty',
|
||||
'blank'
|
||||
]
|
||||
];
|
||||
|
||||
const getNoteFromBody = body => {
|
||||
if (!body) {
|
||||
return null
|
||||
return null;
|
||||
}
|
||||
|
||||
const NOTE_PREFIX = 'Notes: '
|
||||
const NOTE_HEADER = '#### Release Notes'
|
||||
const NOTE_PREFIX = 'Notes: ';
|
||||
const NOTE_HEADER = '#### Release Notes';
|
||||
|
||||
let note = body
|
||||
.split(/\r?\n\r?\n/) // split into paragraphs
|
||||
.map(paragraph => paragraph.trim())
|
||||
.map(paragraph => paragraph.startsWith(NOTE_HEADER) ? paragraph.slice(NOTE_HEADER.length).trim() : paragraph)
|
||||
.find(paragraph => paragraph.startsWith(NOTE_PREFIX))
|
||||
.find(paragraph => paragraph.startsWith(NOTE_PREFIX));
|
||||
|
||||
if (note) {
|
||||
note = note
|
||||
.slice(NOTE_PREFIX.length)
|
||||
.replace(/<!--.*-->/, '') // '<!-- change summary here-->'
|
||||
.replace(/\r?\n/, ' ') // remove newlines
|
||||
.trim()
|
||||
.trim();
|
||||
}
|
||||
|
||||
if (note && OMIT_FROM_RELEASE_NOTES_KEYS.includes(note.toLowerCase())) {
|
||||
return NO_NOTES
|
||||
return NO_NOTES;
|
||||
}
|
||||
|
||||
return note
|
||||
}
|
||||
return note;
|
||||
};
|
||||
|
||||
/**
|
||||
* Looks for our project's conventions in the commit message:
|
||||
|
@ -138,71 +138,71 @@ const getNoteFromBody = body => {
|
|||
*/
|
||||
const parseCommitMessage = (commitMessage, owner, repo, commit = {}) => {
|
||||
// split commitMessage into subject & body
|
||||
let subject = commitMessage
|
||||
let body = ''
|
||||
const pos = subject.indexOf('\n')
|
||||
let subject = commitMessage;
|
||||
let body = '';
|
||||
const pos = subject.indexOf('\n');
|
||||
if (pos !== -1) {
|
||||
body = subject.slice(pos).trim()
|
||||
subject = subject.slice(0, pos).trim()
|
||||
body = subject.slice(pos).trim();
|
||||
subject = subject.slice(0, pos).trim();
|
||||
}
|
||||
|
||||
if (!commit.originalSubject) {
|
||||
commit.originalSubject = subject
|
||||
commit.originalSubject = subject;
|
||||
}
|
||||
|
||||
if (body) {
|
||||
commit.body = body
|
||||
commit.body = body;
|
||||
|
||||
const note = getNoteFromBody(body)
|
||||
if (note) { commit.note = note }
|
||||
const note = getNoteFromBody(body);
|
||||
if (note) { commit.note = note; }
|
||||
}
|
||||
|
||||
// if the subject ends in ' (#dddd)', treat it as a pull request id
|
||||
let match
|
||||
let match;
|
||||
if ((match = subject.match(/^(.*)\s\(#(\d+)\)$/))) {
|
||||
setPullRequest(commit, owner, repo, parseInt(match[2]))
|
||||
subject = match[1]
|
||||
setPullRequest(commit, owner, repo, parseInt(match[2]));
|
||||
subject = match[1];
|
||||
}
|
||||
|
||||
// if the subject begins with 'word:', treat it as a semantic commit
|
||||
if ((match = subject.match(/^(\w+):\s(.*)$/))) {
|
||||
const type = match[1].toLocaleLowerCase()
|
||||
const type = match[1].toLocaleLowerCase();
|
||||
if (knownTypes.has(type)) {
|
||||
commit.type = type
|
||||
subject = match[2]
|
||||
commit.type = type;
|
||||
subject = match[2];
|
||||
}
|
||||
}
|
||||
|
||||
// Check for GitHub commit message that indicates a PR
|
||||
if ((match = subject.match(/^Merge pull request #(\d+) from (.*)$/))) {
|
||||
setPullRequest(commit, owner, repo, parseInt(match[1]))
|
||||
commit.pr.branch = match[2].trim()
|
||||
setPullRequest(commit, owner, repo, parseInt(match[1]));
|
||||
commit.pr.branch = match[2].trim();
|
||||
}
|
||||
|
||||
// Check for a trop comment that indicates a PR
|
||||
if ((match = commitMessage.match(/\bBackport of #(\d+)\b/))) {
|
||||
setPullRequest(commit, owner, repo, parseInt(match[1]))
|
||||
setPullRequest(commit, owner, repo, parseInt(match[1]));
|
||||
}
|
||||
|
||||
// https://help.github.com/articles/closing-issues-using-keywords/
|
||||
if ((match = subject.match(/\b(?:close|closes|closed|fix|fixes|fixed|resolve|resolves|resolved|for)\s#(\d+)\b/))) {
|
||||
commit.issueNumber = parseInt(match[1])
|
||||
commit.issueNumber = parseInt(match[1]);
|
||||
if (!commit.type) {
|
||||
commit.type = 'fix'
|
||||
commit.type = 'fix';
|
||||
}
|
||||
}
|
||||
|
||||
// look for 'fixes' in markdown; e.g. 'Fixes [#8952](https://github.com/electron/electron/issues/8952)'
|
||||
if (!commit.issueNumber && ((match = commitMessage.match(/Fixes \[#(\d+)\]\(https:\/\/github.com\/(\w+)\/(\w+)\/issues\/(\d+)\)/)))) {
|
||||
commit.issueNumber = parseInt(match[1])
|
||||
commit.issueNumber = parseInt(match[1]);
|
||||
if (commit.pr && commit.pr.number === commit.issueNumber) {
|
||||
commit.pr = null
|
||||
commit.pr = null;
|
||||
}
|
||||
if (commit.originalPr && commit.originalPr.number === commit.issueNumber) {
|
||||
commit.originalPr = null
|
||||
commit.originalPr = null;
|
||||
}
|
||||
if (!commit.type) {
|
||||
commit.type = 'fix'
|
||||
commit.type = 'fix';
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -211,55 +211,55 @@ const parseCommitMessage = (commitMessage, owner, repo, commit = {}) => {
|
|||
.split(/\r?\n/) // split into lines
|
||||
.map(line => line.trim())
|
||||
.some(line => line.startsWith('BREAKING CHANGE'))) {
|
||||
commit.type = 'breaking-change'
|
||||
commit.type = 'breaking-change';
|
||||
}
|
||||
|
||||
// Check for a reversion commit
|
||||
if ((match = body.match(/This reverts commit ([a-f0-9]{40})\./))) {
|
||||
commit.revertHash = match[1]
|
||||
commit.revertHash = match[1];
|
||||
}
|
||||
|
||||
// Edge case: manual backport where commit has `owner/repo#pull` notation
|
||||
if (commitMessage.toLowerCase().includes('backport') &&
|
||||
((match = commitMessage.match(/\b(\w+)\/(\w+)#(\d+)\b/)))) {
|
||||
const [, owner, repo, number] = match
|
||||
const [, owner, repo, number] = match;
|
||||
if (FOLLOW_REPOS.includes(`${owner}/${repo}`)) {
|
||||
setPullRequest(commit, owner, repo, number)
|
||||
setPullRequest(commit, owner, repo, number);
|
||||
}
|
||||
}
|
||||
|
||||
// Edge case: manual backport where commit has a link to the backport PR
|
||||
if (commitMessage.includes('ackport') &&
|
||||
((match = commitMessage.match(/https:\/\/github\.com\/(\w+)\/(\w+)\/pull\/(\d+)/)))) {
|
||||
const [, owner, repo, number] = match
|
||||
const [, owner, repo, number] = match;
|
||||
if (FOLLOW_REPOS.includes(`${owner}/${repo}`)) {
|
||||
setPullRequest(commit, owner, repo, number)
|
||||
setPullRequest(commit, owner, repo, number);
|
||||
}
|
||||
}
|
||||
|
||||
// Legacy commits: pre-semantic commits
|
||||
if (!commit.type || commit.type === 'chore') {
|
||||
const commitMessageLC = commitMessage.toLocaleLowerCase()
|
||||
const commitMessageLC = commitMessage.toLocaleLowerCase();
|
||||
if ((match = commitMessageLC.match(/\bchore\((\w+)\):/))) {
|
||||
// example: 'Chore(docs): description'
|
||||
commit.type = knownTypes.has(match[1]) ? match[1] : 'chore'
|
||||
commit.type = knownTypes.has(match[1]) ? match[1] : 'chore';
|
||||
} else if (commitMessageLC.match(/\b(?:fix|fixes|fixed)/)) {
|
||||
// example: 'fix a bug'
|
||||
commit.type = 'fix'
|
||||
commit.type = 'fix';
|
||||
} else if (commitMessageLC.match(/\[(?:docs|doc)\]/)) {
|
||||
// example: '[docs]
|
||||
commit.type = 'doc'
|
||||
commit.type = 'doc';
|
||||
}
|
||||
}
|
||||
|
||||
commit.subject = subject.trim()
|
||||
return commit
|
||||
}
|
||||
commit.subject = subject.trim();
|
||||
return commit;
|
||||
};
|
||||
|
||||
const getLocalCommitHashes = async (dir, ref) => {
|
||||
const args = ['log', '-z', '--format=%H', ref]
|
||||
return (await runGit(dir, args)).split('\0').map(hash => hash.trim())
|
||||
}
|
||||
const args = ['log', '-z', '--format=%H', ref];
|
||||
return (await runGit(dir, args)).split('\0').map(hash => hash.trim());
|
||||
};
|
||||
|
||||
/*
|
||||
* possible properties:
|
||||
|
@ -267,75 +267,75 @@ const getLocalCommitHashes = async (dir, ref) => {
|
|||
* pr { owner, repo, number, branch }, revertHash, subject, type
|
||||
*/
|
||||
const getLocalCommitDetails = async (module, point1, point2) => {
|
||||
const { owner, repo, dir } = module
|
||||
const { owner, repo, dir } = module;
|
||||
|
||||
const fieldSep = '||'
|
||||
const format = ['%H', '%P', '%aE', '%B'].join(fieldSep)
|
||||
const args = ['log', '-z', '--cherry-pick', '--right-only', '--first-parent', `--format=${format}`, `${point1}..${point2}`]
|
||||
const commits = (await runGit(dir, args)).split('\0').map(field => field.trim())
|
||||
const details = []
|
||||
const fieldSep = '||';
|
||||
const format = ['%H', '%P', '%aE', '%B'].join(fieldSep);
|
||||
const args = ['log', '-z', '--cherry-pick', '--right-only', '--first-parent', `--format=${format}`, `${point1}..${point2}`];
|
||||
const commits = (await runGit(dir, args)).split('\0').map(field => field.trim());
|
||||
const details = [];
|
||||
for (const commit of commits) {
|
||||
if (!commit) {
|
||||
continue
|
||||
continue;
|
||||
}
|
||||
const [hash, parentHashes, email, commitMessage] = commit.split(fieldSep, 4).map(field => field.trim())
|
||||
const [hash, parentHashes, email, commitMessage] = commit.split(fieldSep, 4).map(field => field.trim());
|
||||
details.push(parseCommitMessage(commitMessage, owner, repo, {
|
||||
email,
|
||||
hash,
|
||||
owner,
|
||||
repo,
|
||||
parentHashes: parentHashes.split()
|
||||
}))
|
||||
}));
|
||||
}
|
||||
return details
|
||||
}
|
||||
return details;
|
||||
};
|
||||
|
||||
const checkCache = async (name, operation) => {
|
||||
const filename = path.resolve(CACHE_DIR, name)
|
||||
const filename = path.resolve(CACHE_DIR, name);
|
||||
if (fs.existsSync(filename)) {
|
||||
return JSON.parse(fs.readFileSync(filename, 'utf8'))
|
||||
return JSON.parse(fs.readFileSync(filename, 'utf8'));
|
||||
}
|
||||
const response = await operation()
|
||||
const response = await operation();
|
||||
if (response) {
|
||||
fs.writeFileSync(filename, JSON.stringify(response))
|
||||
fs.writeFileSync(filename, JSON.stringify(response));
|
||||
}
|
||||
return response
|
||||
}
|
||||
return response;
|
||||
};
|
||||
|
||||
// helper function to add some resiliency to volatile GH api endpoints
|
||||
async function runRetryable (fn, maxRetries) {
|
||||
let lastError
|
||||
let lastError;
|
||||
for (let i = 0; i < maxRetries; i++) {
|
||||
try {
|
||||
return await fn()
|
||||
return await fn();
|
||||
} catch (error) {
|
||||
await new Promise((resolve, reject) => setTimeout(resolve, CHECK_INTERVAL))
|
||||
lastError = error
|
||||
await new Promise((resolve, reject) => setTimeout(resolve, CHECK_INTERVAL));
|
||||
lastError = error;
|
||||
}
|
||||
}
|
||||
// Silently eat 404s.
|
||||
if (lastError.status !== 404) throw lastError
|
||||
if (lastError.status !== 404) throw lastError;
|
||||
}
|
||||
|
||||
const getPullRequest = async (number, owner, repo) => {
|
||||
const name = `${owner}-${repo}-pull-${number}`
|
||||
const retryableFunc = () => octokit.pulls.get({ pull_number: number, owner, repo })
|
||||
return checkCache(name, () => runRetryable(retryableFunc, MAX_FAIL_COUNT))
|
||||
}
|
||||
const name = `${owner}-${repo}-pull-${number}`;
|
||||
const retryableFunc = () => octokit.pulls.get({ pull_number: number, owner, repo });
|
||||
return checkCache(name, () => runRetryable(retryableFunc, MAX_FAIL_COUNT));
|
||||
};
|
||||
|
||||
const getComments = async (number, owner, repo) => {
|
||||
const name = `${owner}-${repo}-issue-${number}-comments`
|
||||
const retryableFunc = () => octokit.issues.listComments({ issue_number: number, owner, repo, per_page: 100 })
|
||||
return checkCache(name, () => runRetryable(retryableFunc, MAX_FAIL_COUNT))
|
||||
}
|
||||
const name = `${owner}-${repo}-issue-${number}-comments`;
|
||||
const retryableFunc = () => octokit.issues.listComments({ issue_number: number, owner, repo, per_page: 100 });
|
||||
return checkCache(name, () => runRetryable(retryableFunc, MAX_FAIL_COUNT));
|
||||
};
|
||||
|
||||
const addRepoToPool = async (pool, repo, from, to) => {
|
||||
const commonAncestor = await getCommonAncestor(repo.dir, from, to)
|
||||
const oldHashes = await getLocalCommitHashes(repo.dir, from)
|
||||
oldHashes.forEach(hash => { pool.processedHashes.add(hash) })
|
||||
const commits = await getLocalCommitDetails(repo, commonAncestor, to)
|
||||
pool.commits.push(...commits)
|
||||
}
|
||||
const commonAncestor = await getCommonAncestor(repo.dir, from, to);
|
||||
const oldHashes = await getLocalCommitHashes(repo.dir, from);
|
||||
oldHashes.forEach(hash => { pool.processedHashes.add(hash); });
|
||||
const commits = await getLocalCommitDetails(repo, commonAncestor, to);
|
||||
pool.commits.push(...commits);
|
||||
};
|
||||
|
||||
/***
|
||||
**** Other Repos
|
||||
|
@ -345,21 +345,21 @@ const addRepoToPool = async (pool, repo, from, to) => {
|
|||
|
||||
const getDepsVariable = async (ref, key) => {
|
||||
// get a copy of that reference point's DEPS file
|
||||
const deps = await runGit(ELECTRON_VERSION, ['show', `${ref}:DEPS`])
|
||||
const filename = path.resolve(os.tmpdir(), 'DEPS')
|
||||
fs.writeFileSync(filename, deps)
|
||||
const deps = await runGit(ELECTRON_VERSION, ['show', `${ref}:DEPS`]);
|
||||
const filename = path.resolve(os.tmpdir(), 'DEPS');
|
||||
fs.writeFileSync(filename, deps);
|
||||
|
||||
// query the DEPS file
|
||||
const response = childProcess.spawnSync(
|
||||
'gclient',
|
||||
['getdep', '--deps-file', filename, '--var', key],
|
||||
{ encoding: 'utf8' }
|
||||
)
|
||||
);
|
||||
|
||||
// cleanup
|
||||
fs.unlinkSync(filename)
|
||||
return response.stdout.trim()
|
||||
}
|
||||
fs.unlinkSync(filename);
|
||||
return response.stdout.trim();
|
||||
};
|
||||
|
||||
const getDependencyCommitsGN = async (pool, fromRef, toRef) => {
|
||||
const repos = [{ // just node
|
||||
|
@ -367,16 +367,16 @@ const getDependencyCommitsGN = async (pool, fromRef, toRef) => {
|
|||
repo: 'node',
|
||||
dir: path.resolve(SRC_DIR, 'third_party', 'electron_node'),
|
||||
deps_variable_name: 'node_version'
|
||||
}]
|
||||
}];
|
||||
|
||||
for (const repo of repos) {
|
||||
// the 'DEPS' file holds the dependency reference point
|
||||
const key = repo.deps_variable_name
|
||||
const from = await getDepsVariable(fromRef, key)
|
||||
const to = await getDepsVariable(toRef, key)
|
||||
await addRepoToPool(pool, repo, from, to)
|
||||
const key = repo.deps_variable_name;
|
||||
const from = await getDepsVariable(fromRef, key);
|
||||
const to = await getDepsVariable(toRef, key);
|
||||
await addRepoToPool(pool, repo, from, to);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Changes are interesting if they make a change relative to a previous
|
||||
// release in the same series. For example if you fix a Y.0.0 bug, that
|
||||
|
@ -388,17 +388,17 @@ const getDependencyCommitsGN = async (pool, fromRef, toRef) => {
|
|||
// branches' changes. Otherwise we will have an overwhelmingly long
|
||||
// list of mostly-irrelevant changes.
|
||||
const shouldIncludeMultibranchChanges = (version) => {
|
||||
let show = true
|
||||
let show = true;
|
||||
|
||||
if (semver.valid(version)) {
|
||||
const prerelease = semver.prerelease(version)
|
||||
const prerelease = semver.prerelease(version);
|
||||
show = prerelease
|
||||
? parseInt(prerelease.pop()) > 1
|
||||
: semver.patch(version) > 0
|
||||
: semver.patch(version) > 0;
|
||||
}
|
||||
|
||||
return show
|
||||
}
|
||||
return show;
|
||||
};
|
||||
|
||||
/***
|
||||
**** Main
|
||||
|
@ -406,131 +406,131 @@ const shouldIncludeMultibranchChanges = (version) => {
|
|||
|
||||
const getNotes = async (fromRef, toRef, newVersion) => {
|
||||
if (!fs.existsSync(CACHE_DIR)) {
|
||||
fs.mkdirSync(CACHE_DIR)
|
||||
fs.mkdirSync(CACHE_DIR);
|
||||
}
|
||||
|
||||
const pool = {
|
||||
processedHashes: new Set(),
|
||||
commits: []
|
||||
}
|
||||
};
|
||||
|
||||
// get the electron/electron commits
|
||||
const electron = { owner: 'electron', repo: 'electron', dir: ELECTRON_VERSION }
|
||||
await addRepoToPool(pool, electron, fromRef, toRef)
|
||||
const electron = { owner: 'electron', repo: 'electron', dir: ELECTRON_VERSION };
|
||||
await addRepoToPool(pool, electron, fromRef, toRef);
|
||||
|
||||
// Don't include submodules if comparing across major versions;
|
||||
// there's just too much churn otherwise.
|
||||
const includeDeps = semver.valid(fromRef) &&
|
||||
semver.valid(toRef) &&
|
||||
semver.major(fromRef) === semver.major(toRef)
|
||||
semver.major(fromRef) === semver.major(toRef);
|
||||
|
||||
if (includeDeps) {
|
||||
await getDependencyCommitsGN(pool, fromRef, toRef)
|
||||
await getDependencyCommitsGN(pool, fromRef, toRef);
|
||||
}
|
||||
|
||||
// remove any old commits
|
||||
pool.commits = pool.commits.filter(commit => !pool.processedHashes.has(commit.hash))
|
||||
pool.commits = pool.commits.filter(commit => !pool.processedHashes.has(commit.hash));
|
||||
|
||||
// if a commmit _and_ revert occurred in the unprocessed set, skip them both
|
||||
for (const commit of pool.commits) {
|
||||
const revertHash = commit.revertHash
|
||||
const revertHash = commit.revertHash;
|
||||
if (!revertHash) {
|
||||
continue
|
||||
continue;
|
||||
}
|
||||
|
||||
const revert = pool.commits.find(commit => commit.hash === revertHash)
|
||||
const revert = pool.commits.find(commit => commit.hash === revertHash);
|
||||
if (!revert) {
|
||||
continue
|
||||
continue;
|
||||
}
|
||||
|
||||
commit.note = NO_NOTES
|
||||
revert.note = NO_NOTES
|
||||
pool.processedHashes.add(commit.hash)
|
||||
pool.processedHashes.add(revertHash)
|
||||
commit.note = NO_NOTES;
|
||||
revert.note = NO_NOTES;
|
||||
pool.processedHashes.add(commit.hash);
|
||||
pool.processedHashes.add(revertHash);
|
||||
}
|
||||
|
||||
// scrape PRs for release note 'Notes:' comments
|
||||
for (const commit of pool.commits) {
|
||||
let pr = commit.pr
|
||||
let pr = commit.pr;
|
||||
|
||||
let prSubject
|
||||
let prSubject;
|
||||
while (pr && !commit.note) {
|
||||
const note = await getNoteFromClerk(pr.number, pr.owner, pr.repo)
|
||||
const note = await getNoteFromClerk(pr.number, pr.owner, pr.repo);
|
||||
if (note) {
|
||||
commit.note = note
|
||||
commit.note = note;
|
||||
}
|
||||
|
||||
// if we already have all the data we need, stop scraping the PRs
|
||||
if (commit.note && commit.type && prSubject) {
|
||||
break
|
||||
break;
|
||||
}
|
||||
|
||||
const prData = await getPullRequest(pr.number, pr.owner, pr.repo)
|
||||
const prData = await getPullRequest(pr.number, pr.owner, pr.repo);
|
||||
if (!prData || !prData.data) {
|
||||
break
|
||||
break;
|
||||
}
|
||||
|
||||
// try to pull a release note from the pull comment
|
||||
const prParsed = parseCommitMessage(`${prData.data.title}\n\n${prData.data.body}`, pr.owner, pr.repo)
|
||||
const prParsed = parseCommitMessage(`${prData.data.title}\n\n${prData.data.body}`, pr.owner, pr.repo);
|
||||
if (!commit.note) {
|
||||
commit.note = prParsed.note
|
||||
commit.note = prParsed.note;
|
||||
}
|
||||
if (!commit.type || prParsed.type === 'breaking-change') {
|
||||
commit.type = prParsed.type
|
||||
commit.type = prParsed.type;
|
||||
}
|
||||
prSubject = prSubject || prParsed.subject
|
||||
prSubject = prSubject || prParsed.subject;
|
||||
|
||||
pr = prParsed.pr && (prParsed.pr.number !== pr.number) ? prParsed.pr : null
|
||||
pr = prParsed.pr && (prParsed.pr.number !== pr.number) ? prParsed.pr : null;
|
||||
}
|
||||
|
||||
// if we still don't have a note, it's because someone missed a 'Notes:
|
||||
// comment in a PR somewhere... use the PR subject as a fallback.
|
||||
commit.note = commit.note || prSubject
|
||||
commit.note = commit.note || prSubject;
|
||||
}
|
||||
|
||||
// remove non-user-facing commits
|
||||
pool.commits = pool.commits
|
||||
.filter(commit => commit.note !== NO_NOTES)
|
||||
.filter(commit => !((commit.note || commit.subject).match(/^[Bb]ump v\d+\.\d+\.\d+/)))
|
||||
.filter(commit => !((commit.note || commit.subject).match(/^[Bb]ump v\d+\.\d+\.\d+/)));
|
||||
|
||||
if (!shouldIncludeMultibranchChanges(newVersion)) {
|
||||
// load all the prDatas
|
||||
await Promise.all(
|
||||
pool.commits.map(commit => (async () => {
|
||||
const { pr } = commit
|
||||
const { pr } = commit;
|
||||
if (typeof pr === 'object') {
|
||||
const prData = await getPullRequest(pr.number, pr.owner, pr.repo)
|
||||
const prData = await getPullRequest(pr.number, pr.owner, pr.repo);
|
||||
if (prData) {
|
||||
commit.prData = prData
|
||||
commit.prData = prData;
|
||||
}
|
||||
}
|
||||
})())
|
||||
)
|
||||
);
|
||||
|
||||
// remove items that already landed in a previous major/minor series
|
||||
pool.commits = pool.commits
|
||||
.filter(commit => {
|
||||
if (!commit.prData) {
|
||||
return true
|
||||
return true;
|
||||
}
|
||||
const reducer = (accumulator, current) => {
if (!semver.valid(accumulator)) { return current }
if (!semver.valid(current)) { return accumulator }
return semver.lt(accumulator, current) ? accumulator : current
}
if (!semver.valid(accumulator)) { return current; }
if (!semver.valid(current)) { return accumulator; }
return semver.lt(accumulator, current) ? accumulator : current;
};
const earliestRelease = commit.prData.data.labels
.map(label => label.name.match(/merged\/(\d+)-(\d+)-x/))
.filter(label => !!label)
.map(label => `${label[1]}.${label[2]}.0`)
.reduce(reducer, null)
.reduce(reducer, null);
if (!semver.valid(earliestRelease)) {
return true
return true;
}
return semver.diff(earliestRelease, newVersion).includes('patch')
})
return semver.diff(earliestRelease, newVersion).includes('patch');
});
}

pool.commits = removeSupercededChromiumUpdates(pool.commits)
pool.commits = removeSupercededChromiumUpdates(pool.commits);

const notes = {
breaking: [],

@@ -540,78 +540,78 @@ const getNotes = async (fromRef, toRef, newVersion) => {
other: [],
unknown: [],
name: newVersion
}
};

pool.commits.forEach(commit => {
const str = commit.type
const str = commit.type;
if (!str) {
notes.unknown.push(commit)
notes.unknown.push(commit);
} else if (breakTypes.has(str)) {
notes.breaking.push(commit)
notes.breaking.push(commit);
} else if (docTypes.has(str)) {
notes.docs.push(commit)
notes.docs.push(commit);
} else if (featTypes.has(str)) {
notes.feat.push(commit)
notes.feat.push(commit);
} else if (fixTypes.has(str)) {
notes.fix.push(commit)
notes.fix.push(commit);
} else if (otherTypes.has(str)) {
notes.other.push(commit)
notes.other.push(commit);
} else {
notes.unknown.push(commit)
notes.unknown.push(commit);
}
})
});

return notes
}
return notes;
};

const removeSupercededChromiumUpdates = (commits) => {
const chromiumRegex = /^Updated Chromium to \d+\.\d+\.\d+\.\d+/
const updates = commits.filter(commit => (commit.note || commit.subject).match(chromiumRegex))
const keepers = commits.filter(commit => !updates.includes(commit))
const chromiumRegex = /^Updated Chromium to \d+\.\d+\.\d+\.\d+/;
const updates = commits.filter(commit => (commit.note || commit.subject).match(chromiumRegex));
const keepers = commits.filter(commit => !updates.includes(commit));

// keep the newest update.
if (updates.length) {
updates.sort((a, b) => a.originalPr.number - b.originalPr.number)
keepers.push(updates.pop())
updates.sort((a, b) => a.originalPr.number - b.originalPr.number);
keepers.push(updates.pop());
}

return keepers
}
return keepers;
};

/***
**** Render
***/

const renderLink = (commit, explicitLinks) => {
let link
const pr = commit.originalPr
let link;
const pr = commit.originalPr;
if (pr) {
const { owner, repo, number } = pr
const url = `https://github.com/${owner}/${repo}/pull/${number}`
const { owner, repo, number } = pr;
const url = `https://github.com/${owner}/${repo}/pull/${number}`;
const text = owner === 'electron' && repo === 'electron'
? `#${number}`
: `${owner}/${repo}#${number}`
link = explicitLinks ? `[${text}](${url})` : text
: `${owner}/${repo}#${number}`;
link = explicitLinks ? `[${text}](${url})` : text;
} else {
const { owner, repo, hash } = commit
const url = `https://github.com/${owner}/${repo}/commit/${hash}`
const { owner, repo, hash } = commit;
const url = `https://github.com/${owner}/${repo}/commit/${hash}`;
const text = owner === 'electron' && repo === 'electron'
? `${hash.slice(0, 8)}`
: `${owner}/${repo}@${hash.slice(0, 8)}`
link = explicitLinks ? `[${text}](${url})` : text
: `${owner}/${repo}@${hash.slice(0, 8)}`;
link = explicitLinks ? `[${text}](${url})` : text;
}
return link
}
return link;
};

const renderCommit = (commit, explicitLinks) => {
// clean up the note
let note = commit.note || commit.subject
note = note.trim()
let note = commit.note || commit.subject;
note = note.trim();
if (note.length !== 0) {
note = note[0].toUpperCase() + note.substr(1)
note = note[0].toUpperCase() + note.substr(1);

if (!note.endsWith('.')) {
note = note + '.'
note = note + '.';
}

const commonVerbs = {

@@ -631,57 +631,57 @@ const renderCommit = (commit, explicitLinks) => {
Stopped: ['Stop'],
Updated: ['Update'],
Upgraded: ['Upgrade']
}
};
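// rewrite a leading present-tense verb to its past-tense form (e.g. 'Fix ...' -> 'Fixed ...')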
for (const [key, values] of Object.entries(commonVerbs)) {
for (const value of values) {
const start = `${value} `
const start = `${value} `;
if (note.startsWith(start)) {
note = `${key} ${note.slice(start.length)}`
note = `${key} ${note.slice(start.length)}`;
}
}
}
}

const link = renderLink(commit, explicitLinks)
const link = renderLink(commit, explicitLinks);

return { note, link }
}
return { note, link };
};

const renderNotes = (notes, explicitLinks) => {
const rendered = [`# Release Notes for ${notes.name}\n\n`]
const rendered = [`# Release Notes for ${notes.name}\n\n`];

const renderSection = (title, commits) => {
if (commits.length === 0) {
return
return;
}
const notes = new Map()
const notes = new Map();
for (const note of commits.map(commit => renderCommit(commit, explicitLinks))) {
if (!notes.has(note.note)) {
notes.set(note.note, [note.link])
notes.set(note.note, [note.link]);
} else {
notes.get(note.note).push(note.link)
notes.get(note.note).push(note.link);
}
}
rendered.push(`## ${title}\n\n`)
const lines = []
notes.forEach((links, key) => lines.push(` * ${key} ${links.map(link => link.toString()).sort().join(', ')}\n`))
rendered.push(...lines.sort(), '\n')
}
rendered.push(`## ${title}\n\n`);
const lines = [];
notes.forEach((links, key) => lines.push(` * ${key} ${links.map(link => link.toString()).sort().join(', ')}\n`));
rendered.push(...lines.sort(), '\n');
};

renderSection('Breaking Changes', notes.breaking)
renderSection('Features', notes.feat)
renderSection('Fixes', notes.fix)
renderSection('Other Changes', notes.other)
renderSection('Breaking Changes', notes.breaking);
renderSection('Features', notes.feat);
renderSection('Fixes', notes.fix);
renderSection('Other Changes', notes.other);

if (notes.docs.length) {
const docs = notes.docs.map(commit => renderLink(commit, explicitLinks)).sort()
rendered.push('## Documentation\n\n', ` * Documentation changes: ${docs.join(', ')}\n`, '\n')
const docs = notes.docs.map(commit => renderLink(commit, explicitLinks)).sort();
rendered.push('## Documentation\n\n', ` * Documentation changes: ${docs.join(', ')}\n`, '\n');
}

renderSection('Unknown', notes.unknown)
renderSection('Unknown', notes.unknown);

return rendered.join('')
}
return rendered.join('');
};

/***
**** Module

@@ -690,4 +690,4 @@ const renderNotes = (notes, explicitLinks) => {
module.exports = {
get: getNotes,
render: renderNotes
}
};

@@ -1,105 +1,105 @@
#!/usr/bin/env node

if (!process.env.CI) require('dotenv-safe').load()
if (!process.env.CI) require('dotenv-safe').load();
const args = require('minimist')(process.argv.slice(2), {
boolean: ['automaticRelease', 'notesOnly', 'stable']
})
const ciReleaseBuild = require('./ci-release-build')
});
const ciReleaseBuild = require('./ci-release-build');
const octokit = require('@octokit/rest')({
auth: process.env.ELECTRON_GITHUB_TOKEN
})
const { execSync } = require('child_process')
const { GitProcess } = require('dugite')
});
const { execSync } = require('child_process');
const { GitProcess } = require('dugite');

const path = require('path')
const readline = require('readline')
const releaseNotesGenerator = require('./notes/index.js')
const { getCurrentBranch, ELECTRON_DIR } = require('../lib/utils.js')
const bumpType = args._[0]
const targetRepo = bumpType === 'nightly' ? 'nightlies' : 'electron'
const path = require('path');
const readline = require('readline');
const releaseNotesGenerator = require('./notes/index.js');
const { getCurrentBranch, ELECTRON_DIR } = require('../lib/utils.js');
const bumpType = args._[0];
const targetRepo = bumpType === 'nightly' ? 'nightlies' : 'electron';

require('colors')
const pass = '✓'.green
const fail = '✗'.red
require('colors');
const pass = '✓'.green;
const fail = '✗'.red;

if (!bumpType && !args.notesOnly) {
console.log('Usage: prepare-release [stable | minor | beta | nightly]' +
' (--stable) (--notesOnly) (--automaticRelease) (--branch)')
process.exit(1)
' (--stable) (--notesOnly) (--automaticRelease) (--branch)');
process.exit(1);
}

async function getNewVersion (dryRun) {
if (!dryRun) {
console.log(`Bumping for new "${bumpType}" version.`)
console.log(`Bumping for new "${bumpType}" version.`);
}
const bumpScript = path.join(__dirname, 'version-bumper.js')
const scriptArgs = ['node', bumpScript, `--bump=${bumpType}`]
if (dryRun) scriptArgs.push('--dryRun')
const bumpScript = path.join(__dirname, 'version-bumper.js');
const scriptArgs = ['node', bumpScript, `--bump=${bumpType}`];
if (dryRun) scriptArgs.push('--dryRun');
try {
let bumpVersion = execSync(scriptArgs.join(' '), { encoding: 'UTF-8' })
bumpVersion = bumpVersion.substr(bumpVersion.indexOf(':') + 1).trim()
const newVersion = `v${bumpVersion}`
let bumpVersion = execSync(scriptArgs.join(' '), { encoding: 'UTF-8' });
bumpVersion = bumpVersion.substr(bumpVersion.indexOf(':') + 1).trim();
const newVersion = `v${bumpVersion}`;
if (!dryRun) {
console.log(`${pass} Successfully bumped version to ${newVersion}`)
console.log(`${pass} Successfully bumped version to ${newVersion}`);
}
return newVersion
return newVersion;
} catch (err) {
console.log(`${fail} Could not bump version, error was:`, err)
throw err
console.log(`${fail} Could not bump version, error was:`, err);
throw err;
}
}

async function getReleaseNotes (currentBranch, newVersion) {
if (bumpType === 'nightly') {
return { text: 'Nightlies do not get release notes, please compare tags for info.' }
return { text: 'Nightlies do not get release notes, please compare tags for info.' };
}
console.log(`Generating release notes for ${currentBranch}.`)
const releaseNotes = await releaseNotesGenerator(currentBranch, newVersion)
console.log(`Generating release notes for ${currentBranch}.`);
const releaseNotes = await releaseNotesGenerator(currentBranch, newVersion);
if (releaseNotes.warning) {
console.warn(releaseNotes.warning)
console.warn(releaseNotes.warning);
}
return releaseNotes
return releaseNotes;
}

async function createRelease (branchToTarget, isBeta) {
const newVersion = await getNewVersion()
const releaseNotes = await getReleaseNotes(branchToTarget, newVersion)
await tagRelease(newVersion)
const newVersion = await getNewVersion();
const releaseNotes = await getReleaseNotes(branchToTarget, newVersion);
await tagRelease(newVersion);

console.log('Checking for existing draft release.')
console.log('Checking for existing draft release.');
const releases = await octokit.repos.listReleases({
owner: 'electron',
repo: targetRepo
}).catch(err => {
console.log(`${fail} Could not get releases. Error was: `, err)
})
console.log(`${fail} Could not get releases. Error was: `, err);
});

const drafts = releases.data.filter(release => release.draft &&
release.tag_name === newVersion)
release.tag_name === newVersion);
if (drafts.length > 0) {
console.log(`${fail} Aborting because draft release for
${drafts[0].tag_name} already exists.`)
process.exit(1)
${drafts[0].tag_name} already exists.`);
process.exit(1);
}
console.log(`${pass} A draft release does not exist; creating one.`)
console.log(`${pass} A draft release does not exist; creating one.`);

let releaseBody
let releaseIsPrelease = false
let releaseBody;
let releaseIsPrelease = false;
if (isBeta) {
if (newVersion.indexOf('nightly') > 0) {
releaseBody = 'Note: This is a nightly release. Please file new issues ' +
'for any bugs you find in it.\n \n This release is published to npm ' +
'under the nightly tag and can be installed via npm install electron@nightly, ' +
`or npm i electron-nightly@${newVersion.substr(1)}.\n \n ${releaseNotes.text}`
`or npm i electron-nightly@${newVersion.substr(1)}.\n \n ${releaseNotes.text}`;
} else {
releaseBody = 'Note: This is a beta release. Please file new issues ' +
'for any bugs you find in it.\n \n This release is published to npm ' +
'under the beta tag and can be installed via npm install electron@beta, ' +
`or npm i electron@${newVersion.substr(1)}.\n \n ${releaseNotes.text}`
`or npm i electron@${newVersion.substr(1)}.\n \n ${releaseNotes.text}`;
}
releaseIsPrelease = true
releaseIsPrelease = true;
} else {
releaseBody = releaseNotes.text
releaseBody = releaseNotes.text;
}

const release = await octokit.repos.createRelease({

@@ -112,22 +112,22 @@ async function createRelease (branchToTarget, isBeta) {
prerelease: releaseIsPrelease,
target_commitish: newVersion.indexOf('nightly') !== -1 ? 'master' : branchToTarget
}).catch(err => {
console.log(`${fail} Error creating new release: `, err)
process.exit(1)
})
console.log(`${fail} Error creating new release: `, err);
process.exit(1);
});

console.log(`Release has been created with id: ${release.data.id}.`)
console.log(`${pass} Draft release for ${newVersion} successful.`)
console.log(`Release has been created with id: ${release.data.id}.`);
console.log(`${pass} Draft release for ${newVersion} successful.`);
}

async function pushRelease (branch) {
const pushDetails = await GitProcess.exec(['push', 'origin', `HEAD:${branch}`, '--follow-tags'], ELECTRON_DIR)
const pushDetails = await GitProcess.exec(['push', 'origin', `HEAD:${branch}`, '--follow-tags'], ELECTRON_DIR);
if (pushDetails.exitCode === 0) {
console.log(`${pass} Successfully pushed the release. Wait for ` +
'release builds to finish before running "npm run release".')
'release builds to finish before running "npm run release".');
} else {
console.log(`${fail} Error pushing the release: ${pushDetails.stderr}`)
process.exit(1)
console.log(`${fail} Error pushing the release: ${pushDetails.stderr}`);
process.exit(1);
}
}

@@ -135,34 +135,34 @@ async function runReleaseBuilds (branch) {
await ciReleaseBuild(branch, {
ghRelease: true,
automaticRelease: args.automaticRelease
})
});
}

async function tagRelease (version) {
console.log(`Tagging release ${version}.`)
const checkoutDetails = await GitProcess.exec(['tag', '-a', '-m', version, version], ELECTRON_DIR)
console.log(`Tagging release ${version}.`);
const checkoutDetails = await GitProcess.exec(['tag', '-a', '-m', version, version], ELECTRON_DIR);
if (checkoutDetails.exitCode === 0) {
console.log(`${pass} Successfully tagged ${version}.`)
console.log(`${pass} Successfully tagged ${version}.`);
} else {
console.log(`${fail} Error tagging ${version}: ` +
`${checkoutDetails.stderr}`)
process.exit(1)
`${checkoutDetails.stderr}`);
process.exit(1);
}
}

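// confirm the bumped version with the user; --automaticRelease skips the prompt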
async function verifyNewVersion () {
const newVersion = await getNewVersion(true)
let response
const newVersion = await getNewVersion(true);
let response;
if (args.automaticRelease) {
response = 'y'
response = 'y';
} else {
response = await promptForVersion(newVersion)
response = await promptForVersion(newVersion);
}
if (response.match(/^y/i)) {
console.log(`${pass} Starting release of ${newVersion}`)
console.log(`${pass} Starting release of ${newVersion}`);
} else {
console.log(`${fail} Aborting release of ${newVersion}`)
process.exit()
console.log(`${fail} Aborting release of ${newVersion}`);
process.exit();
}
}

@@ -171,44 +171,44 @@ async function promptForVersion (version) {
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout
})
});
rl.question(`Do you want to create the release ${version.green} (y/N)? `, (answer) => {
rl.close()
resolve(answer)
})
})
rl.close();
resolve(answer);
});
});
}

// function to determine if there have been commits to master since the last release
async function changesToRelease () {
const lastCommitWasRelease = new RegExp('^Bump v[0-9.]*(-beta[0-9.]*)?(-nightly[0-9.]*)?$', 'g')
const lastCommit = await GitProcess.exec(['log', '-n', '1', '--pretty=format:\'%s\''], ELECTRON_DIR)
return !lastCommitWasRelease.test(lastCommit.stdout)
const lastCommitWasRelease = new RegExp('^Bump v[0-9.]*(-beta[0-9.]*)?(-nightly[0-9.]*)?$', 'g');
const lastCommit = await GitProcess.exec(['log', '-n', '1', '--pretty=format:\'%s\''], ELECTRON_DIR);
return !lastCommitWasRelease.test(lastCommit.stdout);
}

async function prepareRelease (isBeta, notesOnly) {
if (args.dryRun) {
const newVersion = await getNewVersion(true)
console.log(newVersion)
const newVersion = await getNewVersion(true);
console.log(newVersion);
} else {
const currentBranch = (args.branch) ? args.branch : await getCurrentBranch(ELECTRON_DIR)
const currentBranch = (args.branch) ? args.branch : await getCurrentBranch(ELECTRON_DIR);
if (notesOnly) {
const newVersion = await getNewVersion(true)
const releaseNotes = await getReleaseNotes(currentBranch, newVersion)
console.log(`Draft release notes are: \n${releaseNotes.text}`)
const newVersion = await getNewVersion(true);
const releaseNotes = await getReleaseNotes(currentBranch, newVersion);
console.log(`Draft release notes are: \n${releaseNotes.text}`);
} else {
const changes = await changesToRelease(currentBranch)
const changes = await changesToRelease(currentBranch);
if (changes) {
await verifyNewVersion()
await createRelease(currentBranch, isBeta)
await pushRelease(currentBranch)
await runReleaseBuilds(currentBranch)
await verifyNewVersion();
await createRelease(currentBranch, isBeta);
await pushRelease(currentBranch);
await runReleaseBuilds(currentBranch);
} else {
console.log('There are no new changes to this branch since the last release, aborting release.')
process.exit(1)
console.log('There are no new changes to this branch since the last release, aborting release.');
process.exit(1);
}
}
}
}

prepareRelease(!args.stable, args.notesOnly)
prepareRelease(!args.stable, args.notesOnly);

@@ -1,22 +1,22 @@
const temp = require('temp')
const fs = require('fs')
const path = require('path')
const childProcess = require('child_process')
const { getCurrentBranch, ELECTRON_DIR } = require('../lib/utils')
const request = require('request')
const semver = require('semver')
const rootPackageJson = require('../../package.json')
const temp = require('temp');
const fs = require('fs');
const path = require('path');
const childProcess = require('child_process');
const { getCurrentBranch, ELECTRON_DIR } = require('../lib/utils');
const request = require('request');
const semver = require('semver');
const rootPackageJson = require('../../package.json');
const octokit = require('@octokit/rest')({
headers: { 'User-Agent': 'electron-npm-publisher' }
})
});

if (!process.env.ELECTRON_NPM_OTP) {
console.error('Please set ELECTRON_NPM_OTP')
process.exit(1)
console.error('Please set ELECTRON_NPM_OTP');
process.exit(1);
}

let tempDir
temp.track() // track and cleanup files at exit
let tempDir;
temp.track(); // track and cleanup files at exit

const files = [
'cli.js',

@@ -25,7 +25,7 @@ const files = [
'package.json',
'README.md',
'LICENSE'
]
];

const jsonFields = [
'name',

@@ -35,58 +35,58 @@ const jsonFields = [
'license',
'author',
'keywords'
]
];

let npmTag = ''
let npmTag = '';

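// stage the npm package in a temp dir, fetch electron.d.ts for the release, then pack and publish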
new Promise((resolve, reject) => {
temp.mkdir('electron-npm', (err, dirPath) => {
if (err) {
reject(err)
reject(err);
} else {
resolve(dirPath)
resolve(dirPath);
}
})
});
})
.then((dirPath) => {
tempDir = dirPath
tempDir = dirPath;
// copy files from `/npm` to temp directory
files.forEach((name) => {
const noThirdSegment = name === 'README.md' || name === 'LICENSE'
const noThirdSegment = name === 'README.md' || name === 'LICENSE';
fs.writeFileSync(
path.join(tempDir, name),
fs.readFileSync(path.join(ELECTRON_DIR, noThirdSegment ? '' : 'npm', name))
)
})
);
});
// copy from root package.json to temp/package.json
const packageJson = require(path.join(tempDir, 'package.json'))
const packageJson = require(path.join(tempDir, 'package.json'));
jsonFields.forEach((fieldName) => {
packageJson[fieldName] = rootPackageJson[fieldName]
})
packageJson[fieldName] = rootPackageJson[fieldName];
});
fs.writeFileSync(
path.join(tempDir, 'package.json'),
JSON.stringify(packageJson, null, 2)
)
);

return octokit.repos.listReleases({
owner: 'electron',
repo: rootPackageJson.version.indexOf('nightly') > 0 ? 'nightlies' : 'electron'
})
});
})
.then((releases) => {
// download electron.d.ts from release
const release = releases.data.find(
(release) => release.tag_name === `v${rootPackageJson.version}`
)
);
if (!release) {
throw new Error(`cannot find release with tag v${rootPackageJson.version}`)
throw new Error(`cannot find release with tag v${rootPackageJson.version}`);
}
return release
return release;
})
.then((release) => {
const tsdAsset = release.assets.find((asset) => asset.name === 'electron.d.ts')
const tsdAsset = release.assets.find((asset) => asset.name === 'electron.d.ts');
if (!tsdAsset) {
throw new Error(`cannot find electron.d.ts from v${rootPackageJson.version} release assets`)
throw new Error(`cannot find electron.d.ts from v${rootPackageJson.version} release assets`);
}
return new Promise((resolve, reject) => {
request.get({

@@ -97,78 +97,78 @@ new Promise((resolve, reject) => {
}
}, (err, response, body) => {
if (err || response.statusCode !== 200) {
reject(err || new Error('Cannot download electron.d.ts'))
reject(err || new Error('Cannot download electron.d.ts'));
} else {
fs.writeFileSync(path.join(tempDir, 'electron.d.ts'), body)
resolve(release)
fs.writeFileSync(path.join(tempDir, 'electron.d.ts'), body);
resolve(release);
}
})
})
});
});
})
.then(async (release) => {
const currentBranch = await getCurrentBranch()
const currentBranch = await getCurrentBranch();

if (release.tag_name.indexOf('nightly') > 0) {
if (currentBranch === 'master') {
// Nightlies get published to their own module, so master nightlies should be tagged as latest
npmTag = 'latest'
npmTag = 'latest';
} else {
npmTag = `nightly-${currentBranch}`
npmTag = `nightly-${currentBranch}`;
}

const currentJson = JSON.parse(fs.readFileSync(path.join(tempDir, 'package.json'), 'utf8'))
currentJson.name = 'electron-nightly'
rootPackageJson.name = 'electron-nightly'
const currentJson = JSON.parse(fs.readFileSync(path.join(tempDir, 'package.json'), 'utf8'));
currentJson.name = 'electron-nightly';
rootPackageJson.name = 'electron-nightly';

fs.writeFileSync(
path.join(tempDir, 'package.json'),
JSON.stringify(currentJson, null, 2)
)
);
} else {
if (currentBranch === 'master') {
// This should never happen, master releases should be nightly releases
// this is here just-in-case
npmTag = 'master'
npmTag = 'master';
} else if (!release.prerelease) {
// Tag the release with a `2-0-x` style tag
npmTag = currentBranch
npmTag = currentBranch;
} else {
// Tag the release with a `beta-3-0-x` style tag
npmTag = `beta-${currentBranch}`
npmTag = `beta-${currentBranch}`;
}
}
})
.then(() => childProcess.execSync('npm pack', { cwd: tempDir }))
.then(() => {
// test that the package can install electron prebuilt from github release
const tarballPath = path.join(tempDir, `${rootPackageJson.name}-${rootPackageJson.version}.tgz`)
const tarballPath = path.join(tempDir, `${rootPackageJson.name}-${rootPackageJson.version}.tgz`);
return new Promise((resolve, reject) => {
childProcess.execSync(`npm install ${tarballPath} --force --silent`, {
env: Object.assign({}, process.env, { electron_config_cache: tempDir }),
cwd: tempDir
})
resolve(tarballPath)
})
});
resolve(tarballPath);
});
})
.then((tarballPath) => childProcess.execSync(`npm publish ${tarballPath} --tag ${npmTag} --otp=${process.env.ELECTRON_NPM_OTP}`))
.then(() => {
const currentTags = JSON.parse(childProcess.execSync('npm show electron dist-tags --json').toString())
const localVersion = rootPackageJson.version
const parsedLocalVersion = semver.parse(localVersion)
const currentTags = JSON.parse(childProcess.execSync('npm show electron dist-tags --json').toString());
const localVersion = rootPackageJson.version;
const parsedLocalVersion = semver.parse(localVersion);
if (rootPackageJson.name === 'electron') {
// We should only customly add dist tags for non-nightly releases where the package name is still
// "electron"
if (parsedLocalVersion.prerelease.length === 0 &&
semver.gt(localVersion, currentTags.latest)) {
childProcess.execSync(`npm dist-tag add electron@${localVersion} latest --otp=${process.env.ELECTRON_NPM_OTP}`)
childProcess.execSync(`npm dist-tag add electron@${localVersion} latest --otp=${process.env.ELECTRON_NPM_OTP}`);
}
if (parsedLocalVersion.prerelease[0] === 'beta' &&
semver.gt(localVersion, currentTags.beta)) {
childProcess.execSync(`npm dist-tag add electron@${localVersion} beta --otp=${process.env.ELECTRON_NPM_OTP}`)
childProcess.execSync(`npm dist-tag add electron@${localVersion} beta --otp=${process.env.ELECTRON_NPM_OTP}`);
}
}
})
.catch((err) => {
console.error(`Error: ${err}`)
process.exit(1)
})
console.error(`Error: ${err}`);
process.exit(1);
});

@@ -1,39 +1,39 @@
#!/usr/bin/env node

if (!process.env.CI) require('dotenv-safe').load()
if (!process.env.CI) require('dotenv-safe').load();
const args = require('minimist')(process.argv.slice(2), {
string: ['tag', 'releaseID'],
default: { releaseID: '' }
})
const path = require('path')
const { execSync } = require('child_process')
const { GitProcess } = require('dugite')
const { getCurrentBranch, ELECTRON_DIR } = require('../lib/utils.js')
});
const path = require('path');
const { execSync } = require('child_process');
const { GitProcess } = require('dugite');
const { getCurrentBranch, ELECTRON_DIR } = require('../lib/utils.js');

const octokit = require('@octokit/rest')({
auth: process.env.ELECTRON_GITHUB_TOKEN
})
});

require('colors')
const pass = '✓'.green
const fail = '✗'.red
require('colors');
const pass = '✓'.green;
const fail = '✗'.red;

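// find the most recent "Bump <tag>" commit for the given tag so it can be reverted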
function getLastBumpCommit (tag) {
const data = execSync(`git log -n1 --grep "Bump ${tag}" --format='format:{"hash": "%H", "message": "%s"}'`).toString()
return JSON.parse(data)
const data = execSync(`git log -n1 --grep "Bump ${tag}" --format='format:{"hash": "%H", "message": "%s"}'`).toString();
return JSON.parse(data);
}

async function revertBumpCommit (tag) {
const branch = await getCurrentBranch()
const commitToRevert = getLastBumpCommit(tag).hash
await GitProcess.exec(['revert', commitToRevert], ELECTRON_DIR)
const pushDetails = await GitProcess.exec(['push', 'origin', `HEAD:${branch}`, '--follow-tags'], ELECTRON_DIR)
const branch = await getCurrentBranch();
const commitToRevert = getLastBumpCommit(tag).hash;
await GitProcess.exec(['revert', commitToRevert], ELECTRON_DIR);
const pushDetails = await GitProcess.exec(['push', 'origin', `HEAD:${branch}`, '--follow-tags'], ELECTRON_DIR);
if (pushDetails.exitCode === 0) {
console.log(`${pass} successfully reverted release commit.`)
console.log(`${pass} successfully reverted release commit.`);
} else {
const error = GitProcess.parseError(pushDetails.stderr)
console.error(`${fail} could not push release commit: `, error)
process.exit(1)
const error = GitProcess.parseError(pushDetails.stderr);
console.error(`${fail} could not push release commit: `, error);
process.exit(1);
}
}

@@ -43,22 +43,22 @@ async function deleteDraft (releaseId, targetRepo) {
owner: 'electron',
repo: targetRepo,
release_id: parseInt(releaseId, 10)
})
});
if (!result.data.draft) {
console.log(`${fail} published releases cannot be deleted.`)
return false
console.log(`${fail} published releases cannot be deleted.`);
return false;
} else {
await octokit.repos.deleteRelease({
owner: 'electron',
repo: targetRepo,
release_id: result.data.id
})
});
}
console.log(`${pass} successfully deleted draft with id ${releaseId} from ${targetRepo}`)
return true
console.log(`${pass} successfully deleted draft with id ${releaseId} from ${targetRepo}`);
return true;
} catch (err) {
console.error(`${fail} couldn't delete draft with id ${releaseId} from ${targetRepo}: `, err)
return false
console.error(`${fail} couldn't delete draft with id ${releaseId} from ${targetRepo}: `, err);
return false;
}
}

@@ -68,42 +68,42 @@ async function deleteTag (tag, targetRepo) {
owner: 'electron',
repo: targetRepo,
ref: `tags/${tag}`
})
console.log(`${pass} successfully deleted tag ${tag} from ${targetRepo}`)
});
console.log(`${pass} successfully deleted tag ${tag} from ${targetRepo}`);
} catch (err) {
console.log(`${fail} couldn't delete tag ${tag} from ${targetRepo}: `, err)
console.log(`${fail} couldn't delete tag ${tag} from ${targetRepo}: `, err);
}
}

async function cleanReleaseArtifacts () {
const releaseId = args.releaseID.length > 0 ? args.releaseID : null
const isNightly = args.tag.includes('nightly')
const releaseId = args.releaseID.length > 0 ? args.releaseID : null;
const isNightly = args.tag.includes('nightly');

// try to revert commit regardless of tag and draft deletion status
await revertBumpCommit(args.tag)
await revertBumpCommit(args.tag);

if (releaseId) {
if (isNightly) {
await deleteDraft(releaseId, 'nightlies')
await deleteDraft(releaseId, 'nightlies');

// We only need to delete the Electron tag since the
// nightly tag is only created at publish-time.
await deleteTag(args.tag, 'electron')
await deleteTag(args.tag, 'electron');
} else {
const deletedElectronDraft = await deleteDraft(releaseId, 'electron')
const deletedElectronDraft = await deleteDraft(releaseId, 'electron');
// don't delete tag unless draft deleted successfully
if (deletedElectronDraft) {
await deleteTag(args.tag, 'electron')
await deleteTag(args.tag, 'electron');
}
}
} else {
await Promise.all([
deleteTag(args.tag, 'electron'),
deleteTag(args.tag, 'nightlies')
])
]);
}

console.log(`${pass} failed release artifact cleanup complete`)
console.log(`${pass} failed release artifact cleanup complete`);
}

cleanReleaseArtifacts()
cleanReleaseArtifacts();

@@ -1,6 +1,6 @@
#!/usr/bin/env node

if (!process.env.CI) require('dotenv-safe').load()
if (!process.env.CI) require('dotenv-safe').load();

const args = require('minimist')(process.argv.slice(2), {
boolean: [

@@ -10,87 +10,87 @@ const args = require('minimist')(process.argv.slice(2), {
'verboseNugget'
],
default: { verboseNugget: false }
})
const fs = require('fs')
const { execSync } = require('child_process')
const nugget = require('nugget')
const got = require('got')
const pkg = require('../../package.json')
const pkgVersion = `v${pkg.version}`
const path = require('path')
const sumchecker = require('sumchecker')
const temp = require('temp').track()
const { URL } = require('url')
});
const fs = require('fs');
const { execSync } = require('child_process');
const nugget = require('nugget');
const got = require('got');
const pkg = require('../../package.json');
const pkgVersion = `v${pkg.version}`;
const path = require('path');
const sumchecker = require('sumchecker');
const temp = require('temp').track();
const { URL } = require('url');

require('colors')
const pass = '✓'.green
const fail = '✗'.red
require('colors');
const pass = '✓'.green;
const fail = '✗'.red;

const { ELECTRON_DIR } = require('../lib/utils')
const { ELECTRON_DIR } = require('../lib/utils');

const octokit = require('@octokit/rest')({
auth: process.env.ELECTRON_GITHUB_TOKEN
})
});

const targetRepo = pkgVersion.indexOf('nightly') > 0 ? 'nightlies' : 'electron'
let failureCount = 0
const targetRepo = pkgVersion.indexOf('nightly') > 0 ? 'nightlies' : 'electron';
let failureCount = 0;

async function getDraftRelease (version, skipValidation) {
const releaseInfo = await octokit.repos.listReleases({
owner: 'electron',
repo: targetRepo
})
});

const versionToCheck = version || pkgVersion
const versionToCheck = version || pkgVersion;
const drafts = releaseInfo.data.filter(release => {
return release.tag_name === versionToCheck && release.draft === true
})
return release.tag_name === versionToCheck && release.draft === true;
});

const draft = drafts[0]
const draft = drafts[0];
if (!skipValidation) {
failureCount = 0
check(drafts.length === 1, 'one draft exists', true)
failureCount = 0;
check(drafts.length === 1, 'one draft exists', true);
if (versionToCheck.indexOf('beta') > -1) {
check(draft.prerelease, 'draft is a prerelease')
check(draft.prerelease, 'draft is a prerelease');
}
check(draft.body.length > 50 && !draft.body.includes('(placeholder)'), 'draft has release notes')
check((failureCount === 0), 'Draft release looks good to go.', true)
check(draft.body.length > 50 && !draft.body.includes('(placeholder)'), 'draft has release notes');
check((failureCount === 0), 'Draft release looks good to go.', true);
}
return draft
return draft;
}

async function validateReleaseAssets (release, validatingRelease) {
const requiredAssets = assetsForVersion(release.tag_name, validatingRelease).sort()
const extantAssets = release.assets.map(asset => asset.name).sort()
const downloadUrls = release.assets.map(asset => asset.browser_download_url).sort()
const requiredAssets = assetsForVersion(release.tag_name, validatingRelease).sort();
const extantAssets = release.assets.map(asset => asset.name).sort();
const downloadUrls = release.assets.map(asset => asset.browser_download_url).sort();

failureCount = 0
failureCount = 0;
requiredAssets.forEach(asset => {
check(extantAssets.includes(asset), asset)
})
check((failureCount === 0), 'All required GitHub assets exist for release', true)
check(extantAssets.includes(asset), asset);
});
check((failureCount === 0), 'All required GitHub assets exist for release', true);

if (!validatingRelease || !release.draft) {
if (release.draft) {
await verifyAssets(release)
await verifyAssets(release);
} else {
await verifyShasums(downloadUrls)
.catch(err => {
console.log(`${fail} error verifyingShasums`, err)
})
console.log(`${fail} error verifyingShasums`, err);
});
}
const s3Urls = s3UrlsForVersion(release.tag_name)
await verifyShasums(s3Urls, true)
const s3Urls = s3UrlsForVersion(release.tag_name);
await verifyShasums(s3Urls, true);
}
}

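// log a pass/fail line for a single assertion; optionally exit the process on failure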
function check (condition, statement, exitIfFail = false) {
if (condition) {
console.log(`${pass} ${statement}`)
console.log(`${pass} ${statement}`);
} else {
failureCount++
console.log(`${fail} ${statement}`)
if (exitIfFail) process.exit(1)
failureCount++;
console.log(`${fail} ${statement}`);
if (exitIfFail) process.exit(1);
}
}

@@ -153,15 +153,15 @@ function assetsForVersion (version, validatingRelease) {
`electron-${version}-win32-ia32-toolchain-profile.zip`,
`electron-${version}-win32-x64-toolchain-profile.zip`,
`electron-${version}-win32-arm64-toolchain-profile.zip`
]
];
if (!validatingRelease) {
patterns.push('SHASUMS256.txt')
patterns.push('SHASUMS256.txt');
}
return patterns
return patterns;
}

function s3UrlsForVersion (version) {
const bucket = 'https://gh-contractor-zcbenz.s3.amazonaws.com/'
const bucket = 'https://gh-contractor-zcbenz.s3.amazonaws.com/';
const patterns = [
`${bucket}atom-shell/dist/${version}/iojs-${version}-headers.tar.gz`,
`${bucket}atom-shell/dist/${version}/iojs-${version}.tar.gz`,

@@ -173,66 +173,66 @@ function s3UrlsForVersion (version) {
`${bucket}atom-shell/dist/${version}/SHASUMS.txt`,
`${bucket}atom-shell/dist/${version}/SHASUMS256.txt`,
`${bucket}atom-shell/dist/index.json`
]
return patterns
];
return patterns;
}

function runScript (scriptName, scriptArgs, cwd) {
const scriptCommand = `${scriptName} ${scriptArgs.join(' ')}`
const scriptCommand = `${scriptName} ${scriptArgs.join(' ')}`;
const scriptOptions = {
encoding: 'UTF-8'
}
if (cwd) scriptOptions.cwd = cwd
};
if (cwd) scriptOptions.cwd = cwd;
try {
return execSync(scriptCommand, scriptOptions)
return execSync(scriptCommand, scriptOptions);
} catch (err) {
console.log(`${fail} Error running ${scriptName}`, err)
process.exit(1)
console.log(`${fail} Error running ${scriptName}`, err);
process.exit(1);
}
}

function uploadNodeShasums () {
console.log('Uploading Node SHASUMS file to S3.')
const scriptPath = path.join(ELECTRON_DIR, 'script', 'release', 'uploaders', 'upload-node-checksums.py')
runScript(scriptPath, ['-v', pkgVersion])
console.log(`${pass} Done uploading Node SHASUMS file to S3.`)
console.log('Uploading Node SHASUMS file to S3.');
const scriptPath = path.join(ELECTRON_DIR, 'script', 'release', 'uploaders', 'upload-node-checksums.py');
runScript(scriptPath, ['-v', pkgVersion]);
console.log(`${pass} Done uploading Node SHASUMS file to S3.`);
}

function uploadIndexJson () {
console.log('Uploading index.json to S3.')
const scriptPath = path.join(ELECTRON_DIR, 'script', 'release', 'uploaders', 'upload-index-json.py')
runScript(scriptPath, [pkgVersion])
console.log(`${pass} Done uploading index.json to S3.`)
console.log('Uploading index.json to S3.');
const scriptPath = path.join(ELECTRON_DIR, 'script', 'release', 'uploaders', 'upload-index-json.py');
runScript(scriptPath, [pkgVersion]);
console.log(`${pass} Done uploading index.json to S3.`);
}

async function createReleaseShasums (release) {
const fileName = 'SHASUMS256.txt'
const existingAssets = release.assets.filter(asset => asset.name === fileName)
const fileName = 'SHASUMS256.txt';
const existingAssets = release.assets.filter(asset => asset.name === fileName);
if (existingAssets.length > 0) {
console.log(`${fileName} already exists on GitHub; deleting before creating new file.`)
console.log(`${fileName} already exists on GitHub; deleting before creating new file.`);
await octokit.repos.deleteReleaseAsset({
owner: 'electron',
repo: targetRepo,
asset_id: existingAssets[0].id
}).catch(err => {
console.log(`${fail} Error deleting ${fileName} on GitHub:`, err)
})
console.log(`${fail} Error deleting ${fileName} on GitHub:`, err);
});
}
console.log(`Creating and uploading the release ${fileName}.`)
const scriptPath = path.join(ELECTRON_DIR, 'script', 'release', 'merge-electron-checksums.py')
const checksums = runScript(scriptPath, ['-v', pkgVersion])
console.log(`Creating and uploading the release ${fileName}.`);
const scriptPath = path.join(ELECTRON_DIR, 'script', 'release', 'merge-electron-checksums.py');
const checksums = runScript(scriptPath, ['-v', pkgVersion]);

console.log(`${pass} Generated release SHASUMS.`)
const filePath = await saveShaSumFile(checksums, fileName)
console.log(`${pass} Generated release SHASUMS.`);
const filePath = await saveShaSumFile(checksums, fileName);

console.log(`${pass} Created ${fileName} file.`)
await uploadShasumFile(filePath, fileName, release.id)
console.log(`${pass} Created ${fileName} file.`);
await uploadShasumFile(filePath, fileName, release.id);

console.log(`${pass} Successfully uploaded ${fileName} to GitHub.`)
console.log(`${pass} Successfully uploaded ${fileName} to GitHub.`);
}

async function uploadShasumFile (filePath, fileName, releaseId) {
const uploadUrl = `https://uploads.github.com/repos/electron/${targetRepo}/releases/${releaseId}/assets{?name,label}`
const uploadUrl = `https://uploads.github.com/repos/electron/${targetRepo}/releases/${releaseId}/assets{?name,label}`;
return octokit.repos.uploadReleaseAsset({
url: uploadUrl,
headers: {

@@ -242,29 +242,29 @@ async function uploadShasumFile (filePath, fileName, releaseId) {
file: fs.createReadStream(filePath),
name: fileName
}).catch(err => {
console.log(`${fail} Error uploading ${filePath} to GitHub:`, err)
process.exit(1)
})
console.log(`${fail} Error uploading ${filePath} to GitHub:`, err);
process.exit(1);
});
}

function saveShaSumFile (checksums, fileName) {
return new Promise((resolve, reject) => {
temp.open(fileName, (err, info) => {
if (err) {
console.log(`${fail} Could not create ${fileName} file`)
process.exit(1)
console.log(`${fail} Could not create ${fileName} file`);
process.exit(1);
} else {
fs.writeFileSync(info.fd, checksums)
fs.writeFileSync(info.fd, checksums);
fs.close(info.fd, (err) => {
if (err) {
console.log(`${fail} Could close ${fileName} file`)
process.exit(1)
console.log(`${fail} Could close ${fileName} file`);
process.exit(1);
}
resolve(info.path)
})
resolve(info.path);
});
}
})
})
});
});
}

async function publishRelease (release) {

@@ -275,34 +275,34 @@ async function publishRelease (release) {
tag_name: release.tag_name,
draft: false
}).catch(err => {
console.log(`${fail} Error publishing release:`, err)
process.exit(1)
})
console.log(`${fail} Error publishing release:`, err);
process.exit(1);
});
}

async function makeRelease (releaseToValidate) {
if (releaseToValidate) {
if (releaseToValidate === true) {
releaseToValidate = pkgVersion
releaseToValidate = pkgVersion;
} else {
console.log('Release to validate !=== true')
console.log('Release to validate !=== true');
}
console.log(`Validating release ${releaseToValidate}`)
const release = await getDraftRelease(releaseToValidate)
await validateReleaseAssets(release, true)
console.log(`Validating release ${releaseToValidate}`);
const release = await getDraftRelease(releaseToValidate);
await validateReleaseAssets(release, true);
} else {
let draftRelease = await getDraftRelease()
uploadNodeShasums()
uploadIndexJson()
let draftRelease = await getDraftRelease();
uploadNodeShasums();
uploadIndexJson();

await createReleaseShasums(draftRelease)
await createReleaseShasums(draftRelease);

// Fetch latest version of release before verifying
draftRelease = await getDraftRelease(pkgVersion, true)
await validateReleaseAssets(draftRelease)
await publishRelease(draftRelease)
draftRelease = await getDraftRelease(pkgVersion, true);
await validateReleaseAssets(draftRelease);
await publishRelease(draftRelease);
console.log(`${pass} SUCCESS!!! Release has been published. Please run ` +
'"npm run publish-to-npm" to publish release to npm.')
'"npm run publish-to-npm" to publish release to npm.');
}
}

@@ -310,19 +310,19 @@ async function makeTempDir () {
return new Promise((resolve, reject) => {
temp.mkdir('electron-publish', (err, dirPath) => {
if (err) {
reject(err)
reject(err);
} else {
resolve(dirPath)
resolve(dirPath);
}
})
})
});
});
}

async function verifyAssets (release) {
const downloadDir = await makeTempDir()
const downloadDir = await makeTempDir();

console.log('Downloading files from GitHub to verify shasums')
const shaSumFile = 'SHASUMS256.txt'
console.log('Downloading files from GitHub to verify shasums');
const shaSumFile = 'SHASUMS256.txt';

let filesToCheck = await Promise.all(release.assets.map(async asset => {
const requestOptions = await octokit.repos.getReleaseAsset.endpoint({

@@ -332,26 +332,26 @@ async function verifyAssets (release) {
headers: {
Accept: 'application/octet-stream'
}
})
});

const { url, headers } = requestOptions
headers.authorization = `token ${process.env.ELECTRON_GITHUB_TOKEN}`
const { url, headers } = requestOptions;
headers.authorization = `token ${process.env.ELECTRON_GITHUB_TOKEN}`;

const response = await got(url, {
followRedirect: false,
method: 'HEAD',
headers
})
});

await downloadFiles(response.headers.location, downloadDir, asset.name)
return asset.name
await downloadFiles(response.headers.location, downloadDir, asset.name);
return asset.name;
})).catch(err => {
console.log(`${fail} Error downloading files from GitHub`, err)
process.exit(1)
})
console.log(`${fail} Error downloading files from GitHub`, err);
process.exit(1);
});

filesToCheck = filesToCheck.filter(fileName => fileName !== shaSumFile)
let checkerOpts
filesToCheck = filesToCheck.filter(fileName => fileName !== shaSumFile);
let checkerOpts;
await validateChecksums({
algorithm: 'sha256',
filesToCheck,

@@ -359,71 +359,71 @@ async function verifyAssets (release) {
shaSumFile,
checkerOpts,
fileSource: 'GitHub'
})
});
}

function downloadFiles (urls, directory, targetName) {
return new Promise((resolve, reject) => {
const nuggetOpts = { dir: directory }
nuggetOpts.quiet = !args.verboseNugget
if (targetName) nuggetOpts.target = targetName
const nuggetOpts = { dir: directory };
nuggetOpts.quiet = !args.verboseNugget;
if (targetName) nuggetOpts.target = targetName;

nugget(urls, nuggetOpts, (err) => {
if (err) {
reject(err)
reject(err);
} else {
console.log(`${pass} all files downloaded successfully!`)
resolve()
console.log(`${pass} all files downloaded successfully!`);
resolve();
}
})
})
});
});
}

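// download release artifacts (from GitHub or S3) and validate them against the published SHASUMS files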
async function verifyShasums (urls, isS3) {
const fileSource = isS3 ? 'S3' : 'GitHub'
console.log(`Downloading files from ${fileSource} to verify shasums`)
const downloadDir = await makeTempDir()
let filesToCheck = []
const fileSource = isS3 ? 'S3' : 'GitHub';
console.log(`Downloading files from ${fileSource} to verify shasums`);
const downloadDir = await makeTempDir();
let filesToCheck = [];
try {
if (!isS3) {
await downloadFiles(urls, downloadDir)
await downloadFiles(urls, downloadDir);
filesToCheck = urls.map(url => {
const currentUrl = new URL(url)
return path.basename(currentUrl.pathname)
}).filter(file => file.indexOf('SHASUMS') === -1)
const currentUrl = new URL(url);
return path.basename(currentUrl.pathname);
}).filter(file => file.indexOf('SHASUMS') === -1);
} else {
const s3VersionPath = `/atom-shell/dist/${pkgVersion}/`
const s3VersionPath = `/atom-shell/dist/${pkgVersion}/`;
await Promise.all(urls.map(async (url) => {
const currentUrl = new URL(url)
const dirname = path.dirname(currentUrl.pathname)
const filename = path.basename(currentUrl.pathname)
const s3VersionPathIdx = dirname.indexOf(s3VersionPath)
const currentUrl = new URL(url);
const dirname = path.dirname(currentUrl.pathname);
const filename = path.basename(currentUrl.pathname);
const s3VersionPathIdx = dirname.indexOf(s3VersionPath);
if (s3VersionPathIdx === -1 || dirname === s3VersionPath) {
if (s3VersionPathIdx !== -1 && filename.indexOf('SHASUMS') === -1) {
filesToCheck.push(filename)
filesToCheck.push(filename);
}
await downloadFiles(url, downloadDir)
await downloadFiles(url, downloadDir);
} else {
const subDirectory = dirname.substr(s3VersionPathIdx + s3VersionPath.length)
const fileDirectory = path.join(downloadDir, subDirectory)
const subDirectory = dirname.substr(s3VersionPathIdx + s3VersionPath.length);
const fileDirectory = path.join(downloadDir, subDirectory);
try {
fs.statSync(fileDirectory)
fs.statSync(fileDirectory);
} catch (err) {
fs.mkdirSync(fileDirectory)
fs.mkdirSync(fileDirectory);
}
filesToCheck.push(path.join(subDirectory, filename))
await downloadFiles(url, fileDirectory)
filesToCheck.push(path.join(subDirectory, filename));
await downloadFiles(url, fileDirectory);
}
}))
}));
}
} catch (err) {
console.log(`${fail} Error downloading files from ${fileSource}`, err)
process.exit(1)
console.log(`${fail} Error downloading files from ${fileSource}`, err);
process.exit(1);
}
console.log(`${pass} Successfully downloaded the files from ${fileSource}.`)
let checkerOpts
console.log(`${pass} Successfully downloaded the files from ${fileSource}.`);
let checkerOpts;
if (isS3) {
checkerOpts = { defaultTextEncoding: 'binary' }
checkerOpts = { defaultTextEncoding: 'binary' };
}

await validateChecksums({
@@ -433,7 +433,7 @@ async function verifyShasums (urls, isS3) {
shaSumFile: 'SHASUMS256.txt',
checkerOpts,
fileSource
})
});

if (isS3) {
await validateChecksums({
@@ -443,37 +443,37 @@ async function verifyShasums (urls, isS3) {
shaSumFile: 'SHASUMS.txt',
checkerOpts,
fileSource
})
});
}
}

async function validateChecksums (validationArgs) {
console.log(`Validating checksums for files from ${validationArgs.fileSource} ` +
`against ${validationArgs.shaSumFile}.`)
const shaSumFilePath = path.join(validationArgs.fileDirectory, validationArgs.shaSumFile)
`against ${validationArgs.shaSumFile}.`);
const shaSumFilePath = path.join(validationArgs.fileDirectory, validationArgs.shaSumFile);
const checker = new sumchecker.ChecksumValidator(validationArgs.algorithm,
shaSumFilePath, validationArgs.checkerOpts)
shaSumFilePath, validationArgs.checkerOpts);
await checker.validate(validationArgs.fileDirectory, validationArgs.filesToCheck)
.catch(err => {
if (err instanceof sumchecker.ChecksumMismatchError) {
console.error(`${fail} The checksum of ${err.filename} from ` +
`${validationArgs.fileSource} did not match the shasum in ` +
`${validationArgs.shaSumFile}`)
`${validationArgs.shaSumFile}`);
} else if (err instanceof sumchecker.ChecksumParseError) {
console.error(`${fail} The checksum file ${validationArgs.shaSumFile} ` +
`from ${validationArgs.fileSource} could not be parsed.`, err)
`from ${validationArgs.fileSource} could not be parsed.`, err);
} else if (err instanceof sumchecker.NoChecksumFoundError) {
console.error(`${fail} The file ${err.filename} from ` +
`${validationArgs.fileSource} was not in the shasum file ` +
`${validationArgs.shaSumFile}.`)
`${validationArgs.shaSumFile}.`);
} else {
console.error(`${fail} Error matching files from ` +
`${validationArgs.fileSource} shasums in ${validationArgs.shaSumFile}.`, err)
`${validationArgs.fileSource} shasums in ${validationArgs.shaSumFile}.`, err);
}
process.exit(1)
})
process.exit(1);
});
console.log(`${pass} All files from ${validationArgs.fileSource} match ` +
`shasums defined in ${validationArgs.shaSumFile}.`)
`shasums defined in ${validationArgs.shaSumFile}.`);
}

makeRelease(args.validateRelease)
makeRelease(args.validateRelease);
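Note: the validateChecksums helper above leans entirely on the sumchecker package — the ChecksumValidator constructor, its validate(directory, files) promise, and the three error classes are all sumchecker API, used exactly as the diff shows. A minimal standalone sketch of that API, assuming a SHASUMS256.txt sitting next to the downloaded artifacts (the directory and file names here are invented for illustration):

const path = require('path');
const sumchecker = require('sumchecker');

// Hypothetical inputs: a directory of downloaded release assets plus the
// checksum file fetched alongside them.
const downloadDir = '/tmp/electron-assets';
const validator = new sumchecker.ChecksumValidator('sha256',
  path.join(downloadDir, 'SHASUMS256.txt'), { defaultTextEncoding: 'binary' });

validator.validate(downloadDir, ['electron-v9.0.0-linux-x64.zip'])
  .then(() => console.log('checksums match'))
  .catch((err) => {
    // ChecksumMismatchError, ChecksumParseError and NoChecksumFoundError are
    // the three failure cases handled individually in validateChecksums above.
    console.error('checksum validation failed:', err);
  });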
@@ -1,40 +1,40 @@
if (!process.env.CI) require('dotenv-safe').load()
if (!process.env.CI) require('dotenv-safe').load();

const fs = require('fs')
const fs = require('fs');

const octokit = require('@octokit/rest')({
auth: process.env.ELECTRON_GITHUB_TOKEN
})
});

if (process.argv.length < 6) {
console.log('Usage: upload-to-github filePath fileName releaseId')
process.exit(1)
console.log('Usage: upload-to-github filePath fileName releaseId');
process.exit(1);
}

const filePath = process.argv[2]
const fileName = process.argv[3]
const releaseId = process.argv[4]
const releaseVersion = process.argv[5]
const filePath = process.argv[2];
const fileName = process.argv[3];
const releaseId = process.argv[4];
const releaseVersion = process.argv[5];

const getHeaders = (filePath, fileName) => {
const extension = fileName.split('.').pop()
const size = fs.statSync(filePath).size
const extension = fileName.split('.').pop();
const size = fs.statSync(filePath).size;
const options = {
json: 'text/json',
zip: 'application/zip',
txt: 'text/plain',
ts: 'application/typescript'
}
};

return {
'content-type': options[extension],
'content-length': size
}
}
};
};

const targetRepo = releaseVersion.indexOf('nightly') > 0 ? 'nightlies' : 'electron'
const uploadUrl = `https://uploads.github.com/repos/electron/${targetRepo}/releases/${releaseId}/assets{?name,label}`
let retry = 0
const targetRepo = releaseVersion.indexOf('nightly') > 0 ? 'nightlies' : 'electron';
const uploadUrl = `https://uploads.github.com/repos/electron/${targetRepo}/releases/${releaseId}/assets{?name,label}`;
let retry = 0;

function uploadToGitHub () {
octokit.repos.uploadReleaseAsset({
@@ -43,12 +43,12 @@ function uploadToGitHub () {
file: fs.createReadStream(filePath),
name: fileName
}).then(() => {
console.log(`Successfully uploaded ${fileName} to GitHub.`)
process.exit()
console.log(`Successfully uploaded ${fileName} to GitHub.`);
process.exit();
}).catch((err) => {
if (retry < 4) {
console.log(`Error uploading ${fileName} to GitHub, will retry. Error was:`, err)
retry++
console.log(`Error uploading ${fileName} to GitHub, will retry. Error was:`, err);
retry++;

octokit.repos.listAssetsForRelease({
owner: 'electron',
@@ -56,31 +56,31 @@ function uploadToGitHub () {
release_id: releaseId,
per_page: 100
}).then(assets => {
console.log('Got list of assets for existing release:')
console.log(JSON.stringify(assets.data, null, ' '))
const existingAssets = assets.data.filter(asset => asset.name === fileName)
console.log('Got list of assets for existing release:');
console.log(JSON.stringify(assets.data, null, ' '));
const existingAssets = assets.data.filter(asset => asset.name === fileName);

if (existingAssets.length > 0) {
console.log(`${fileName} already exists; will delete before retrying upload.`)
console.log(`${fileName} already exists; will delete before retrying upload.`);
octokit.repos.deleteReleaseAsset({
owner: 'electron',
repo: targetRepo,
asset_id: existingAssets[0].id
}).catch((deleteErr) => {
console.log(`Failed to delete existing asset ${fileName}. Error was:`, deleteErr)
}).then(uploadToGitHub)
console.log(`Failed to delete existing asset ${fileName}. Error was:`, deleteErr);
}).then(uploadToGitHub);
} else {
console.log(`Current asset ${fileName} not found in existing assets; retrying upload.`)
uploadToGitHub()
console.log(`Current asset ${fileName} not found in existing assets; retrying upload.`);
uploadToGitHub();
}
}).catch((getReleaseErr) => {
console.log('Fatal: Unable to get current release assets via getRelease! Error was:', getReleaseErr)
})
console.log('Fatal: Unable to get current release assets via getRelease! Error was:', getReleaseErr);
});
} else {
console.log(`Error retrying uploading ${fileName} to GitHub:`, err)
process.exitCode = 1
console.log(`Error retrying uploading ${fileName} to GitHub:`, err);
process.exitCode = 1;
}
})
});
}

uploadToGitHub()
uploadToGitHub();
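Aside: stripped of the Octokit specifics, the catch branch above is a bounded retry with a cleanup step between attempts — delete the possibly half-uploaded asset, then call uploadToGitHub again, giving up after four retries. A rough, generic sketch of that shape (the names here are illustrative, not part of the script):

const MAX_RETRIES = 4; // mirrors the `retry < 4` bound above
let attempts = 0;

async function uploadWithRetry (upload, cleanup) {
  try {
    await upload();
  } catch (err) {
    if (attempts >= MAX_RETRIES) throw err;
    attempts++;
    await cleanup(); // e.g. delete the partially uploaded asset first
    return uploadWithRetry(upload, cleanup);
  }
}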
@@ -1,26 +1,26 @@
#!/usr/bin/env node

const { GitProcess } = require('dugite')
const fs = require('fs')
const semver = require('semver')
const path = require('path')
const { promisify } = require('util')
const minimist = require('minimist')
const { GitProcess } = require('dugite');
const fs = require('fs');
const semver = require('semver');
const path = require('path');
const { promisify } = require('util');
const minimist = require('minimist');

const { ELECTRON_DIR } = require('../lib/utils')
const versionUtils = require('./version-utils')
const { ELECTRON_DIR } = require('../lib/utils');
const versionUtils = require('./version-utils');

const writeFile = promisify(fs.writeFile)
const readFile = promisify(fs.readFile)
const writeFile = promisify(fs.writeFile);
const readFile = promisify(fs.readFile);

function parseCommandLine () {
let help
let help;
const opts = minimist(process.argv.slice(2), {
string: ['bump', 'version'],
boolean: ['dryRun', 'help'],
alias: { version: ['v'] },
unknown: arg => { help = true }
})
unknown: arg => { help = true; }
});
if (help || opts.help || !opts.bump) {
console.log(`
Bump release version number. Possible arguments:\n
@@ -28,30 +28,30 @@ function parseCommandLine () {
--version={version} to set version number directly\n
--dryRun to print the next version without updating files
Note that you can use both --bump and --stable simultaneously.
`)
process.exit(0)
`);
process.exit(0);
}
return opts
return opts;
}

// run the script
async function main () {
const opts = parseCommandLine()
const currentVersion = await versionUtils.getElectronVersion()
const version = await nextVersion(opts.bump, currentVersion)
const opts = parseCommandLine();
const currentVersion = await versionUtils.getElectronVersion();
const version = await nextVersion(opts.bump, currentVersion);

const parsed = semver.parse(version)
const parsed = semver.parse(version);
const components = {
major: parsed.major,
minor: parsed.minor,
patch: parsed.patch,
pre: parsed.prerelease
}
};

// print would-be new version and exit early
if (opts.dryRun) {
console.log(`new version number would be: ${version}\n`)
return 0
console.log(`new version number would be: ${version}\n`);
return 0;
}

// update all version-related files
@@ -59,12 +59,12 @@ async function main () {
updateVersion(version),
updatePackageJSON(version),
updateWinRC(components)
])
]);

// commit all updated version-related files
await commitVersionBump(version)
await commitVersionBump(version);

console.log(`Bumped to version: ${version}`)
console.log(`Bumped to version: ${version}`);
}

// get next version for release based on [nightly, beta, stable]
@@ -72,81 +72,81 @@ async function nextVersion (bumpType, version) {
if (versionUtils.isNightly(version) || versionUtils.isBeta(version)) {
switch (bumpType) {
case 'nightly':
version = await versionUtils.nextNightly(version)
break
version = await versionUtils.nextNightly(version);
break;
case 'beta':
version = await versionUtils.nextBeta(version)
break
version = await versionUtils.nextBeta(version);
break;
case 'stable':
version = semver.valid(semver.coerce(version))
break
version = semver.valid(semver.coerce(version));
break;
default:
throw new Error('Invalid bump type.')
throw new Error('Invalid bump type.');
}
} else if (versionUtils.isStable(version)) {
switch (bumpType) {
case 'nightly':
version = versionUtils.nextNightly(version)
break
version = versionUtils.nextNightly(version);
break;
case 'beta':
throw new Error('Cannot bump to beta from stable.')
throw new Error('Cannot bump to beta from stable.');
case 'minor':
version = semver.inc(version, 'minor')
break
version = semver.inc(version, 'minor');
break;
case 'stable':
version = semver.inc(version, 'patch')
break
version = semver.inc(version, 'patch');
break;
default:
throw new Error('Invalid bump type.')
throw new Error('Invalid bump type.');
}
} else {
throw new Error(`Invalid current version: ${version}`)
throw new Error(`Invalid current version: ${version}`);
}
return version
return version;
}

// update VERSION file with latest release info
async function updateVersion (version) {
const versionPath = path.resolve(ELECTRON_DIR, 'ELECTRON_VERSION')
await writeFile(versionPath, version, 'utf8')
const versionPath = path.resolve(ELECTRON_DIR, 'ELECTRON_VERSION');
await writeFile(versionPath, version, 'utf8');
}

// update package metadata files with new version
async function updatePackageJSON (version) {
const filePath = path.resolve(ELECTRON_DIR, 'package.json')
const file = require(filePath)
file.version = version
await writeFile(filePath, JSON.stringify(file, null, 2))
const filePath = path.resolve(ELECTRON_DIR, 'package.json');
const file = require(filePath);
file.version = version;
await writeFile(filePath, JSON.stringify(file, null, 2));
}

// push bump commit to release branch
async function commitVersionBump (version) {
const gitArgs = ['commit', '-a', '-m', `Bump v${version}`, '-n']
await GitProcess.exec(gitArgs, ELECTRON_DIR)
const gitArgs = ['commit', '-a', '-m', `Bump v${version}`, '-n'];
await GitProcess.exec(gitArgs, ELECTRON_DIR);
}

// updates atom.rc file with new semver values
async function updateWinRC (components) {
const filePath = path.resolve(ELECTRON_DIR, 'shell', 'browser', 'resources', 'win', 'atom.rc')
const data = await readFile(filePath, 'utf8')
const arr = data.split('\n')
const filePath = path.resolve(ELECTRON_DIR, 'shell', 'browser', 'resources', 'win', 'atom.rc');
const data = await readFile(filePath, 'utf8');
const arr = data.split('\n');
arr.forEach((line, idx) => {
if (line.includes('FILEVERSION')) {
arr[idx] = ` FILEVERSION ${versionUtils.makeVersion(components, ',', versionUtils.preType.PARTIAL)}`
arr[idx + 1] = ` PRODUCTVERSION ${versionUtils.makeVersion(components, ',', versionUtils.preType.PARTIAL)}`
arr[idx] = ` FILEVERSION ${versionUtils.makeVersion(components, ',', versionUtils.preType.PARTIAL)}`;
arr[idx + 1] = ` PRODUCTVERSION ${versionUtils.makeVersion(components, ',', versionUtils.preType.PARTIAL)}`;
} else if (line.includes('FileVersion')) {
arr[idx] = ` VALUE "FileVersion", "${versionUtils.makeVersion(components, '.')}"`
arr[idx + 5] = ` VALUE "ProductVersion", "${versionUtils.makeVersion(components, '.')}"`
arr[idx] = ` VALUE "FileVersion", "${versionUtils.makeVersion(components, '.')}"`;
arr[idx + 5] = ` VALUE "ProductVersion", "${versionUtils.makeVersion(components, '.')}"`;
}
})
await writeFile(filePath, arr.join('\n'))
});
await writeFile(filePath, arr.join('\n'));
}

if (process.mainModule === module) {
main().catch((error) => {
console.error(error)
process.exit(1)
})
console.error(error);
process.exit(1);
});
}

module.exports = { nextVersion }
module.exports = { nextVersion };
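For context on what nextVersion computes, the semver calls above behave roughly like this (return values follow the semver package's documented behaviour; the input versions are invented for illustration):

const semver = require('semver');

semver.inc('9.0.0', 'patch');                  // => '9.0.1'  ('stable' bump of a stable version)
semver.inc('9.0.0', 'minor');                  // => '9.1.0'  ('minor' bump)
semver.valid(semver.coerce('9.0.0-beta.3'));   // => '9.0.0'  (beta promoted to stable)
semver.inc('9.0.0-beta.3', 'prerelease');      // => '9.0.0-beta.4' (as used by nextBeta in version-utils)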
@@ -1,90 +1,90 @@
const path = require('path')
const fs = require('fs')
const semver = require('semver')
const { GitProcess } = require('dugite')
const { promisify } = require('util')
const path = require('path');
const fs = require('fs');
const semver = require('semver');
const { GitProcess } = require('dugite');
const { promisify } = require('util');

const { ELECTRON_DIR } = require('../lib/utils')
const { ELECTRON_DIR } = require('../lib/utils');

const readFile = promisify(fs.readFile)
const readFile = promisify(fs.readFile);

const preType = {
NONE: 'none',
PARTIAL: 'partial',
FULL: 'full'
}
};

const getCurrentDate = () => {
const d = new Date()
const dd = `${d.getDate()}`.padStart(2, '0')
const mm = `${d.getMonth() + 1}`.padStart(2, '0')
const yyyy = d.getFullYear()
return `${yyyy}${mm}${dd}`
}
const d = new Date();
const dd = `${d.getDate()}`.padStart(2, '0');
const mm = `${d.getMonth() + 1}`.padStart(2, '0');
const yyyy = d.getFullYear();
return `${yyyy}${mm}${dd}`;
};

const isNightly = v => v.includes('nightly')
const isBeta = v => v.includes('beta')
const isNightly = v => v.includes('nightly');
const isBeta = v => v.includes('beta');
const isStable = v => {
const parsed = semver.parse(v)
return !!(parsed && parsed.prerelease.length === 0)
}
const parsed = semver.parse(v);
return !!(parsed && parsed.prerelease.length === 0);
};

const makeVersion = (components, delim, pre = preType.NONE) => {
let version = [components.major, components.minor, components.patch].join(delim)
let version = [components.major, components.minor, components.patch].join(delim);
if (pre === preType.PARTIAL) {
version += `${delim}${components.pre[1] || 0}`
version += `${delim}${components.pre[1] || 0}`;
} else if (pre === preType.FULL) {
version += `-${components.pre[0]}${delim}${components.pre[1]}`
version += `-${components.pre[0]}${delim}${components.pre[1]}`;
}
return version
}
return version;
};

async function nextBeta (v) {
const next = semver.coerce(semver.clean(v))
const next = semver.coerce(semver.clean(v));

const tagBlob = await GitProcess.exec(['tag', '--list', '-l', `v${next}-beta.*`], ELECTRON_DIR)
const tags = tagBlob.stdout.split('\n').filter(e => e !== '')
tags.sort((t1, t2) => semver.gt(t1, t2))
const tagBlob = await GitProcess.exec(['tag', '--list', '-l', `v${next}-beta.*`], ELECTRON_DIR);
const tags = tagBlob.stdout.split('\n').filter(e => e !== '');
tags.sort((t1, t2) => semver.gt(t1, t2));

// increment the latest existing beta tag or start at beta.1 if it's a new beta line
return tags.length === 0 ? `${next}-beta.1` : semver.inc(tags.pop(), 'prerelease')
return tags.length === 0 ? `${next}-beta.1` : semver.inc(tags.pop(), 'prerelease');
}

async function getElectronVersion () {
const versionPath = path.resolve(ELECTRON_DIR, 'ELECTRON_VERSION')
const version = await readFile(versionPath, 'utf8')
return version.trim()
const versionPath = path.resolve(ELECTRON_DIR, 'ELECTRON_VERSION');
const version = await readFile(versionPath, 'utf8');
return version.trim();
}

async function nextNightly (v) {
let next = semver.valid(semver.coerce(v))
const pre = `nightly.${getCurrentDate()}`
let next = semver.valid(semver.coerce(v));
const pre = `nightly.${getCurrentDate()}`;

const branch = (await GitProcess.exec(['rev-parse', '--abbrev-ref', 'HEAD'], ELECTRON_DIR)).stdout.trim()
const branch = (await GitProcess.exec(['rev-parse', '--abbrev-ref', 'HEAD'], ELECTRON_DIR)).stdout.trim();
if (branch === 'master') {
next = semver.inc(await getLastMajorForMaster(), 'major')
next = semver.inc(await getLastMajorForMaster(), 'major');
} else if (isStable(v)) {
next = semver.inc(next, 'patch')
next = semver.inc(next, 'patch');
}

return `${next}-${pre}`
return `${next}-${pre}`;
}

async function getLastMajorForMaster () {
let branchNames
const result = await GitProcess.exec(['branch', '-a', '--remote', '--list', 'origin/[0-9]*-x-y'], ELECTRON_DIR)
let branchNames;
const result = await GitProcess.exec(['branch', '-a', '--remote', '--list', 'origin/[0-9]*-x-y'], ELECTRON_DIR);
if (result.exitCode === 0) {
branchNames = result.stdout.trim().split('\n')
const filtered = branchNames.map(b => b.replace('origin/', ''))
return getNextReleaseBranch(filtered)
branchNames = result.stdout.trim().split('\n');
const filtered = branchNames.map(b => b.replace('origin/', ''));
return getNextReleaseBranch(filtered);
} else {
throw new Error('Release branches could not be fetched.')
throw new Error('Release branches could not be fetched.');
}
}

function getNextReleaseBranch (branches) {
const converted = branches.map(b => b.replace(/-/g, '.').replace('x', '0').replace('y', '0'))
return converted.reduce((v1, v2) => semver.gt(v1, v2) ? v1 : v2)
const converted = branches.map(b => b.replace(/-/g, '.').replace('x', '0').replace('y', '0'));
return converted.reduce((v1, v2) => semver.gt(v1, v2) ? v1 : v2);
}

module.exports = {
@@ -96,4 +96,4 @@ module.exports = {
getElectronVersion,
nextNightly,
preType
}
};
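To make makeVersion concrete: given the components object that version-bumper builds from semver.parse, the three preType modes produce the following (a worked example traced by hand from the code above, using an invented version):

const components = { major: 9, minor: 0, patch: 0, pre: ['beta', 3] };

makeVersion(components, '.');                   // => '9.0.0'
makeVersion(components, ',', preType.PARTIAL);  // => '9,0,0,3' — the atom.rc FILEVERSION form
makeVersion(components, '.', preType.FULL);     // => '9.0.0-beta.3'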