build: convert all release scripts to typescript (#44062)

* build: convert all release scripts to typescript (#44035)

* build: convert all release scripts to typescript

* fix test imports

* build: fix version bumper export

* refactor: use as const

* spec: fix bad type spec

* build: use ts-node to spawn the version-bumper (#44057)

Missed this in the tsification, we should probably call this via API instead of spawning a sub-proc?
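
A minimal sketch of what that API-based call might look like, assuming the version bumper exposes its main routine as a module export (the `nextVersion` name and its signature below are hypothetical, for illustration only):

    // Hypothetical sketch: import the bumper instead of spawning
    // `ts-node script/release/version-bumper.ts` as a child process.
    // `nextVersion` is an assumed export name, not the confirmed API.
    import { nextVersion } from './version-bumper';

    async function bumpVersion (bumpType: string, version: string): Promise<string> {
      // Failures surface as ordinary exceptions instead of a child-process
      // exit code, and there is no ts-node startup cost per invocation.
      const bumped = await nextVersion(bumpType, version);
      console.log(`Bumped to ${bumped}`);
      return bumped;
    }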
Samuel Attard authored 2024-10-01 08:49:57 -07:00, committed by GitHub
parent 52709bf9e3
commit 5bb78f9071
22 changed files with 1163 additions and 762 deletions


@@ -11,7 +11,7 @@
     "@electron/github-app-auth": "^2.0.0",
     "@electron/lint-roller": "^1.12.1",
     "@electron/typescript-definitions": "^8.15.2",
-    "@octokit/rest": "^19.0.7",
+    "@octokit/rest": "^20.0.2",
     "@primer/octicons": "^10.0.0",
     "@types/basic-auth": "^1.1.3",
     "@types/busboy": "^1.5.0",
@@ -36,7 +36,6 @@
     "buffer": "^6.0.3",
     "check-for-leaks": "^1.2.1",
     "colors": "1.4.0",
-    "dotenv-safe": "^4.0.4",
     "dugite": "^2.3.0",
     "eslint": "^8.41.0",
     "eslint-config-standard": "^14.1.1",
@@ -67,7 +66,7 @@
     "timers-browserify": "1.4.2",
     "ts-loader": "^8.0.2",
     "ts-node": "6.2.0",
-    "typescript": "^5.1.2",
+    "typescript": "^5.6.2",
     "url": "^0.11.0",
     "webpack": "^5.76.0",
     "webpack-cli": "^4.10.0",


@@ -1,5 +1,3 @@
-if (!process.env.CI) require('dotenv-safe').load();
-
 const assert = require('node:assert');
 const fs = require('node:fs');
 const got = require('got');


@@ -1,19 +1,19 @@
-if (!process.env.CI) require('dotenv-safe').load();
-
-const assert = require('node:assert');
-const got = require('got');
-
-const { Octokit } = require('@octokit/rest');
-const { createGitHubTokenStrategy } = require('./github-token');
+import { Octokit } from '@octokit/rest';
+import got, { OptionsOfTextResponseBody } from 'got';
+import * as assert from 'node:assert';
+
+import { createGitHubTokenStrategy } from './github-token';
+import { ELECTRON_ORG, ELECTRON_REPO } from './types';
+import { parseArgs } from 'node:util';
 
 const octokit = new Octokit({
   authStrategy: createGitHubTokenStrategy('electron')
 });
 
 const BUILD_APPVEYOR_URL = 'https://ci.appveyor.com/api/builds';
 const GH_ACTIONS_PIPELINE_URL = 'https://github.com/electron/electron/actions';
-const GH_ACTIONS_API_URL = '/repos/electron/electron/actions';
 
-const GH_ACTIONS_WAIT_TIME = process.env.GH_ACTIONS_WAIT_TIME || 30000;
+const GH_ACTIONS_WAIT_TIME = process.env.GH_ACTIONS_WAIT_TIME ? parseInt(process.env.GH_ACTIONS_WAIT_TIME, 10) : 30000;
 
 const appVeyorJobs = {
   'electron-x64': 'electron-x64-release',
@@ -24,11 +24,21 @@ const appVeyorJobs = {
 
 const ghActionsPublishWorkflows = [
   'linux-publish',
   'macos-publish'
-];
+] as const;
 
 let jobRequestedCount = 0;
 
-async function makeRequest ({ auth, username, password, url, headers, body, method }) {
+type ReleaseBuildRequestOptions = {
+  auth?: {
+    bearer?: string;
+  };
+  url: string;
+  headers: Record<string, string>;
+  body: string,
+  method: 'GET' | 'POST';
+}
+
+async function makeRequest ({ auth, url, headers, body, method }: ReleaseBuildRequestOptions) {
   const clonedHeaders = {
     ...(headers || {})
   };
@@ -36,17 +46,12 @@ async function makeRequest ({ auth, username, password, url, headers, body, method }) {
     clonedHeaders.Authorization = `Bearer ${auth.bearer}`;
   }
 
-  const options = {
+  const options: OptionsOfTextResponseBody = {
     headers: clonedHeaders,
     body,
     method
   };
 
-  if (username || password) {
-    options.username = username;
-    options.password = password;
-  }
-
   const response = await got(url, options);
 
   if (response.statusCode < 200 || response.statusCode >= 300) {
@@ -56,11 +61,17 @@ async function makeRequest ({ auth, username, password, url, headers, body, method }) {
   return JSON.parse(response.body);
 }
 
-async function githubActionsCall (targetBranch, workflowName, options) {
+type GitHubActionsCallOptions = {
+  ghRelease?: boolean;
+  newVersion: string;
+  runningPublishWorkflows?: boolean;
+}
+
+async function githubActionsCall (targetBranch: string, workflowName: string, options: GitHubActionsCallOptions) {
   console.log(`Triggering GitHub Actions to run build job: ${workflowName} on branch: ${targetBranch} with release flag.`);
   const buildRequest = {
     branch: targetBranch,
-    parameters: {}
+    parameters: {} as Record<string, string | boolean>
   };
   if (options.ghRelease) {
     buildRequest.parameters['upload-to-storage'] = '0';
@@ -81,13 +92,13 @@ async function githubActionsCall (targetBranch, workflowName, options) {
     console.error('Could not fetch most recent commits for GitHub Actions, returning early');
   }
 
-  await octokit.request(`POST ${GH_ACTIONS_API_URL}/workflows/${workflowName}.yml/dispatches`, {
+  await octokit.actions.createWorkflowDispatch({
+    repo: ELECTRON_REPO,
+    owner: ELECTRON_ORG,
+    workflow_id: `${workflowName}.yml`,
     ref: `refs/tags/${options.newVersion}`,
     inputs: {
       ...buildRequest.parameters
-    },
-    headers: {
-      'X-GitHub-Api-Version': '2022-11-28'
     }
   });
@@ -110,17 +121,18 @@ async function githubActionsCall (targetBranch, workflowName, options) {
   }
 }
 
-async function getGitHubActionsRun (workflowId, headCommit) {
+async function getGitHubActionsRun (workflowName: string, headCommit: string) {
   let runNumber = 0;
   let actionRun;
   while (runNumber === 0) {
-    const actionsRuns = await octokit.request(`GET ${GH_ACTIONS_API_URL}/workflows/${workflowId}.yml/runs`, {
-      headers: {
-        'X-GitHub-Api-Version': '2022-11-28'
-      }
+    const actionsRuns = await octokit.actions.listWorkflowRuns({
+      repo: ELECTRON_REPO,
+      owner: ELECTRON_ORG,
+      workflow_id: `${workflowName}.yml`
     });
     if (!actionsRuns.data.workflow_runs.length) {
-      console.log(`No current workflow_runs found for ${workflowId}, response was: ${actionsRuns.data.workflow_runs}`);
+      console.log(`No current workflow_runs found for ${workflowName}, response was: ${actionsRuns.data.workflow_runs}`);
       runNumber = -1;
       break;
     }
@@ -163,9 +175,14 @@ async function getGitHubActionsRun (workflowId, headCommit) {
   return runNumber;
 }
 
-async function callAppVeyor (targetBranch, job, options) {
+type AppVeyorCallOptions = {
+  ghRelease?: boolean;
+  commit?: string;
+}
+
+async function callAppVeyor (targetBranch: string, job: keyof typeof appVeyorJobs, options: AppVeyorCallOptions) {
   console.log(`Triggering AppVeyor to run build job: ${job} on branch: ${targetBranch} with release flag.`);
-  const environmentVariables = {
+  const environmentVariables: Record<string, string | number> = {
     ELECTRON_RELEASE: 1,
     APPVEYOR_BUILD_WORKER_CLOUD: 'electronhq-16-core'
   };
@@ -190,14 +207,14 @@ async function callAppVeyor (targetBranch, job, options) {
       environmentVariables
     }),
     method: 'POST'
-  };
+  } as const;
   jobRequestedCount++;
 
   try {
-    const { version } = await makeRequest(requestOpts, true);
+    const { version } = await makeRequest(requestOpts);
     const buildUrl = `https://ci.appveyor.com/project/electron-bot/${appVeyorJobs[job]}/build/${version}`;
     console.log(`AppVeyor release build request for ${job} successful. Check build status at ${buildUrl}`);
-  } catch (err) {
+  } catch (err: any) {
     if (err.response?.body) {
       console.error('Could not call AppVeyor: ', {
         statusCode: err.response.statusCode,
@@ -209,67 +226,120 @@ async function callAppVeyor (targetBranch, job, options) {
   }
 }
 
-function buildAppVeyor (targetBranch, options) {
-  const validJobs = Object.keys(appVeyorJobs);
+type BuildAppVeyorOptions = {
+  job?: keyof typeof appVeyorJobs;
+} & AppVeyorCallOptions;
+
+async function buildAppVeyor (targetBranch: string, options: BuildAppVeyorOptions) {
+  const validJobs = Object.keys(appVeyorJobs) as (keyof typeof appVeyorJobs)[];
   if (options.job) {
     assert(validJobs.includes(options.job), `Unknown AppVeyor CI job name: ${options.job}. Valid values are: ${validJobs}.`);
-    callAppVeyor(targetBranch, options.job, options);
+    await callAppVeyor(targetBranch, options.job, options);
   } else {
     for (const job of validJobs) {
-      callAppVeyor(targetBranch, job, options);
+      await callAppVeyor(targetBranch, job, options);
     }
   }
 }
 
-function buildGHActions (targetBranch, options) {
+type BuildGHActionsOptions = {
+  job?: typeof ghActionsPublishWorkflows[number];
+  arch?: string;
+} & GitHubActionsCallOptions;
+
+async function buildGHActions (targetBranch: string, options: BuildGHActionsOptions) {
   if (options.job) {
     assert(ghActionsPublishWorkflows.includes(options.job), `Unknown GitHub Actions workflow name: ${options.job}. Valid values are: ${ghActionsPublishWorkflows}.`);
-    githubActionsCall(targetBranch, options.job, options);
+    await githubActionsCall(targetBranch, options.job, options);
   } else {
     assert(!options.arch, 'Cannot provide a single architecture while building all workflows, please specify a single workflow via --workflow');
     options.runningPublishWorkflows = true;
     for (const job of ghActionsPublishWorkflows) {
-      githubActionsCall(targetBranch, job, options);
+      await githubActionsCall(targetBranch, job, options);
     }
   }
 }
 
-function runRelease (targetBranch, options) {
+type RunReleaseOptions = ({
+  ci: 'GitHubActions'
+} & BuildGHActionsOptions) | ({
+  ci: 'AppVeyor'
+} & BuildAppVeyorOptions) | ({
+  ci: undefined,
+} & BuildAppVeyorOptions & BuildGHActionsOptions);
+
+async function runRelease (targetBranch: string, options: RunReleaseOptions) {
   if (options.ci) {
     switch (options.ci) {
       case 'GitHubActions': {
-        buildGHActions(targetBranch, options);
+        await buildGHActions(targetBranch, options);
         break;
       }
       case 'AppVeyor': {
-        buildAppVeyor(targetBranch, options);
+        await buildAppVeyor(targetBranch, options);
         break;
       }
      default: {
-        console.log(`Error! Unknown CI: ${options.ci}.`);
+        console.log(`Error! Unknown CI: ${(options as any).ci}.`);
        process.exit(1);
      }
    }
  } else {
-    buildAppVeyor(targetBranch, options);
-    buildGHActions(targetBranch, options);
+    await Promise.all([
+      buildAppVeyor(targetBranch, options),
+      buildGHActions(targetBranch, options)
+    ]);
  }
  console.log(`${jobRequestedCount} jobs were requested.`);
}
 
-module.exports = runRelease;
+export default runRelease;
 
 if (require.main === module) {
-  const args = require('minimist')(process.argv.slice(2), {
-    boolean: ['ghRelease']
+  const { values: { ghRelease, job, arch, ci, commit, newVersion }, positionals } = parseArgs({
+    options: {
+      ghRelease: {
+        type: 'boolean'
+      },
+      job: {
+        type: 'string'
+      },
+      arch: {
+        type: 'string'
+      },
+      ci: {
+        type: 'string'
+      },
+      commit: {
+        type: 'string'
+      },
+      newVersion: {
+        type: 'string'
+      }
+    },
+    allowPositionals: true
   });
 
-  const targetBranch = args._[0];
-  if (args._.length < 1) {
+  const targetBranch = positionals[0];
+  if (positionals.length < 1) {
     console.log(`Trigger CI to build release builds of electron.
     Usage: ci-release-build.js [--job=CI_JOB_NAME] [--arch=INDIVIDUAL_ARCH] [--ci=AppVeyor|GitHubActions]
-    [--ghRelease] [--appveyorJobId=xxx] [--commit=sha] TARGET_BRANCH
+    [--ghRelease] [--commit=sha] [--newVersion=version_tag] TARGET_BRANCH
     `);
     process.exit(0);
   }
 
-  runRelease(targetBranch, args);
+  if (ci === 'GitHubActions' || !ci) {
+    if (!newVersion) {
+      console.error('--newVersion is required for GitHubActions');
+      process.exit(1);
    }
+  }
+
+  runRelease(targetBranch, {
+    ci: ci as 'GitHubActions' | 'AppVeyor',
+    ghRelease,
+    job: job as any,
+    arch,
+    newVersion: newVersion!,
+    commit
+  });
}


@@ -1,7 +1,6 @@
-if (!process.env.CI) require('dotenv-safe').load();
-
-const { Octokit } = require('@octokit/rest');
-const { createGitHubTokenStrategy } = require('./github-token');
+import { Octokit } from '@octokit/rest';
+import { createGitHubTokenStrategy } from './github-token';
+import { ELECTRON_ORG, ELECTRON_REPO, ElectronReleaseRepo, NIGHTLY_REPO } from './types';
 
 if (process.argv.length < 3) {
   console.log('Usage: find-release version');
@@ -15,13 +14,13 @@ const octokit = new Octokit({
   authStrategy: createGitHubTokenStrategy(targetRepo)
 });
 
-function findRepo () {
-  return version.indexOf('nightly') > 0 ? 'nightlies' : 'electron';
+function findRepo (): ElectronReleaseRepo {
+  return version.indexOf('nightly') > 0 ? NIGHTLY_REPO : ELECTRON_REPO;
 }
 
 async function findRelease () {
   const releases = await octokit.repos.listReleases({
-    owner: 'electron',
+    owner: ELECTRON_ORG,
     repo: targetRepo
   });
@@ -43,4 +42,8 @@ async function findRelease () {
   console.log(JSON.stringify(returnObject));
 }
 
-findRelease();
+findRelease()
+  .catch((err) => {
+    console.error(err);
+    process.exit(1);
+  });


@@ -1,8 +1,9 @@
-const { Octokit } = require('@octokit/rest');
-const got = require('got');
-const { createGitHubTokenStrategy } = require('./github-token');
+import { Octokit } from '@octokit/rest';
+import got from 'got';
+import { createGitHubTokenStrategy } from './github-token';
+import { ElectronReleaseRepo } from './types';
 
-async function getAssetContents (repo, assetId) {
+export async function getAssetContents (repo: ElectronReleaseRepo, assetId: number) {
   const octokit = new Octokit({
     userAgent: 'electron-asset-fetcher',
     authStrategy: createGitHubTokenStrategy(repo)
@@ -18,12 +19,12 @@ async function getAssetContents (repo, assetId) {
   });
 
   const { url, headers } = requestOptions;
-  headers.authorization = `token ${(await octokit.auth()).token}`;
+  headers.authorization = `token ${(await octokit.auth() as { token: string }).token}`;
 
   const response = await got(url, {
     followRedirect: false,
     method: 'HEAD',
-    headers,
+    headers: headers as Record<string, string>,
     throwHttpErrors: false
   });
@@ -48,7 +49,3 @@ async function getAssetContents (repo, assetId) {
 
   return fileResponse.body;
 }
-
-module.exports = {
-  getAssetContents
-};


@@ -1,17 +1,20 @@
-const got = require('got');
-const url = require('node:url');
+import got from 'got';
+import * as url from 'node:url';
 
-module.exports = async function getUrlHash (targetUrl, algorithm = 'sha256', attempts = 3) {
+const HASHER_FUNCTION_HOST = 'electron-artifact-hasher.azurewebsites.net';
+const HASHER_FUNCTION_ROUTE = '/api/HashArtifact';
+
+export async function getUrlHash (targetUrl: string, algorithm = 'sha256', attempts = 3) {
   const options = {
-    code: process.env.ELECTRON_ARTIFACT_HASHER_FUNCTION_KEY,
+    code: process.env.ELECTRON_ARTIFACT_HASHER_FUNCTION_KEY!,
     targetUrl,
     algorithm
   };
   const search = new url.URLSearchParams(options);
   const functionUrl = url.format({
     protocol: 'https:',
-    hostname: 'electron-artifact-hasher.azurewebsites.net',
-    pathname: '/api/HashArtifact',
+    hostname: HASHER_FUNCTION_HOST,
+    pathname: HASHER_FUNCTION_ROUTE,
     search: search.toString()
   });
   try {
@@ -27,10 +30,11 @@ module.exports = async function getUrlHash (targetUrl, algorithm = 'sha256', attempts = 3) {
     return resp.body.trim();
   } catch (err) {
     if (attempts > 1) {
-      if (err.response?.body) {
+      const { response } = err as any;
+      if (response?.body) {
         console.error(`Failed to get URL hash for ${targetUrl} - we will retry`, {
-          statusCode: err.response.statusCode,
-          body: JSON.parse(err.response.body)
+          statusCode: response.statusCode,
+          body: JSON.parse(response.body)
         });
       } else {
         console.error(`Failed to get URL hash for ${targetUrl} - we will retry`, err);


@@ -1,9 +1,11 @@
-const { createTokenAuth } = require('@octokit/auth-token');
-const got = require('got').default;
+import { createTokenAuth } from '@octokit/auth-token';
+import got from 'got';
+import { ElectronReleaseRepo } from './types';
 
 const cachedTokens = Object.create(null);
 
-async function ensureToken (repo) {
+async function ensureToken (repo: ElectronReleaseRepo) {
   if (!cachedTokens[repo]) {
     cachedTokens[repo] = await (async () => {
       const { ELECTRON_GITHUB_TOKEN, SUDOWOODO_EXCHANGE_URL, SUDOWOODO_EXCHANGE_TOKEN } = process.env;
@@ -35,23 +37,24 @@ async function ensureToken (repo) {
   }
 }
 
-module.exports.createGitHubTokenStrategy = (repo) => () => {
-  let tokenAuth = null;
+export const createGitHubTokenStrategy = (repo: ElectronReleaseRepo) => () => {
+  let tokenAuth: ReturnType<typeof createTokenAuth> | null = null;
 
-  async function ensureTokenAuth () {
+  async function ensureTokenAuth (): Promise<ReturnType<typeof createTokenAuth>> {
     if (!tokenAuth) {
       await ensureToken(repo);
       tokenAuth = createTokenAuth(cachedTokens[repo]);
     }
+    return tokenAuth;
   }
 
   async function auth () {
-    await ensureTokenAuth();
-    return await tokenAuth();
+    return await (await ensureTokenAuth())();
   }
 
-  auth.hook = async (...args) => {
-    await ensureTokenAuth();
-    return await tokenAuth.hook(...args);
+  const hook: ReturnType<typeof createTokenAuth>['hook'] = async (...args) => {
+    const a = (await ensureTokenAuth());
+    return (a as any).hook(...args);
   };
+  auth.hook = hook;
 
   return auth;
 };


@@ -1,22 +1,22 @@
 #!/usr/bin/env node
 
-const { GitProcess } = require('dugite');
-const minimist = require('minimist');
-const path = require('node:path');
-const semver = require('semver');
-
-const { ELECTRON_DIR } = require('../../lib/utils');
-const notesGenerator = require('./notes.js');
-
-const { Octokit } = require('@octokit/rest');
-const { createGitHubTokenStrategy } = require('../github-token');
+import { GitProcess } from 'dugite';
+import { basename } from 'node:path';
+import { valid, compare, gte, lte } from 'semver';
+
+import { ELECTRON_DIR } from '../../lib/utils';
+import { get, render } from './notes';
+
+import { Octokit } from '@octokit/rest';
+import { createGitHubTokenStrategy } from '../github-token';
+import { parseArgs } from 'node:util';
 
 const octokit = new Octokit({
   authStrategy: createGitHubTokenStrategy('electron')
 });
 
-const semverify = version => version.replace(/^origin\//, '').replace(/[xy]/g, '0').replace(/-/g, '.');
+const semverify = (version: string) => version.replace(/^origin\//, '').replace(/[xy]/g, '0').replace(/-/g, '.');
 
-const runGit = async (args) => {
+const runGit = async (args: string[]) => {
   console.info(`Running: git ${args.join(' ')}`);
   const response = await GitProcess.exec(args, ELECTRON_DIR);
   if (response.exitCode !== 0) {
@@ -25,25 +25,25 @@ const runGit = async (args) => {
   return response.stdout.trim();
 };
 
-const tagIsSupported = tag => tag && !tag.includes('nightly') && !tag.includes('unsupported');
-const tagIsAlpha = tag => tag && tag.includes('alpha');
-const tagIsBeta = tag => tag && tag.includes('beta');
-const tagIsStable = tag => tagIsSupported(tag) && !tagIsBeta(tag) && !tagIsAlpha(tag);
+const tagIsSupported = (tag: string) => !!tag && !tag.includes('nightly') && !tag.includes('unsupported');
+const tagIsAlpha = (tag: string) => !!tag && tag.includes('alpha');
+const tagIsBeta = (tag: string) => !!tag && tag.includes('beta');
+const tagIsStable = (tag: string) => tagIsSupported(tag) && !tagIsBeta(tag) && !tagIsAlpha(tag);
 
-const getTagsOf = async (point) => {
+const getTagsOf = async (point: string) => {
   try {
     const tags = await runGit(['tag', '--merged', point]);
     return tags.split('\n')
       .map(tag => tag.trim())
-      .filter(tag => semver.valid(tag))
-      .sort(semver.compare);
+      .filter(tag => valid(tag))
+      .sort(compare);
   } catch (err) {
     console.error(`Failed to fetch tags for point ${point}`);
     throw err;
   }
 };
 
-const getTagsOnBranch = async (point) => {
+const getTagsOnBranch = async (point: string) => {
   const { data: { default_branch: defaultBranch } } = await octokit.repos.get({
     owner: 'electron',
     repo: 'electron'
@@ -57,7 +57,7 @@ const getTagsOnBranch = async (point) => {
   return (await getTagsOf(point)).filter(tag => !mainTagsSet.has(tag));
 };
 
-const getBranchOf = async (point) => {
+const getBranchOf = async (point: string) => {
   try {
     const branches = (await runGit(['branch', '-a', '--contains', point]))
       .split('\n')
@@ -89,11 +89,11 @@ const getStabilizationBranches = async () => {
   return (await getAllBranches()).filter(branch => /^origin\/\d+-x-y$/.test(branch));
 };
 
-const getPreviousStabilizationBranch = async (current) => {
+const getPreviousStabilizationBranch = async (current: string) => {
   const stabilizationBranches = (await getStabilizationBranches())
     .filter(branch => branch !== current && branch !== `origin/${current}`);
 
-  if (!semver.valid(current)) {
+  if (!valid(current)) {
     // since we don't seem to be on a stabilization branch right now,
     // pick a placeholder name that will yield the newest branch
     // as a comparison point.
@@ -102,20 +102,20 @@ const getPreviousStabilizationBranch = async (current) => {
   let newestMatch = null;
   for (const branch of stabilizationBranches) {
-    if (semver.gte(semverify(branch), semverify(current))) {
+    if (gte(semverify(branch), semverify(current))) {
       continue;
     }
-    if (newestMatch && semver.lte(semverify(branch), semverify(newestMatch))) {
+    if (newestMatch && lte(semverify(branch), semverify(newestMatch))) {
       continue;
     }
     newestMatch = branch;
   }
-  return newestMatch;
+  return newestMatch!;
 };
 
-const getPreviousPoint = async (point) => {
+const getPreviousPoint = async (point: string) => {
   const currentBranch = await getBranchOf(point);
-  const currentTag = (await getTagsOf(point)).filter(tag => tagIsSupported(tag)).pop();
+  const currentTag = (await getTagsOf(point)).filter(tag => tagIsSupported(tag)).pop()!;
   const currentIsStable = tagIsStable(currentTag);
 
   try {
@@ -146,18 +146,18 @@ const getPreviousPoint = async (point) => {
   }
 };
 
-async function getReleaseNotes (range, newVersion, unique) {
+async function getReleaseNotes (range: string, newVersion?: string, unique?: boolean) {
   const rangeList = range.split('..') || ['HEAD'];
-  const to = rangeList.pop();
-  const from = rangeList.pop() || (await getPreviousPoint(to));
+  const to = rangeList.pop()!;
+  const from = rangeList.pop() || (await getPreviousPoint(to))!;
 
   if (!newVersion) {
     newVersion = to;
   }
 
-  const notes = await notesGenerator.get(from, to, newVersion);
-  const ret = {
-    text: notesGenerator.render(notes, unique)
+  const notes = await get(from, to, newVersion);
+  const ret: { text: string; warning?: string; } = {
+    text: render(notes, unique)
   };
 
   if (notes.unknown.length) {
@@ -168,13 +168,24 @@ async function getReleaseNotes (range, newVersion, unique) {
 }
 
 async function main () {
-  const opts = minimist(process.argv.slice(2), {
-    boolean: ['help', 'unique'],
-    string: ['version']
+  const { values: { help, unique, version }, positionals } = parseArgs({
+    options: {
+      help: {
+        type: 'boolean'
+      },
+      unique: {
+        type: 'boolean'
+      },
+      version: {
+        type: 'string'
+      }
+    },
+    allowPositionals: true
   });
-  opts.range = opts._.shift();
-  if (opts.help || !opts.range) {
-    const name = path.basename(process.argv[1]);
+
+  const range = positionals.shift();
+  if (help || !range) {
+    const name = basename(process.argv[1]);
     console.log(`
 easy usage: ${name} version
@@ -194,7 +205,7 @@ For example, these invocations are equivalent:
     return 0;
   }
 
-  const notes = await getReleaseNotes(opts.range, opts.version, opts.unique);
+  const notes = await getReleaseNotes(range, version, unique);
   console.log(notes.text);
   if (notes.warning) {
     throw new Error(notes.warning);
@@ -208,4 +219,4 @@ if (require.main === module) {
   });
 }
 
-module.exports = getReleaseNotes;
+export default getReleaseNotes;


@ -1,16 +1,13 @@
#!/usr/bin/env node #!/usr/bin/env node
'use strict'; import { existsSync, readFileSync, writeFileSync, mkdirSync } from 'node:fs';
import { resolve as _resolve } from 'node:path';
const fs = require('node:fs'); import { Octokit } from '@octokit/rest';
const path = require('node:path'); import { GitProcess } from 'dugite';
const { GitProcess } = require('dugite'); import { ELECTRON_DIR } from '../../lib/utils';
import { createGitHubTokenStrategy } from '../github-token';
const { Octokit } = require('@octokit/rest');
const { ELECTRON_DIR } = require('../../lib/utils');
const { createGitHubTokenStrategy } = require('../github-token');
const octokit = new Octokit({ const octokit = new Octokit({
authStrategy: createGitHubTokenStrategy('electron') authStrategy: createGitHubTokenStrategy('electron')
@ -26,24 +23,52 @@ const NO_NOTES = 'No notes';
const docTypes = new Set(['doc', 'docs']); const docTypes = new Set(['doc', 'docs']);
const featTypes = new Set(['feat', 'feature']); const featTypes = new Set(['feat', 'feature']);
const fixTypes = new Set(['fix']); const fixTypes = new Set(['fix']);
const otherTypes = new Set(['spec', 'build', 'test', 'chore', 'deps', 'refactor', 'tools', 'perf', 'style', 'ci']); const otherTypes = new Set([
const knownTypes = new Set([...docTypes.keys(), ...featTypes.keys(), ...fixTypes.keys(), ...otherTypes.keys()]); 'spec',
'build',
'test',
'chore',
'deps',
'refactor',
'tools',
'perf',
'style',
'ci'
]);
const knownTypes = new Set([
...docTypes.keys(),
...featTypes.keys(),
...fixTypes.keys(),
...otherTypes.keys()
]);
const getCacheDir = () => process.env.NOTES_CACHE_PATH || path.resolve(__dirname, '.cache'); const getCacheDir = () =>
process.env.NOTES_CACHE_PATH || _resolve(__dirname, '.cache');
/** /**
*** ***
**/ **/
type MinimalPR = {
title: string;
body: string | null;
number: number;
labels: {
name: string;
}[];
base: { repo: { name: string; owner: { login: string } } };
};
// link to a GitHub item, e.g. an issue or pull request // link to a GitHub item, e.g. an issue or pull request
class GHKey { class GHKey {
constructor (owner, repo, number) { // eslint-disable-next-line no-useless-constructor
this.owner = owner; constructor (
this.repo = repo; public readonly owner: string,
this.number = number; public readonly repo: string,
} public readonly number: number
) {}
static NewFromPull (pull) { static NewFromPull (pull: MinimalPR) {
const owner = pull.base.repo.owner.login; const owner = pull.base.repo.owner.login;
const repo = pull.base.repo.name; const repo = pull.base.repo.name;
const number = pull.number; const number = pull.number;
@ -52,38 +77,33 @@ class GHKey {
} }
class Commit { class Commit {
constructor (hash, owner, repo) { public isBreakingChange = false;
this.hash = hash; // string public note: string | null = null;
this.owner = owner; // string public trops = new Map<string, GHKey>();
this.repo = repo; // string public readonly prKeys = new Set<GHKey>();
public revertHash: string | null = null;
public semanticType: string | null = null;
public subject: string | null = null;
this.isBreakingChange = false; // eslint-disable-next-line no-useless-constructor
this.note = null; // string constructor (
public readonly hash: string,
// A set of branches to which this change has been merged. public readonly owner: string,
// '8-x-y' => GHKey { owner: 'electron', repo: 'electron', number: 23714 } public readonly repo: string
this.trops = new Map(); // Map<string,GHKey> ) {}
this.prKeys = new Set(); // GHKey
this.revertHash = null; // string
this.semanticType = null; // string
this.subject = null; // string
}
} }
class Pool { class Pool {
constructor () { public commits: Commit[] = [];
this.commits = []; // Array<Commit> public processedHashes = new Set<string>();
this.processedHashes = new Set(); public pulls: Record<number, MinimalPR> = Object.create(null);
this.pulls = {}; // GHKey.number => octokit pull object
}
} }
/** /**
*** ***
**/ **/
const runGit = async (dir, args) => { const runGit = async (dir: string, args: string[]) => {
const response = await GitProcess.exec(args, dir); const response = await GitProcess.exec(args, dir);
if (response.exitCode !== 0) { if (response.exitCode !== 0) {
throw new Error(response.stderr.trim()); throw new Error(response.stderr.trim());
@ -91,11 +111,15 @@ const runGit = async (dir, args) => {
return response.stdout.trim(); return response.stdout.trim();
}; };
const getCommonAncestor = async (dir, point1, point2) => { const getCommonAncestor = async (
dir: string,
point1: string,
point2: string
) => {
return runGit(dir, ['merge-base', point1, point2]); return runGit(dir, ['merge-base', point1, point2]);
}; };
const getNoteFromClerk = async (ghKey) => { const getNoteFromClerk = async (ghKey: GHKey) => {
const comments = await getComments(ghKey); const comments = await getComments(ghKey);
if (!comments || !comments.data) return; if (!comments || !comments.data) return;
@ -105,28 +129,29 @@ const getNoteFromClerk = async (ghKey) => {
const QUOTE_LEAD = '> '; const QUOTE_LEAD = '> ';
for (const comment of comments.data.reverse()) { for (const comment of comments.data.reverse()) {
if (comment.user.login !== CLERK_LOGIN) { if (comment.user?.login !== CLERK_LOGIN) {
continue; continue;
} }
if (comment.body === CLERK_NO_NOTES) { if (comment.body === CLERK_NO_NOTES) {
return NO_NOTES; return NO_NOTES;
} }
if (comment.body.startsWith(PERSIST_LEAD)) { if (comment.body?.startsWith(PERSIST_LEAD)) {
let lines = comment.body let lines = comment.body
.slice(PERSIST_LEAD.length).trim() // remove PERSIST_LEAD .slice(PERSIST_LEAD.length)
.trim() // remove PERSIST_LEAD
.split(/\r?\n/) // split into lines .split(/\r?\n/) // split into lines
.map(line => line.trim()) .map((line) => line.trim())
.map(line => line.replace('&lt;', '<')) .map((line) => line.replace('&lt;', '<'))
.map(line => line.replace('&gt;', '>')) .map((line) => line.replace('&gt;', '>'))
.filter(line => line.startsWith(QUOTE_LEAD)) // notes are quoted .filter((line) => line.startsWith(QUOTE_LEAD)) // notes are quoted
.map(line => line.slice(QUOTE_LEAD.length)); // unquote the lines .map((line) => line.slice(QUOTE_LEAD.length)); // unquote the lines
const firstLine = lines.shift(); const firstLine = lines.shift();
// indent anything after the first line to ensure that // indent anything after the first line to ensure that
// multiline notes with their own sub-lists don't get // multiline notes with their own sub-lists don't get
// parsed in the markdown as part of the top-level list // parsed in the markdown as part of the top-level list
// (example: https://github.com/electron/electron/pull/25216) // (example: https://github.com/electron/electron/pull/25216)
lines = lines.map(line => ' ' + line); lines = lines.map((line) => ' ' + line);
return [firstLine, ...lines] return [firstLine, ...lines]
.join('\n') // join the lines .join('\n') // join the lines
.trim(); .trim();
@ -146,7 +171,7 @@ const getNoteFromClerk = async (ghKey) => {
* line starting with 'BREAKING CHANGE' in body -- sets isBreakingChange * line starting with 'BREAKING CHANGE' in body -- sets isBreakingChange
* 'Backport of #99999' -- sets pr * 'Backport of #99999' -- sets pr
*/ */
const parseCommitMessage = (commitMessage, commit) => { const parseCommitMessage = (commitMessage: string, commit: Commit) => {
const { owner, repo } = commit; const { owner, repo } = commit;
// split commitMessage into subject & body // split commitMessage into subject & body
@ -180,23 +205,32 @@ const parseCommitMessage = (commitMessage, commit) => {
} }
// Check for a comment that indicates a PR // Check for a comment that indicates a PR
const backportPattern = /(?:^|\n)(?:manual |manually )?backport.*(?:#(\d+)|\/pull\/(\d+))/im; const backportPattern =
/(?:^|\n)(?:manual |manually )?backport.*(?:#(\d+)|\/pull\/(\d+))/im;
if ((match = commitMessage.match(backportPattern))) { if ((match = commitMessage.match(backportPattern))) {
// This might be the first or second capture group depending on if it's a link or not. // This might be the first or second capture group depending on if it's a link or not.
const backportNumber = match[1] ? parseInt(match[1], 10) : parseInt(match[2], 10); const backportNumber = match[1]
? parseInt(match[1], 10)
: parseInt(match[2], 10);
commit.prKeys.add(new GHKey(owner, repo, backportNumber)); commit.prKeys.add(new GHKey(owner, repo, backportNumber));
} }
// https://help.github.com/articles/closing-issues-using-keywords/ // https://help.github.com/articles/closing-issues-using-keywords/
if (body.match(/\b(?:close|closes|closed|fix|fixes|fixed|resolve|resolves|resolved|for)\s#(\d+)\b/i)) { if (
body.match(
/\b(?:close|closes|closed|fix|fixes|fixed|resolve|resolves|resolved|for)\s#(\d+)\b/i
)
) {
commit.semanticType = commit.semanticType || 'fix'; commit.semanticType = commit.semanticType || 'fix';
} }
// https://www.conventionalcommits.org/en // https://www.conventionalcommits.org/en
if (commitMessage if (
commitMessage
.split(/\r?\n/) // split into lines .split(/\r?\n/) // split into lines
.map(line => line.trim()) .map((line) => line.trim())
.some(line => line.startsWith('BREAKING CHANGE'))) { .some((line) => line.startsWith('BREAKING CHANGE'))
) {
commit.isBreakingChange = true; commit.isBreakingChange = true;
} }
@ -209,76 +243,109 @@ const parseCommitMessage = (commitMessage, commit) => {
return commit; return commit;
}; };
const parsePullText = (pull, commit) => parseCommitMessage(`${pull.data.title}\n\n${pull.data.body}`, commit); const parsePullText = (pull: MinimalPR, commit: Commit) =>
parseCommitMessage(`${pull.title}\n\n${pull.body}`, commit);
const getLocalCommitHashes = async (dir, ref) => { const getLocalCommitHashes = async (dir: string, ref: string) => {
const args = ['log', '--format=%H', ref]; const args = ['log', '--format=%H', ref];
return (await runGit(dir, args)) return (await runGit(dir, args))
.split(/\r?\n/) // split into lines .split(/\r?\n/) // split into lines
.map(hash => hash.trim()); .map((hash) => hash.trim());
}; };
// return an array of Commits // return an array of Commits
const getLocalCommits = async (module, point1, point2) => { const getLocalCommits = async (
module: LocalRepo,
point1: string,
point2: string
) => {
const { owner, repo, dir } = module; const { owner, repo, dir } = module;
const fieldSep = ','; const fieldSep = ',';
const format = ['%H', '%s'].join(fieldSep); const format = ['%H', '%s'].join(fieldSep);
const args = ['log', '--cherry-pick', '--right-only', '--first-parent', `--format=${format}`, `${point1}..${point2}`]; const args = [
'log',
'--cherry-pick',
'--right-only',
'--first-parent',
`--format=${format}`,
`${point1}..${point2}`
];
const logs = (await runGit(dir, args)) const logs = (await runGit(dir, args))
.split(/\r?\n/) // split into lines .split(/\r?\n/) // split into lines
.map(field => field.trim()); .map((field) => field.trim());
const commits = []; const commits = [];
for (const log of logs) { for (const log of logs) {
if (!log) { if (!log) {
continue; continue;
} }
const [hash, subject] = log.split(fieldSep, 2).map(field => field.trim()); const [hash, subject] = log.split(fieldSep, 2).map((field) => field.trim());
commits.push(parseCommitMessage(subject, new Commit(hash, owner, repo))); commits.push(parseCommitMessage(subject, new Commit(hash, owner, repo)));
} }
return commits; return commits;
}; };
const checkCache = async (name, operation) => { const checkCache = async <T>(
const filename = path.resolve(getCacheDir(), name); name: string,
if (fs.existsSync(filename)) { operation: () => Promise<T>
return JSON.parse(fs.readFileSync(filename, 'utf8')); ): Promise<T> => {
const filename = _resolve(getCacheDir(), name);
if (existsSync(filename)) {
return JSON.parse(readFileSync(filename, 'utf8'));
} }
process.stdout.write('.'); process.stdout.write('.');
const response = await operation(); const response = await operation();
if (response) { if (response) {
fs.writeFileSync(filename, JSON.stringify(response)); writeFileSync(filename, JSON.stringify(response));
} }
return response; return response;
}; };
// helper function to add some resiliency to volatile GH api endpoints // helper function to add some resiliency to volatile GH api endpoints
async function runRetryable (fn, maxRetries) { async function runRetryable<T> (
let lastError; fn: () => Promise<T>,
maxRetries: number
): Promise<T | null> {
let lastError: Error & { status?: number };
for (let i = 0; i < maxRetries; i++) { for (let i = 0; i < maxRetries; i++) {
try { try {
return await fn(); return await fn();
} catch (error) { } catch (error) {
await new Promise(resolve => setTimeout(resolve, CHECK_INTERVAL)); await new Promise((resolve) => setTimeout(resolve, CHECK_INTERVAL));
lastError = error; lastError = error as any;
} }
} }
// Silently eat 404s. // Silently eat 404s.
// Silently eat 422s, which come from "No commit found for SHA" // Silently eat 422s, which come from "No commit found for SHA"
if (lastError.status !== 404 && lastError.status !== 422) throw lastError; // eslint-disable-next-line no-throw-literal
if (lastError!.status !== 404 && lastError!.status !== 422) throw lastError!;
return null;
} }
const getPullCacheFilename = ghKey => `${ghKey.owner}-${ghKey.repo}-pull-${ghKey.number}`; const getPullCacheFilename = (ghKey: GHKey) =>
`${ghKey.owner}-${ghKey.repo}-pull-${ghKey.number}`;
const getCommitPulls = async (owner, repo, hash) => { const getCommitPulls = async (owner: string, repo: string, hash: string) => {
const name = `${owner}-${repo}-commit-${hash}`; const name = `${owner}-${repo}-commit-${hash}`;
const retryableFunc = () => octokit.repos.listPullRequestsAssociatedWithCommit({ owner, repo, commit_sha: hash }); const retryableFunc = async () => {
let ret = await checkCache(name, () => runRetryable(retryableFunc, MAX_FAIL_COUNT)); const { data } = await octokit.repos.listPullRequestsAssociatedWithCommit({
owner,
repo,
commit_sha: hash
});
return {
data
};
};
let ret = await checkCache(name, () =>
runRetryable(retryableFunc, MAX_FAIL_COUNT)
);
// only merged pulls belong in release notes // only merged pulls belong in release notes
if (ret && ret.data) { if (ret && ret.data) {
ret.data = ret.data.filter(pull => pull.merged_at); ret.data = ret.data.filter((pull) => pull.merged_at);
} }
// cache the pulls // cache the pulls
@ -286,7 +353,7 @@ const getCommitPulls = async (owner, repo, hash) => {
for (const pull of ret.data) { for (const pull of ret.data) {
const cachefile = getPullCacheFilename(GHKey.NewFromPull(pull)); const cachefile = getPullCacheFilename(GHKey.NewFromPull(pull));
const payload = { ...ret, data: pull }; const payload = { ...ret, data: pull };
await checkCache(cachefile, () => payload); await checkCache(cachefile, async () => payload);
} }
} }
@ -298,21 +365,39 @@ const getCommitPulls = async (owner, repo, hash) => {
return ret; return ret;
}; };
const getPullRequest = async (ghKey) => { const getPullRequest = async (ghKey: GHKey) => {
const { number, owner, repo } = ghKey; const { number, owner, repo } = ghKey;
const name = getPullCacheFilename(ghKey); const name = getPullCacheFilename(ghKey);
const retryableFunc = () => octokit.pulls.get({ pull_number: number, owner, repo }); const retryableFunc = () =>
octokit.pulls.get({ pull_number: number, owner, repo });
return checkCache(name, () => runRetryable(retryableFunc, MAX_FAIL_COUNT)); return checkCache(name, () => runRetryable(retryableFunc, MAX_FAIL_COUNT));
}; };
const getComments = async (ghKey) => { const getComments = async (ghKey: GHKey) => {
const { number, owner, repo } = ghKey; const { number, owner, repo } = ghKey;
const name = `${owner}-${repo}-issue-${number}-comments`; const name = `${owner}-${repo}-issue-${number}-comments`;
const retryableFunc = () => octokit.issues.listComments({ issue_number: number, owner, repo, per_page: 100 }); const retryableFunc = () =>
octokit.issues.listComments({
issue_number: number,
owner,
repo,
per_page: 100
});
return checkCache(name, () => runRetryable(retryableFunc, MAX_FAIL_COUNT)); return checkCache(name, () => runRetryable(retryableFunc, MAX_FAIL_COUNT));
}; };
const addRepoToPool = async (pool, repo, from, to) => { type LocalRepo = {
owner: string;
repo: string;
dir: string;
};
const addRepoToPool = async (
pool: Pool,
repo: LocalRepo,
from: string,
to: string
) => {
const commonAncestor = await getCommonAncestor(repo.dir, from, to); const commonAncestor = await getCommonAncestor(repo.dir, from, to);
// mark the old branch's commits as old news // mark the old branch's commits as old news
@ -337,42 +422,59 @@ const addRepoToPool = async (pool, repo, from, to) => {
for (prKey of commit.prKeys.values()) { for (prKey of commit.prKeys.values()) {
const pull = await getPullRequest(prKey); const pull = await getPullRequest(prKey);
if (!pull || !pull.data) continue; // couldn't get it if (!pull || !pull.data) continue; // couldn't get it
pool.pulls[prKey.number] = pull; pool.pulls[prKey.number] = pull.data;
parsePullText(pull, commit); parsePullText(pull.data, commit);
} }
} }
}; };
type MinimalComment = {
user: {
login: string;
} | null;
body?: string;
};
// @return Map<string,GHKey> // @return Map<string,GHKey>
// where the key is a branch name (e.g. '7-1-x' or '8-x-y') // where the key is a branch name (e.g. '7-1-x' or '8-x-y')
// and the value is a GHKey to the PR // and the value is a GHKey to the PR
async function getMergedTrops (commit, pool) { async function getMergedTrops (commit: Commit, pool: Pool) {
const branches = new Map(); const branches = new Map();
for (const prKey of commit.prKeys.values()) { for (const prKey of commit.prKeys.values()) {
const pull = pool.pulls[prKey.number]; const pull = pool.pulls[prKey.number];
const mergedBranches = new Set( const mergedBranches = new Set(
((pull && pull.data && pull.data.labels) ? pull.data.labels : []) (pull && pull && pull.labels ? pull.labels : [])
.map(label => ((label && label.name) ? label.name : '').match(/merged\/([0-9]+-[x0-9]-[xy0-9])/)) .map((label) =>
.filter(match => match) (label && label.name ? label.name : '').match(
.map(match => match[1]) /merged\/([0-9]+-[x0-9]-[xy0-9])/
)
)
.filter((match) => !!match)
.map((match) => match[1])
); );
if (mergedBranches.size > 0) { if (mergedBranches.size > 0) {
const isTropComment = (comment) => comment && comment.user && comment.user.login === TROP_LOGIN; const isTropComment = (comment: MinimalComment | null) =>
comment && comment.user && comment.user.login === TROP_LOGIN;
const ghKey = GHKey.NewFromPull(pull.data); const ghKey = GHKey.NewFromPull(pull);
const backportRegex = /backported this PR to "(.*)",\s+please check out #(\d+)/; const backportRegex =
const getBranchNameAndPullKey = (comment) => { /backported this PR to "(.*)",\s+please check out #(\d+)/;
const match = ((comment && comment.body) ? comment.body : '').match(backportRegex); const getBranchNameAndPullKey = (comment: MinimalComment) => {
return match ? [match[1], new GHKey(ghKey.owner, ghKey.repo, parseInt(match[2]))] : null; const match = (comment && comment.body ? comment.body : '').match(
backportRegex
);
return match
? <const>[match[1], new GHKey(ghKey.owner, ghKey.repo, parseInt(match[2]))]
: null;
}; };
const comments = await getComments(ghKey); const comments = await getComments(ghKey);
((comments && comments.data) ? comments.data : []) (comments && comments.data ? comments.data : [])
.filter(isTropComment) .filter(isTropComment)
.map(getBranchNameAndPullKey) .map(getBranchNameAndPullKey)
.filter(pair => pair) .filter((pair) => !!pair)
.filter(([branch]) => mergedBranches.has(branch)) .filter(([branch]) => mergedBranches.has(branch))
.forEach(([branch, key]) => branches.set(branch, key)); .forEach(([branch, key]) => branches.set(branch, key));
} }
@ -383,12 +485,20 @@ async function getMergedTrops (commit, pool) {
// @return the shorthand name of the branch that `ref` is on, // @return the shorthand name of the branch that `ref` is on,
// e.g. a ref of '10.0.0-beta.1' will return '10-x-y' // e.g. a ref of '10.0.0-beta.1' will return '10-x-y'
async function getBranchNameOfRef (ref, dir) { async function getBranchNameOfRef (ref: string, dir: string) {
return (await runGit(dir, ['branch', '--all', '--contains', ref, '--sort', 'version:refname'])) const result = await runGit(dir, [
'branch',
'--all',
'--contains',
ref,
'--sort',
'version:refname'
]);
return result
.split(/\r?\n/) // split into lines .split(/\r?\n/) // split into lines
.shift() // we sorted by refname and want the first result .shift()! // we sorted by refname and want the first result
.match(/(?:\s?\*\s){0,1}(.*)/)[1] // if present, remove leading '* ' in case we're currently in that branch .match(/(?:\s?\*\s){0,1}(.*)/)![1] // if present, remove leading '* ' in case we're currently in that branch
.match(/(?:.*\/)?(.*)/)[1] // 'remote/origins/10-x-y' -> '10-x-y' .match(/(?:.*\/)?(.*)/)![1] // 'remote/origins/10-x-y' -> '10-x-y'
.trim(); .trim();
} }
@ -396,23 +506,27 @@ async function getBranchNameOfRef (ref, dir) {
**** Main **** Main
***/ ***/
const getNotes = async (fromRef, toRef, newVersion) => { const getNotes = async (fromRef: string, toRef: string, newVersion: string) => {
const cacheDir = getCacheDir(); const cacheDir = getCacheDir();
if (!fs.existsSync(cacheDir)) { if (!existsSync(cacheDir)) {
fs.mkdirSync(cacheDir); mkdirSync(cacheDir);
} }
const pool = new Pool(); const pool = new Pool();
const toBranch = await getBranchNameOfRef(toRef, ELECTRON_DIR); const toBranch = await getBranchNameOfRef(toRef, ELECTRON_DIR);
console.log(`Generating release notes between '${fromRef}' and '${toRef}' for version '${newVersion}' in branch '${toBranch}'`); console.log(
`Generating release notes between '${fromRef}' and '${toRef}' for version '${newVersion}' in branch '${toBranch}'`
);
// get the electron/electron commits // get the electron/electron commits
const electron = { owner: 'electron', repo: 'electron', dir: ELECTRON_DIR }; const electron = { owner: 'electron', repo: 'electron', dir: ELECTRON_DIR };
await addRepoToPool(pool, electron, fromRef, toRef); await addRepoToPool(pool, electron, fromRef, toRef);
// remove any old commits // remove any old commits
pool.commits = pool.commits.filter(commit => !pool.processedHashes.has(commit.hash)); pool.commits = pool.commits.filter(
(commit) => !pool.processedHashes.has(commit.hash)
);
// if a commit _and_ revert occurred in the unprocessed set, skip them both // if a commit _and_ revert occurred in the unprocessed set, skip them both
for (const commit of pool.commits) { for (const commit of pool.commits) {
@ -421,7 +535,7 @@ const getNotes = async (fromRef, toRef, newVersion) => {
continue; continue;
} }
const revert = pool.commits.find(commit => commit.hash === revertHash); const revert = pool.commits.find((commit) => commit.hash === revertHash);
if (!revert) { if (!revert) {
continue; continue;
} }
@ -438,15 +552,15 @@ const getNotes = async (fromRef, toRef, newVersion) => {
if (commit.note) { if (commit.note) {
break; break;
} }
commit.note = await getNoteFromClerk(prKey); commit.note = await getNoteFromClerk(prKey) || null;
} }
} }
// remove non-user-facing commits // remove non-user-facing commits
pool.commits = pool.commits pool.commits = pool.commits
.filter(commit => commit && commit.note) .filter((commit) => commit && commit.note)
.filter(commit => commit.note !== NO_NOTES) .filter((commit) => commit.note !== NO_NOTES)
.filter(commit => commit.note.match(/^[Bb]ump v\d+\.\d+\.\d+/) === null); .filter((commit) => commit.note!.match(/^[Bb]ump v\d+\.\d+\.\d+/) === null);
for (const commit of pool.commits) { for (const commit of pool.commits) {
commit.trops = await getMergedTrops(commit, pool); commit.trops = await getMergedTrops(commit, pool);
@ -455,12 +569,12 @@ const getNotes = async (fromRef, toRef, newVersion) => {
pool.commits = removeSupercededStackUpdates(pool.commits); pool.commits = removeSupercededStackUpdates(pool.commits);
const notes = { const notes = {
breaking: [], breaking: [] as Commit[],
docs: [], docs: [] as Commit[],
feat: [], feat: [] as Commit[],
fix: [], fix: [] as Commit[],
other: [], other: [] as Commit[],
unknown: [], unknown: [] as Commit[],
name: newVersion, name: newVersion,
toBranch toBranch
}; };
@ -487,13 +601,13 @@ const getNotes = async (fromRef, toRef, newVersion) => {
return notes; return notes;
}; };
const removeSupercededStackUpdates = (commits) => { const removeSupercededStackUpdates = (commits: Commit[]) => {
const updateRegex = /^Updated ([a-zA-Z.]+) to v?([\d.]+)/; const updateRegex = /^Updated ([a-zA-Z.]+) to v?([\d.]+)/;
const notupdates = []; const notupdates = [];
const newest = {}; const newest: Record<string, { commit: Commit; version: string }> = Object.create(null);
for (const commit of commits) { for (const commit of commits) {
const match = (commit.note || commit.subject).match(updateRegex); const match = (commit.note || commit.subject)?.match(updateRegex);
if (!match) { if (!match) {
notupdates.push(commit); notupdates.push(commit);
continue; continue;
@ -504,7 +618,7 @@ const removeSupercededStackUpdates = (commits) => {
} }
} }
return [...notupdates, ...Object.values(newest).map(o => o.commit)]; return [...notupdates, ...Object.values(newest).map((o) => o.commit)];
}; };
/*** /***
@ -512,40 +626,48 @@ const removeSupercededStackUpdates = (commits) => {
***/ ***/
// @return the pull request's GitHub URL // @return the pull request's GitHub URL
const buildPullURL = ghKey => `https://github.com/${ghKey.owner}/${ghKey.repo}/pull/${ghKey.number}`; const buildPullURL = (ghKey: GHKey) =>
`https://github.com/${ghKey.owner}/${ghKey.repo}/pull/${ghKey.number}`;
const renderPull = ghKey => `[#${ghKey.number}](${buildPullURL(ghKey)})`; const renderPull = (ghKey: GHKey) =>
`[#${ghKey.number}](${buildPullURL(ghKey)})`;
// @return the commit's GitHub URL // @return the commit's GitHub URL
const buildCommitURL = commit => `https://github.com/${commit.owner}/${commit.repo}/commit/${commit.hash}`; const buildCommitURL = (commit: Commit) =>
`https://github.com/${commit.owner}/${commit.repo}/commit/${commit.hash}`;
const renderCommit = commit => `[${commit.hash.slice(0, 8)}](${buildCommitURL(commit)})`; const renderCommit = (commit: Commit) =>
`[${commit.hash.slice(0, 8)}](${buildCommitURL(commit)})`;
// @return a markdown link to the PR if available; otherwise, the git commit // @return a markdown link to the PR if available; otherwise, the git commit
function renderLink (commit) { function renderLink (commit: Commit) {
const maybePull = commit.prKeys.values().next(); const maybePull = commit.prKeys.values().next();
return maybePull.value ? renderPull(maybePull.value) : renderCommit(commit); return maybePull.value ? renderPull(maybePull.value) : renderCommit(commit);
} }
// @return a terser branch name, // @return a terser branch name,
// e.g. '7-2-x' -> '7.2' and '8-x-y' -> '8' // e.g. '7-2-x' -> '7.2' and '8-x-y' -> '8'
const renderBranchName = name => name.replace(/-[a-zA-Z]/g, '').replace('-', '.'); const renderBranchName = (name: string) =>
name.replace(/-[a-zA-Z]/g, '').replace('-', '.');
const renderTrop = (branch, ghKey) => `[${renderBranchName(branch)}](${buildPullURL(ghKey)})`; const renderTrop = (branch: string, ghKey: GHKey) =>
`[${renderBranchName(branch)}](${buildPullURL(ghKey)})`;
// @return markdown-formatted links to other branches' trops, // @return markdown-formatted links to other branches' trops,
// e.g. "(Also in 7.2, 8, 9)" // e.g. "(Also in 7.2, 8, 9)"
function renderTrops (commit, excludeBranch) { function renderTrops (commit: Commit, excludeBranch: string) {
const body = [...commit.trops.entries()] const body = [...commit.trops.entries()]
.filter(([branch]) => branch !== excludeBranch) .filter(([branch]) => branch !== excludeBranch)
.sort(([branchA], [branchB]) => parseInt(branchA) - parseInt(branchB)) // sort by semver major .sort(([branchA], [branchB]) => parseInt(branchA) - parseInt(branchB)) // sort by semver major
.map(([branch, key]) => renderTrop(branch, key)) .map(([branch, key]) => renderTrop(branch, key))
.join(', '); .join(', ');
return body ? `<span style="font-size:small;">(Also in ${body})</span>` : body; return body
? `<span style="font-size:small;">(Also in ${body})</span>`
: body;
} }
// @return a slightly cleaned-up human-readable change description // @return a slightly cleaned-up human-readable change description
function renderDescription (commit) { function renderDescription (commit: Commit) {
let note = commit.note || commit.subject || ''; let note = commit.note || commit.subject || '';
note = note.trim(); note = note.trim();
@@ -597,21 +719,26 @@ function renderDescription (commit) {
// @return markdown-formatted release note line item, // @return markdown-formatted release note line item,
// e.g. '* Fixed a foo. #12345 (Also in 7.2, 8, 9)' // e.g. '* Fixed a foo. #12345 (Also in 7.2, 8, 9)'
const renderNote = (commit, excludeBranch) => const renderNote = (commit: Commit, excludeBranch: string) =>
`* ${renderDescription(commit)} ${renderLink(commit)} ${renderTrops(commit, excludeBranch)}\n`; `* ${renderDescription(commit)} ${renderLink(commit)} ${renderTrops(
commit,
excludeBranch
)}\n`;
const renderNotes = (notes, unique = false) => { const renderNotes = (notes: Awaited<ReturnType<typeof getNotes>>, unique = false) => {
const rendered = [`# Release Notes for ${notes.name}\n\n`]; const rendered = [`# Release Notes for ${notes.name}\n\n`];
const renderSection = (title, commits, unique) => { const renderSection = (title: string, commits: Commit[], unique: boolean) => {
if (unique) { if (unique) {
// omit changes that also landed in other branches // omit changes that also landed in other branches
commits = commits.filter((commit) => renderTrops(commit, notes.toBranch).length === 0); commits = commits.filter(
(commit) => renderTrops(commit, notes.toBranch).length === 0
);
} }
if (commits.length > 0) { if (commits.length > 0) {
rendered.push( rendered.push(
`## ${title}\n\n`, `## ${title}\n\n`,
...(commits.map(commit => renderNote(commit, notes.toBranch)).sort()) ...commits.map((commit) => renderNote(commit, notes.toBranch)).sort()
); );
} }
}; };
@@ -622,8 +749,12 @@ const renderNotes = (notes, unique = false) => {
renderSection('Other Changes', notes.other, unique); renderSection('Other Changes', notes.other, unique);
if (notes.docs.length) { if (notes.docs.length) {
const docs = notes.docs.map(commit => renderLink(commit)).sort(); const docs = notes.docs.map((commit) => renderLink(commit)).sort();
rendered.push('## Documentation\n\n', ` * Documentation changes: ${docs.join(', ')}\n`, '\n'); rendered.push(
'## Documentation\n\n',
` * Documentation changes: ${docs.join(', ')}\n`,
'\n'
);
} }
renderSection('Unknown', notes.unknown, unique); renderSection('Unknown', notes.unknown, unique);
@@ -635,7 +766,5 @@ const renderNotes = (notes, unique = false) => {
**** Module **** Module
***/ ***/
module.exports = { export const get = getNotes;
get: getNotes, export const render = renderNotes;
render: renderNotes
};
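With the CommonJS export object replaced by named exports, downstream callers can use static imports. A consumer sketch (the argument list of get/getNotes is defined earlier in the file and is not shown in this hunk, so it is left abstract here):

import { get, render } from './notes';

// Assume `notes` was produced by `await get(...)`.
declare const notes: Awaited<ReturnType<typeof get>>;

console.log(render(notes));        // full notes for the branch
console.log(render(notes, true));  // unique: drop changes that also landed on other branches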
View file
@@ -1,24 +1,44 @@
#!/usr/bin/env node #!/usr/bin/env node
if (!process.env.CI) require('dotenv-safe').load(); import { Octokit } from '@octokit/rest';
const args = require('minimist')(process.argv.slice(2), { import { GitProcess } from 'dugite';
boolean: ['automaticRelease', 'notesOnly', 'stable'] import { execSync } from 'node:child_process';
}); import { join } from 'node:path';
const ciReleaseBuild = require('./ci-release-build'); import { createInterface } from 'node:readline';
const { Octokit } = require('@octokit/rest'); import { parseArgs } from 'node:util';
const { execSync } = require('node:child_process');
const { GitProcess } = require('dugite');
const path = require('node:path'); import ciReleaseBuild from './ci-release-build';
const readline = require('node:readline'); import releaseNotesGenerator from './notes';
const releaseNotesGenerator = require('./notes/index.js'); import { getCurrentBranch, ELECTRON_DIR } from '../lib/utils.js';
const { getCurrentBranch, ELECTRON_DIR } = require('../lib/utils.js'); import { createGitHubTokenStrategy } from './github-token';
const { createGitHubTokenStrategy } = require('./github-token'); import { ELECTRON_REPO, ElectronReleaseRepo, NIGHTLY_REPO } from './types';
const bumpType = args._[0];
const { values: { notesOnly, dryRun: dryRunArg, stable: isStableArg, branch: branchArg, automaticRelease }, positionals } = parseArgs({
options: {
notesOnly: {
type: 'boolean'
},
dryRun: {
type: 'boolean'
},
stable: {
type: 'boolean'
},
branch: {
type: 'string'
},
automaticRelease: {
type: 'boolean'
}
},
allowPositionals: true
});
const bumpType = positionals[0];
const targetRepo = getRepo(); const targetRepo = getRepo();
function getRepo () { function getRepo (): ElectronReleaseRepo {
return bumpType === 'nightly' ? 'nightlies' : 'electron'; return bumpType === 'nightly' ? NIGHTLY_REPO : ELECTRON_REPO;
} }
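The switch from minimist to node:util's built-in parseArgs (above) is the recurring migration pattern in this PR; the general shape, as a standalone sketch with made-up option names:

import { parseArgs } from 'node:util';

// minimist equivalent: require('minimist')(process.argv.slice(2), { boolean: ['dryRun'] })
const { values, positionals } = parseArgs({
  options: {
    dryRun: { type: 'boolean' },   // --dryRun
    branch: { type: 'string' }     // --branch=30-x-y
  },
  allowPositionals: true           // keep bare arguments (minimist's `_`), e.g. the bump type
});

One behavioral difference worth noting: parseArgs runs in strict mode by default and throws on unknown options, whereas minimist silently collects them.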
const octokit = new Octokit({ const octokit = new Octokit({
@@ -29,24 +49,34 @@ require('colors'); require('colors');
const pass = '✓'.green; const pass = '✓'.green;
const fail = '✗'.red; const fail = '✗'.red;
if (!bumpType && !args.notesOnly) { if (!bumpType && !notesOnly) {
console.log('Usage: prepare-release [stable | minor | beta | alpha | nightly]' + console.log('Usage: prepare-release [stable | minor | beta | alpha | nightly]' +
' (--stable) (--notesOnly) (--automaticRelease) (--branch)'); ' (--stable) (--notesOnly) (--automaticRelease) (--branch)');
process.exit(1); process.exit(1);
} }
async function getNewVersion (dryRun) { enum DryRunMode {
if (!dryRun) { DRY_RUN,
REAL_RUN,
}
async function getNewVersion (dryRunMode: DryRunMode) {
if (dryRunMode === DryRunMode.REAL_RUN) {
console.log(`Bumping for new "${bumpType}" version.`); console.log(`Bumping for new "${bumpType}" version.`);
} }
const bumpScript = path.join(__dirname, 'version-bumper.js'); const bumpScript = join(__dirname, 'version-bumper.ts');
const scriptArgs = ['node', bumpScript, `--bump=${bumpType}`]; const scriptArgs = [
if (dryRun) scriptArgs.push('--dryRun'); 'node',
'node_modules/.bin/ts-node',
bumpScript,
`--bump=${bumpType}`
];
if (dryRunMode === DryRunMode.DRY_RUN) scriptArgs.push('--dryRun');
try { try {
let bumpVersion = execSync(scriptArgs.join(' '), { encoding: 'UTF-8' }); let bumpVersion = execSync(scriptArgs.join(' '), { encoding: 'utf-8' });
bumpVersion = bumpVersion.substr(bumpVersion.indexOf(':') + 1).trim(); bumpVersion = bumpVersion.substr(bumpVersion.indexOf(':') + 1).trim();
const newVersion = `v${bumpVersion}`; const newVersion = `v${bumpVersion}`;
if (!dryRun) { if (dryRunMode === DryRunMode.REAL_RUN) {
console.log(`${pass} Successfully bumped version to ${newVersion}`); console.log(`${pass} Successfully bumped version to ${newVersion}`);
} }
return newVersion; return newVersion;
@@ -56,7 +86,7 @@ async function getNewVersion (dryRun) {
} }
} }
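Swapping the old boolean flag for the two-member DryRunMode enum above makes every call site self-describing:

// Before: getNewVersion(true)             — true what? dry run? stable?
// After:  getNewVersion(DryRunMode.DRY_RUN) / getNewVersion(DryRunMode.REAL_RUN)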
async function getReleaseNotes (currentBranch, newVersion) { async function getReleaseNotes (currentBranch: string, newVersion: string) {
if (bumpType === 'nightly') { if (bumpType === 'nightly') {
return { text: 'Nightlies do not get release notes, please compare tags for info.' }; return { text: 'Nightlies do not get release notes, please compare tags for info.' };
} }
@@ -68,8 +98,8 @@ async function getReleaseNotes (currentBranch, newVersion) {
return releaseNotes; return releaseNotes;
} }
async function createRelease (branchToTarget, isBeta) { async function createRelease (branchToTarget: string, isPreRelease: boolean) {
const newVersion = await getNewVersion(); const newVersion = await getNewVersion(DryRunMode.REAL_RUN);
const releaseNotes = await getReleaseNotes(branchToTarget, newVersion); const releaseNotes = await getReleaseNotes(branchToTarget, newVersion);
await tagRelease(newVersion); await tagRelease(newVersion);
@@ -79,6 +109,7 @@ async function createRelease (branchToTarget, isBeta) {
repo: targetRepo repo: targetRepo
}).catch(err => { }).catch(err => {
console.log(`${fail} Could not get releases. Error was: `, err); console.log(`${fail} Could not get releases. Error was: `, err);
throw err;
}); });
const drafts = releases.data.filter(release => release.draft && const drafts = releases.data.filter(release => release.draft &&
@@ -92,7 +123,7 @@ async function createRelease (branchToTarget, isBeta) {
let releaseBody; let releaseBody;
let releaseIsPrelease = false; let releaseIsPrelease = false;
if (isBeta) { if (isPreRelease) {
if (newVersion.indexOf('nightly') > 0) { if (newVersion.indexOf('nightly') > 0) {
releaseBody = 'Note: This is a nightly release. Please file new issues ' + releaseBody = 'Note: This is a nightly release. Please file new issues ' +
'for any bugs you find in it.\n \n This release is published to npm ' + 'for any bugs you find in it.\n \n This release is published to npm ' +
@@ -132,7 +163,7 @@ async function createRelease (branchToTarget, isBeta) {
console.log(`${pass} Draft release for ${newVersion} successful.`); console.log(`${pass} Draft release for ${newVersion} successful.`);
} }
async function pushRelease (branch) { async function pushRelease (branch: string) {
const pushDetails = await GitProcess.exec(['push', 'origin', `HEAD:${branch}`, '--follow-tags'], ELECTRON_DIR); const pushDetails = await GitProcess.exec(['push', 'origin', `HEAD:${branch}`, '--follow-tags'], ELECTRON_DIR);
if (pushDetails.exitCode === 0) { if (pushDetails.exitCode === 0) {
console.log(`${pass} Successfully pushed the release. Wait for ` + console.log(`${pass} Successfully pushed the release. Wait for ` +
@@ -143,14 +174,15 @@ async function pushRelease (branch) {
} }
} }
async function runReleaseBuilds (branch, newVersion) { async function runReleaseBuilds (branch: string, newVersion: string) {
await ciReleaseBuild(branch, { await ciReleaseBuild(branch, {
ci: undefined,
ghRelease: true, ghRelease: true,
newVersion newVersion
}); });
} }
async function tagRelease (version) { async function tagRelease (version: string) {
console.log(`Tagging release ${version}.`); console.log(`Tagging release ${version}.`);
const checkoutDetails = await GitProcess.exec(['tag', '-a', '-m', version, version], ELECTRON_DIR); const checkoutDetails = await GitProcess.exec(['tag', '-a', '-m', version, version], ELECTRON_DIR);
if (checkoutDetails.exitCode === 0) { if (checkoutDetails.exitCode === 0) {
@@ -163,9 +195,9 @@ async function tagRelease (version) {
} }
async function verifyNewVersion () { async function verifyNewVersion () {
const newVersion = await getNewVersion(true); const newVersion = await getNewVersion(DryRunMode.DRY_RUN);
let response; let response;
if (args.automaticRelease) { if (automaticRelease) {
response = 'y'; response = 'y';
} else { } else {
response = await promptForVersion(newVersion); response = await promptForVersion(newVersion);
@@ -180,9 +212,9 @@ async function verifyNewVersion () {
return newVersion; return newVersion;
} }
async function promptForVersion (version) { async function promptForVersion (version: string) {
return new Promise(resolve => { return new Promise<string>(resolve => {
const rl = readline.createInterface({ const rl = createInterface({
input: process.stdin, input: process.stdin,
output: process.stdout output: process.stdout
}); });
@@ -200,21 +232,21 @@ async function changesToRelease () {
return !lastCommitWasRelease.test(lastCommit.stdout); return !lastCommitWasRelease.test(lastCommit.stdout);
} }
async function prepareRelease (isBeta, notesOnly) { async function prepareRelease (isPreRelease: boolean, dryRunMode: DryRunMode) {
if (args.dryRun) { if (dryRunMode === DryRunMode.DRY_RUN) {
const newVersion = await getNewVersion(true); const newVersion = await getNewVersion(DryRunMode.DRY_RUN);
console.log(newVersion); console.log(newVersion);
} else { } else {
const currentBranch = (args.branch) ? args.branch : await getCurrentBranch(ELECTRON_DIR); const currentBranch = branchArg || await getCurrentBranch(ELECTRON_DIR);
if (notesOnly) { if (notesOnly) {
const newVersion = await getNewVersion(true); const newVersion = await getNewVersion(DryRunMode.DRY_RUN);
const releaseNotes = await getReleaseNotes(currentBranch, newVersion); const releaseNotes = await getReleaseNotes(currentBranch, newVersion);
console.log(`Draft release notes are: \n${releaseNotes.text}`); console.log(`Draft release notes are: \n${releaseNotes.text}`);
} else { } else {
const changes = await changesToRelease(); const changes = await changesToRelease();
if (changes) { if (changes) {
const newVersion = await verifyNewVersion(); const newVersion = await verifyNewVersion();
await createRelease(currentBranch, isBeta); await createRelease(currentBranch, isPreRelease);
await pushRelease(currentBranch); await pushRelease(currentBranch);
await runReleaseBuilds(currentBranch, newVersion); await runReleaseBuilds(currentBranch, newVersion);
} else { } else {
@@ -225,7 +257,7 @@ async function prepareRelease (isBeta, notesOnly) {
} }
} }
prepareRelease(!args.stable, args.notesOnly) prepareRelease(!isStableArg, dryRunArg ? DryRunMode.DRY_RUN : DryRunMode.REAL_RUN)
.catch((err) => { .catch((err) => {
console.error(err); console.error(err);
process.exit(1); process.exit(1);
View file
@@ -1,23 +1,25 @@
const temp = require('temp'); import { Octokit } from '@octokit/rest';
const fs = require('node:fs'); import * as childProcess from 'node:child_process';
const path = require('node:path'); import * as fs from 'node:fs';
const childProcess = require('node:child_process'); import * as path from 'node:path';
const semver = require('semver'); import * as semver from 'semver';
import * as temp from 'temp';
const { getCurrentBranch, ELECTRON_DIR } = require('../lib/utils'); import { getCurrentBranch, ELECTRON_DIR } from '../lib/utils';
const { getElectronVersion } = require('../lib/get-version'); import { getElectronVersion } from '../lib/get-version';
const rootPackageJson = require('../../package.json');
const { Octokit } = require('@octokit/rest'); import { getAssetContents } from './get-asset';
const { getAssetContents } = require('./get-asset'); import { createGitHubTokenStrategy } from './github-token';
const { createGitHubTokenStrategy } = require('./github-token'); import { ELECTRON_ORG, ELECTRON_REPO, ElectronReleaseRepo, NIGHTLY_REPO } from './types';
const rootPackageJson = JSON.parse(fs.readFileSync(path.resolve(__dirname, '../../package.json'), 'utf-8'));
if (!process.env.ELECTRON_NPM_OTP) { if (!process.env.ELECTRON_NPM_OTP) {
console.error('Please set ELECTRON_NPM_OTP'); console.error('Please set ELECTRON_NPM_OTP');
process.exit(1); process.exit(1);
} }
let tempDir; let tempDir: string;
temp.track(); // track and cleanup files at exit temp.track(); // track and cleanup files at exit
const files = [ const files = [
@@ -49,11 +51,11 @@ const octokit = new Octokit({
authStrategy: createGitHubTokenStrategy(targetRepo) authStrategy: createGitHubTokenStrategy(targetRepo)
}); });
function getRepo () { function getRepo (): ElectronReleaseRepo {
return isNightlyElectronVersion ? 'nightlies' : 'electron'; return isNightlyElectronVersion ? NIGHTLY_REPO : ELECTRON_REPO;
} }
new Promise((resolve, reject) => { new Promise<string>((resolve, reject) => {
temp.mkdir('electron-npm', (err, dirPath) => { temp.mkdir('electron-npm', (err, dirPath) => {
if (err) { if (err) {
reject(err); reject(err);
@@ -84,8 +86,8 @@ new Promise((resolve, reject) => {
); );
return octokit.repos.listReleases({ return octokit.repos.listReleases({
owner: 'electron', owner: ELECTRON_ORG,
repo: isNightlyElectronVersion ? 'nightlies' : 'electron' repo: targetRepo
}); });
}) })
.then((releases) => { .then((releases) => {
@@ -124,7 +126,7 @@ new Promise((resolve, reject) => {
checksumsAsset.id checksumsAsset.id
); );
const checksumsObject = {}; const checksumsObject: Record<string, string> = Object.create(null);
for (const line of checksumsContent.trim().split('\n')) { for (const line of checksumsContent.trim().split('\n')) {
const [checksum, file] = line.split(' *'); const [checksum, file] = line.split(' *');
checksumsObject[file] = checksum; checksumsObject[file] = checksum;
@@ -203,7 +205,7 @@ new Promise((resolve, reject) => {
}) })
.then(() => { .then(() => {
const currentTags = JSON.parse(childProcess.execSync('npm show electron dist-tags --json').toString()); const currentTags = JSON.parse(childProcess.execSync('npm show electron dist-tags --json').toString());
const parsedLocalVersion = semver.parse(currentElectronVersion); const parsedLocalVersion = semver.parse(currentElectronVersion)!;
if (rootPackageJson.name === 'electron') { if (rootPackageJson.name === 'electron') {
// We should only customly add dist tags for non-nightly releases where the package name is still // We should only customly add dist tags for non-nightly releases where the package name is still
// "electron" // "electron"
View file
@@ -1,25 +1,42 @@
#!/usr/bin/env node #!/usr/bin/env node
if (!process.env.CI) require('dotenv-safe').load(); import { Octokit } from '@octokit/rest';
const args = require('minimist')(process.argv.slice(2), { import { parseArgs } from 'node:util';
string: ['tag', 'releaseID'],
default: { releaseID: '' } import { createGitHubTokenStrategy } from './github-token';
import { ELECTRON_ORG, ELECTRON_REPO, ElectronReleaseRepo, NIGHTLY_REPO } from './types';
const { values: { tag: _tag, releaseID } } = parseArgs({
options: {
tag: {
type: 'string'
},
releaseID: {
type: 'string',
default: ''
}
}
}); });
const { Octokit } = require('@octokit/rest');
const { createGitHubTokenStrategy } = require('./github-token'); if (!_tag) {
console.error('Missing --tag argument');
process.exit(1);
}
const tag = _tag;
require('colors'); require('colors');
const pass = '✓'.green; const pass = '✓'.green;
const fail = '✗'.red; const fail = '✗'.red;
async function deleteDraft (releaseId, targetRepo) { async function deleteDraft (releaseId: string, targetRepo: ElectronReleaseRepo) {
const octokit = new Octokit({ const octokit = new Octokit({
authStrategy: createGitHubTokenStrategy(targetRepo) authStrategy: createGitHubTokenStrategy(targetRepo)
}); });
try { try {
const result = await octokit.repos.getRelease({ const result = await octokit.repos.getRelease({
owner: 'electron', owner: ELECTRON_ORG,
repo: targetRepo, repo: targetRepo,
release_id: parseInt(releaseId, 10) release_id: parseInt(releaseId, 10)
}); });
@@ -28,7 +45,7 @@ async function deleteDraft (releaseId, targetRepo) {
return false; return false;
} else { } else {
await octokit.repos.deleteRelease({ await octokit.repos.deleteRelease({
owner: 'electron', owner: ELECTRON_ORG,
repo: targetRepo, repo: targetRepo,
release_id: result.data.id release_id: result.data.id
}); });
@@ -41,14 +58,14 @@ async function deleteDraft (releaseId, targetRepo) {
} }
} }
async function deleteTag (tag, targetRepo) { async function deleteTag (tag: string, targetRepo: ElectronReleaseRepo) {
const octokit = new Octokit({ const octokit = new Octokit({
authStrategy: createGitHubTokenStrategy(targetRepo) authStrategy: createGitHubTokenStrategy(targetRepo)
}); });
try { try {
await octokit.git.deleteRef({ await octokit.git.deleteRef({
owner: 'electron', owner: ELECTRON_ORG,
repo: targetRepo, repo: targetRepo,
ref: `tags/${tag}` ref: `tags/${tag}`
}); });
@@ -59,31 +76,35 @@ async function deleteTag (tag, targetRepo) {
} }
async function cleanReleaseArtifacts () { async function cleanReleaseArtifacts () {
const releaseId = args.releaseID.length > 0 ? args.releaseID : null; const releaseId = releaseID && releaseID.length > 0 ? releaseID : null;
const isNightly = args.tag.includes('nightly'); const isNightly = tag.includes('nightly');
if (releaseId) { if (releaseId) {
if (isNightly) { if (isNightly) {
await deleteDraft(releaseId, 'nightlies'); await deleteDraft(releaseId, NIGHTLY_REPO);
// We only need to delete the Electron tag since the // We only need to delete the Electron tag since the
// nightly tag is only created at publish-time. // nightly tag is only created at publish-time.
await deleteTag(args.tag, 'electron'); await deleteTag(tag, ELECTRON_REPO);
} else { } else {
const deletedElectronDraft = await deleteDraft(releaseId, 'electron'); const deletedElectronDraft = await deleteDraft(releaseId, ELECTRON_REPO);
// don't delete tag unless draft deleted successfully // don't delete tag unless draft deleted successfully
if (deletedElectronDraft) { if (deletedElectronDraft) {
await deleteTag(args.tag, 'electron'); await deleteTag(tag, ELECTRON_REPO);
} }
} }
} else { } else {
await Promise.all([ await Promise.all([
deleteTag(args.tag, 'electron'), deleteTag(tag, ELECTRON_REPO),
deleteTag(args.tag, 'nightlies') deleteTag(tag, NIGHTLY_REPO)
]); ]);
} }
console.log(`${pass} failed release artifact cleanup complete`); console.log(`${pass} failed release artifact cleanup complete`);
} }
cleanReleaseArtifacts(); cleanReleaseArtifacts()
.catch((err) => {
console.error(err);
process.exit(1);
});
View file
@@ -1,49 +1,51 @@
#!/usr/bin/env node #!/usr/bin/env node
if (!process.env.CI) require('dotenv-safe').load(); import { BlobServiceClient } from '@azure/storage-blob';
import { Octokit } from '@octokit/rest';
import got from 'got';
import { execSync, ExecSyncOptions } from 'node:child_process';
import { statSync, createReadStream, writeFileSync, close } from 'node:fs';
import { join } from 'node:path';
import { gte } from 'semver';
import { track as trackTemp } from 'temp';
const args = require('minimist')(process.argv.slice(2), { import { ELECTRON_DIR } from '../lib/utils';
boolean: [ import { getElectronVersion } from '../lib/get-version';
'validateRelease', import { getUrlHash } from './get-url-hash';
'verboseNugget' import { createGitHubTokenStrategy } from './github-token';
], import { ELECTRON_REPO, ElectronReleaseRepo, NIGHTLY_REPO } from './types';
default: { verboseNugget: false } import { parseArgs } from 'node:util';
});
const fs = require('node:fs'); const temp = trackTemp();
const { execSync } = require('node:child_process');
const got = require('got');
const path = require('node:path');
const semver = require('semver');
const temp = require('temp').track();
const { BlobServiceClient } = require('@azure/storage-blob');
const { Octokit } = require('@octokit/rest');
require('colors'); require('colors');
const pass = '✓'.green; const pass = '✓'.green;
const fail = '✗'.red; const fail = '✗'.red;
const { ELECTRON_DIR } = require('../lib/utils');
const { getElectronVersion } = require('../lib/get-version');
const getUrlHash = require('./get-url-hash');
const { createGitHubTokenStrategy } = require('./github-token');
const pkgVersion = `v${getElectronVersion()}`; const pkgVersion = `v${getElectronVersion()}`;
const targetRepo = pkgVersion.indexOf('nightly') > 0 ? 'nightlies' : 'electron'; function getRepo (): ElectronReleaseRepo {
return pkgVersion.indexOf('nightly') > 0 ? NIGHTLY_REPO : ELECTRON_REPO;
}
const targetRepo = getRepo();
let failureCount = 0; let failureCount = 0;
const octokit = new Octokit({ const octokit = new Octokit({
authStrategy: createGitHubTokenStrategy(targetRepo) authStrategy: createGitHubTokenStrategy(targetRepo)
}); });
async function getDraftRelease (version, skipValidation) { async function getDraftRelease (
version?: string,
skipValidation: boolean = false
) {
const releaseInfo = await octokit.repos.listReleases({ const releaseInfo = await octokit.repos.listReleases({
owner: 'electron', owner: 'electron',
repo: targetRepo repo: targetRepo
}); });
const versionToCheck = version || pkgVersion; const versionToCheck = version || pkgVersion;
const drafts = releaseInfo.data.filter(release => { const drafts = releaseInfo.data.filter((release) => {
return release.tag_name === versionToCheck && release.draft === true; return release.tag_name === versionToCheck && release.draft === true;
}); });
@@ -54,29 +56,57 @@ async function getDraftRelease (version, skipValidation) {
if (versionToCheck.includes('beta')) { if (versionToCheck.includes('beta')) {
check(draft.prerelease, 'draft is a prerelease'); check(draft.prerelease, 'draft is a prerelease');
} }
check(draft.body.length > 50 && !draft.body.includes('(placeholder)'), 'draft has release notes'); check(
check((failureCount === 0), 'Draft release looks good to go.', true); !!draft.body &&
draft.body.length > 50 &&
!draft.body.includes('(placeholder)'),
'draft has release notes'
);
check(failureCount === 0, 'Draft release looks good to go.', true);
} }
return draft; return draft;
} }
async function validateReleaseAssets (release, validatingRelease) { type MinimalRelease = {
const requiredAssets = assetsForVersion(release.tag_name, validatingRelease).sort(); id: number;
const extantAssets = release.assets.map(asset => asset.name).sort(); tag_name: string;
const downloadUrls = release.assets.map(asset => ({ url: asset.browser_download_url, file: asset.name })).sort((a, b) => a.file.localeCompare(b.file)); draft: boolean;
prerelease: boolean;
assets: {
name: string;
browser_download_url: string;
id: number;
}[];
};
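MinimalRelease is a structural subset of Octokit's release payload, so the full API response satisfies it while tests can pass a hand-rolled literal; for example (hypothetical values, not from this diff):

const fakeRelease: MinimalRelease = {
  id: 1,
  tag_name: 'v30.0.0',
  draft: true,
  prerelease: false,
  assets: [
    { name: 'SHASUMS256.txt', browser_download_url: 'https://example.invalid/SHASUMS256.txt', id: 2 }
  ]
};
// validateReleaseAssets(fakeRelease, true) can then exercise the checks offline.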
async function validateReleaseAssets (
release: MinimalRelease,
validatingRelease: boolean = false
) {
const requiredAssets = assetsForVersion(
release.tag_name,
validatingRelease
).sort();
const extantAssets = release.assets.map((asset) => asset.name).sort();
const downloadUrls = release.assets
.map((asset) => ({ url: asset.browser_download_url, file: asset.name }))
.sort((a, b) => a.file.localeCompare(b.file));
failureCount = 0; failureCount = 0;
for (const asset of requiredAssets) { for (const asset of requiredAssets) {
check(extantAssets.includes(asset), asset); check(extantAssets.includes(asset), asset);
} }
check((failureCount === 0), 'All required GitHub assets exist for release', true); check(
failureCount === 0,
'All required GitHub assets exist for release',
true
);
if (!validatingRelease || !release.draft) { if (!validatingRelease || !release.draft) {
if (release.draft) { if (release.draft) {
await verifyDraftGitHubReleaseAssets(release); await verifyDraftGitHubReleaseAssets(release);
} else { } else {
await verifyShasumsForRemoteFiles(downloadUrls) await verifyShasumsForRemoteFiles(downloadUrls).catch((err) => {
.catch(err => {
console.error(`${fail} error verifyingShasums`, err); console.error(`${fail} error verifyingShasums`, err);
}); });
} }
@@ -85,7 +115,7 @@ async function validateReleaseAssets (release, validatingRelease) {
} }
} }
function check (condition, statement, exitIfFail = false) { function check (condition: boolean, statement: string, exitIfFail = false) {
if (condition) { if (condition) {
console.log(`${pass} ${statement}`); console.log(`${pass} ${statement}`);
} else { } else {
@@ -95,7 +125,7 @@ function check (condition, statement, exitIfFail = false) {
} }
} }
function assetsForVersion (version, validatingRelease) { function assetsForVersion (version: string, validatingRelease: boolean) {
const patterns = [ const patterns = [
`chromedriver-${version}-darwin-x64.zip`, `chromedriver-${version}-darwin-x64.zip`,
`chromedriver-${version}-darwin-arm64.zip`, `chromedriver-${version}-darwin-arm64.zip`,
@@ -177,7 +207,7 @@ function assetsForVersion (version, validatingRelease) {
return patterns; return patterns;
} }
const cloudStoreFilePaths = (version) => [ const cloudStoreFilePaths = (version: string) => [
`iojs-${version}-headers.tar.gz`, `iojs-${version}-headers.tar.gz`,
`iojs-${version}.tar.gz`, `iojs-${version}.tar.gz`,
`node-${version}.tar.gz`, `node-${version}.tar.gz`,
@@ -194,7 +224,7 @@ const cloudStoreFilePaths = (version) => [
'SHASUMS256.txt' 'SHASUMS256.txt'
]; ];
function azRemoteFilesForVersion (version) { function azRemoteFilesForVersion (version: string) {
const azCDN = 'https://artifacts.electronjs.org/headers/'; const azCDN = 'https://artifacts.electronjs.org/headers/';
const versionPrefix = `${azCDN}dist/${version}/`; const versionPrefix = `${azCDN}dist/${version}/`;
return cloudStoreFilePaths(version).map((filePath) => ({ return cloudStoreFilePaths(version).map((filePath) => ({
@@ -203,10 +233,10 @@ function azRemoteFilesForVersion (version) {
})); }));
} }
function runScript (scriptName, scriptArgs, cwd) { function runScript (scriptName: string, scriptArgs: string[], cwd?: string) {
const scriptCommand = `${scriptName} ${scriptArgs.join(' ')}`; const scriptCommand = `${scriptName} ${scriptArgs.join(' ')}`;
const scriptOptions = { const scriptOptions: ExecSyncOptions = {
encoding: 'UTF-8' encoding: 'utf-8'
}; };
if (cwd) scriptOptions.cwd = cwd; if (cwd) scriptOptions.cwd = cwd;
try { try {
@@ -219,29 +249,48 @@ function runScript (scriptName, scriptArgs, cwd) {
function uploadNodeShasums () { function uploadNodeShasums () {
console.log('Uploading Node SHASUMS file to artifacts.electronjs.org.'); console.log('Uploading Node SHASUMS file to artifacts.electronjs.org.');
const scriptPath = path.join(ELECTRON_DIR, 'script', 'release', 'uploaders', 'upload-node-checksums.py'); const scriptPath = join(
ELECTRON_DIR,
'script',
'release',
'uploaders',
'upload-node-checksums.py'
);
runScript(scriptPath, ['-v', pkgVersion]); runScript(scriptPath, ['-v', pkgVersion]);
console.log(`${pass} Done uploading Node SHASUMS file to artifacts.electronjs.org.`); console.log(
`${pass} Done uploading Node SHASUMS file to artifacts.electronjs.org.`
);
} }
function uploadIndexJson () { function uploadIndexJson () {
console.log('Uploading index.json to artifacts.electronjs.org.'); console.log('Uploading index.json to artifacts.electronjs.org.');
const scriptPath = path.join(ELECTRON_DIR, 'script', 'release', 'uploaders', 'upload-index-json.py'); const scriptPath = join(
ELECTRON_DIR,
'script',
'release',
'uploaders',
'upload-index-json.py'
);
runScript(scriptPath, [pkgVersion]); runScript(scriptPath, [pkgVersion]);
console.log(`${pass} Done uploading index.json to artifacts.electronjs.org.`); console.log(`${pass} Done uploading index.json to artifacts.electronjs.org.`);
} }
async function mergeShasums (pkgVersion) { async function mergeShasums (pkgVersion: string) {
// Download individual checksum files for Electron zip files from artifact storage, // Download individual checksum files for Electron zip files from artifact storage,
// concatenate them, and upload to GitHub. // concatenate them, and upload to GitHub.
const connectionString = process.env.ELECTRON_ARTIFACTS_BLOB_STORAGE; const connectionString = process.env.ELECTRON_ARTIFACTS_BLOB_STORAGE;
if (!connectionString) { if (!connectionString) {
throw new Error('Please set the $ELECTRON_ARTIFACTS_BLOB_STORAGE environment variable'); throw new Error(
'Please set the $ELECTRON_ARTIFACTS_BLOB_STORAGE environment variable'
);
} }
const blobServiceClient = BlobServiceClient.fromConnectionString(connectionString); const blobServiceClient =
const containerClient = blobServiceClient.getContainerClient('checksums-scratchpad'); BlobServiceClient.fromConnectionString(connectionString);
const containerClient = blobServiceClient.getContainerClient(
'checksums-scratchpad'
);
const blobsIter = containerClient.listBlobsFlat({ const blobsIter = containerClient.listBlobsFlat({
prefix: `${pkgVersion}/` prefix: `${pkgVersion}/`
}); });
@@ -256,16 +305,22 @@ async function mergeShasums (pkgVersion) {
return shasums.join('\n'); return shasums.join('\n');
} }
async function createReleaseShasums (release) { async function createReleaseShasums (release: MinimalRelease) {
const fileName = 'SHASUMS256.txt'; const fileName = 'SHASUMS256.txt';
const existingAssets = release.assets.filter(asset => asset.name === fileName); const existingAssets = release.assets.filter(
(asset) => asset.name === fileName
);
if (existingAssets.length > 0) { if (existingAssets.length > 0) {
console.log(`${fileName} already exists on GitHub; deleting before creating new file.`); console.log(
await octokit.repos.deleteReleaseAsset({ `${fileName} already exists on GitHub; deleting before creating new file.`
);
await octokit.repos
.deleteReleaseAsset({
owner: 'electron', owner: 'electron',
repo: targetRepo, repo: targetRepo,
asset_id: existingAssets[0].id asset_id: existingAssets[0].id
}).catch(err => { })
.catch((err) => {
console.error(`${fail} Error deleting ${fileName} on GitHub:`, err); console.error(`${fail} Error deleting ${fileName} on GitHub:`, err);
process.exit(1); process.exit(1);
}); });
@@ -282,31 +337,37 @@ async function createReleaseShasums (release) {
console.log(`${pass} Successfully uploaded ${fileName} to GitHub.`); console.log(`${pass} Successfully uploaded ${fileName} to GitHub.`);
} }
async function uploadShasumFile (filePath, fileName, releaseId) { async function uploadShasumFile (
filePath: string,
fileName: string,
releaseId: number
) {
const uploadUrl = `https://uploads.github.com/repos/electron/${targetRepo}/releases/${releaseId}/assets{?name,label}`; const uploadUrl = `https://uploads.github.com/repos/electron/${targetRepo}/releases/${releaseId}/assets{?name,label}`;
return octokit.repos.uploadReleaseAsset({ return octokit.repos
.uploadReleaseAsset({
url: uploadUrl, url: uploadUrl,
headers: { headers: {
'content-type': 'text/plain', 'content-type': 'text/plain',
'content-length': fs.statSync(filePath).size 'content-length': statSync(filePath).size
}, },
data: fs.createReadStream(filePath), data: createReadStream(filePath),
name: fileName name: fileName
}).catch(err => { } as any)
.catch((err) => {
console.error(`${fail} Error uploading ${filePath} to GitHub:`, err); console.error(`${fail} Error uploading ${filePath} to GitHub:`, err);
process.exit(1); process.exit(1);
}); });
} }
function saveShaSumFile (checksums, fileName) { function saveShaSumFile (checksums: string, fileName: string) {
return new Promise(resolve => { return new Promise<string>((resolve) => {
temp.open(fileName, (err, info) => { temp.open(fileName, (err, info) => {
if (err) { if (err) {
console.error(`${fail} Could not create ${fileName} file`); console.error(`${fail} Could not create ${fileName} file`);
process.exit(1); process.exit(1);
} else { } else {
fs.writeFileSync(info.fd, checksums); writeFileSync(info.fd, checksums);
fs.close(info.fd, (err) => { close(info.fd, (err) => {
if (err) { if (err) {
console.error(`${fail} Could not close ${fileName} file`); console.error(`${fail} Could not close ${fileName} file`);
process.exit(1); process.exit(1);
@@ -318,7 +379,7 @@ function saveShaSumFile (checksums, fileName) {
}); });
} }
async function publishRelease (release) { async function publishRelease (release: MinimalRelease) {
let makeLatest = false; let makeLatest = false;
if (!release.prerelease) { if (!release.prerelease) {
const currentLatest = await octokit.repos.getLatestRelease({ const currentLatest = await octokit.repos.getLatestRelease({
@@ -326,23 +387,25 @@ async function publishRelease (release) {
repo: targetRepo repo: targetRepo
}); });
makeLatest = semver.gte(release.tag_name, currentLatest.data.tag_name); makeLatest = gte(release.tag_name, currentLatest.data.tag_name);
} }
return octokit.repos.updateRelease({ return octokit.repos
.updateRelease({
owner: 'electron', owner: 'electron',
repo: targetRepo, repo: targetRepo,
release_id: release.id, release_id: release.id,
tag_name: release.tag_name, tag_name: release.tag_name,
draft: false, draft: false,
make_latest: makeLatest ? 'true' : 'false' make_latest: makeLatest ? 'true' : 'false'
}).catch(err => { })
.catch((err) => {
console.error(`${fail} Error publishing release:`, err); console.error(`${fail} Error publishing release:`, err);
process.exit(1); process.exit(1);
}); });
} }
async function makeRelease (releaseToValidate) { async function makeRelease (releaseToValidate: string | boolean) {
if (releaseToValidate) { if (releaseToValidate) {
if (releaseToValidate === true) { if (releaseToValidate === true) {
releaseToValidate = pkgVersion; releaseToValidate = pkgVersion;
@@ -367,18 +430,21 @@ async function makeRelease (releaseToValidate) {
// in index.json, which causes other problems in downstream projects // in index.json, which causes other problems in downstream projects
uploadIndexJson(); uploadIndexJson();
await publishRelease(draftRelease); await publishRelease(draftRelease);
console.log(`${pass} SUCCESS!!! Release has been published. Please run ` + console.log(
'"npm run publish-to-npm" to publish release to npm.'); `${pass} SUCCESS!!! Release has been published. Please run ` +
'"npm run publish-to-npm" to publish release to npm.'
);
} }
} }
const SHASUM_256_FILENAME = 'SHASUMS256.txt'; const SHASUM_256_FILENAME = 'SHASUMS256.txt';
const SHASUM_1_FILENAME = 'SHASUMS.txt'; const SHASUM_1_FILENAME = 'SHASUMS.txt';
async function verifyDraftGitHubReleaseAssets (release) { async function verifyDraftGitHubReleaseAssets (release: MinimalRelease) {
console.log('Fetching authenticated GitHub artifact URLs to verify shasums'); console.log('Fetching authenticated GitHub artifact URLs to verify shasums');
const remoteFilesToHash = await Promise.all(release.assets.map(async asset => { const remoteFilesToHash = await Promise.all(
release.assets.map(async (asset) => {
const requestOptions = octokit.repos.getReleaseAsset.endpoint({ const requestOptions = octokit.repos.getReleaseAsset.endpoint({
owner: 'electron', owner: 'electron',
repo: targetRepo, repo: targetRepo,
@@ -389,22 +455,27 @@ async function verifyDraftGitHubReleaseAssets (release) {
}); });
const { url, headers } = requestOptions; const { url, headers } = requestOptions;
headers.authorization = `token ${(await octokit.auth()).token}`; headers.authorization = `token ${
((await octokit.auth()) as { token: string }).token
}`;
const response = await got(url, { const response = await got(url, {
followRedirect: false, followRedirect: false,
method: 'HEAD', method: 'HEAD',
headers, headers: headers as any,
throwHttpErrors: false throwHttpErrors: false
}); });
if (response.statusCode !== 302 && response.statusCode !== 301) { if (response.statusCode !== 302 && response.statusCode !== 301) {
console.error('Failed to HEAD github asset: ' + url); console.error('Failed to HEAD github asset: ' + url);
throw new Error('Unexpected status HEAD\'ing github asset: ' + response.statusCode); throw new Error(
"Unexpected status HEAD'ing github asset: " + response.statusCode
);
} }
return { url: response.headers.location, file: asset.name }; return { url: response.headers.location!, file: asset.name };
})).catch(err => { })
).catch((err) => {
console.error(`${fail} Error downloading files from GitHub`, err); console.error(`${fail} Error downloading files from GitHub`, err);
process.exit(1); process.exit(1);
}); });
@@ -412,7 +483,10 @@ async function verifyDraftGitHubReleaseAssets (release) {
await verifyShasumsForRemoteFiles(remoteFilesToHash); await verifyShasumsForRemoteFiles(remoteFilesToHash);
} }
async function getShaSumMappingFromUrl (shaSumFileUrl, fileNamePrefix) { async function getShaSumMappingFromUrl (
shaSumFileUrl: string,
fileNamePrefix: string
) {
const response = await got(shaSumFileUrl, { const response = await got(shaSumFileUrl, {
throwHttpErrors: false throwHttpErrors: false
}); });
@@ -420,55 +494,115 @@ async function getShaSumMappingFromUrl (shaSumFileUrl, fileNamePrefix) {
if (response.statusCode !== 200) { if (response.statusCode !== 200) {
console.error('Failed to fetch SHASUM mapping: ' + shaSumFileUrl); console.error('Failed to fetch SHASUM mapping: ' + shaSumFileUrl);
console.error('Bad SHASUM mapping response: ' + response.body.trim()); console.error('Bad SHASUM mapping response: ' + response.body.trim());
throw new Error('Unexpected status fetching SHASUM mapping: ' + response.statusCode); throw new Error(
'Unexpected status fetching SHASUM mapping: ' + response.statusCode
);
} }
const raw = response.body; const raw = response.body;
return raw.split('\n').map(line => line.trim()).filter(Boolean).reduce((map, line) => { return raw
.split('\n')
.map((line) => line.trim())
.filter(Boolean)
.reduce((map, line) => {
const [sha, file] = line.replace('  ', ' ').split(' '); const [sha, file] = line.replace('  ', ' ').split(' ');
map[file.slice(fileNamePrefix.length)] = sha; map[file.slice(fileNamePrefix.length)] = sha;
return map; return map;
}, {}); }, Object.create(null) as Record<string, string>);
} }
async function validateFileHashesAgainstShaSumMapping (remoteFilesWithHashes, mapping) { type HashedFile = HashableFile & {
hash: string;
};
type HashableFile = {
file: string;
url: string;
};
async function validateFileHashesAgainstShaSumMapping (
remoteFilesWithHashes: HashedFile[],
mapping: Record<string, string>
) {
for (const remoteFileWithHash of remoteFilesWithHashes) { for (const remoteFileWithHash of remoteFilesWithHashes) {
check(remoteFileWithHash.hash === mapping[remoteFileWithHash.file], `Release asset ${remoteFileWithHash.file} should have hash of ${mapping[remoteFileWithHash.file]} but found ${remoteFileWithHash.hash}`, true); check(
remoteFileWithHash.hash === mapping[remoteFileWithHash.file],
`Release asset ${remoteFileWithHash.file} should have hash of ${
mapping[remoteFileWithHash.file]
} but found ${remoteFileWithHash.hash}`,
true
);
} }
} }
async function verifyShasumsForRemoteFiles (remoteFilesToHash, filesAreNodeJSArtifacts = false) { async function verifyShasumsForRemoteFiles (
console.log(`Generating SHAs for ${remoteFilesToHash.length} files to verify shasums`); remoteFilesToHash: HashableFile[],
filesAreNodeJSArtifacts = false
) {
console.log(
`Generating SHAs for ${remoteFilesToHash.length} files to verify shasums`
);
// Only used for node.js artifact uploads // Only used for node.js artifact uploads
const shaSum1File = remoteFilesToHash.find(({ file }) => file === SHASUM_1_FILENAME); const shaSum1File = remoteFilesToHash.find(
({ file }) => file === SHASUM_1_FILENAME
)!;
// Used for both node.js artifact uploads and normal electron artifacts // Used for both node.js artifact uploads and normal electron artifacts
const shaSum256File = remoteFilesToHash.find(({ file }) => file === SHASUM_256_FILENAME); const shaSum256File = remoteFilesToHash.find(
remoteFilesToHash = remoteFilesToHash.filter(({ file }) => file !== SHASUM_1_FILENAME && file !== SHASUM_256_FILENAME); ({ file }) => file === SHASUM_256_FILENAME
)!;
remoteFilesToHash = remoteFilesToHash.filter(
({ file }) => file !== SHASUM_1_FILENAME && file !== SHASUM_256_FILENAME
);
const remoteFilesWithHashes = await Promise.all(remoteFilesToHash.map(async (file) => { const remoteFilesWithHashes = await Promise.all(
remoteFilesToHash.map(async (file) => {
return { return {
hash: await getUrlHash(file.url, 'sha256'), hash: await getUrlHash(file.url, 'sha256'),
...file ...file
}; };
})); })
);
await validateFileHashesAgainstShaSumMapping(remoteFilesWithHashes, await getShaSumMappingFromUrl(shaSum256File.url, filesAreNodeJSArtifacts ? '' : '*')); await validateFileHashesAgainstShaSumMapping(
remoteFilesWithHashes,
await getShaSumMappingFromUrl(
shaSum256File.url,
filesAreNodeJSArtifacts ? '' : '*'
)
);
if (filesAreNodeJSArtifacts) { if (filesAreNodeJSArtifacts) {
const remoteFilesWithSha1Hashes = await Promise.all(remoteFilesToHash.map(async (file) => { const remoteFilesWithSha1Hashes = await Promise.all(
remoteFilesToHash.map(async (file) => {
return { return {
hash: await getUrlHash(file.url, 'sha1'), hash: await getUrlHash(file.url, 'sha1'),
...file ...file
}; };
})); })
);
await validateFileHashesAgainstShaSumMapping(remoteFilesWithSha1Hashes, await getShaSumMappingFromUrl(shaSum1File.url, filesAreNodeJSArtifacts ? '' : '*')); await validateFileHashesAgainstShaSumMapping(
remoteFilesWithSha1Hashes,
await getShaSumMappingFromUrl(
shaSum1File.url,
filesAreNodeJSArtifacts ? '' : '*'
)
);
} }
} }
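The fileNamePrefix argument threaded through the calls above reflects the two SHASUMS formats in play: Electron's SHASUMS256.txt entries read `<sha256> *<file>` (the `*` marks shasum's binary mode), while the Node.js artifact SHASUMS use bare filenames, hence the '*' versus '' prefixes. A sketch of the parse with assumed sample data:

const raw = 'abc123 *electron-v30.0.0-darwin-x64.zip';
const mapping: Record<string, string> = Object.create(null);
for (const line of raw.split('\n').map((l) => l.trim()).filter(Boolean)) {
  const [sha, file] = line.replace('  ', ' ').split(' ');
  mapping[file.slice('*'.length)] = sha; // strip the '*' prefix to get the bare filename
}
// mapping['electron-v30.0.0-darwin-x64.zip'] === 'abc123'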
makeRelease(args.validateRelease) const {
.catch((err) => { values: { validateRelease }
} = parseArgs({
options: {
validateRelease: {
type: 'boolean'
}
}
});
makeRelease(!!validateRelease).catch((err) => {
console.error('Error occurred while making release:', err); console.error('Error occurred while making release:', err);
process.exit(1); process.exit(1);
}); });
7 script/release/types.ts Normal file
View file
@@ -0,0 +1,7 @@
export const ELECTRON_ORG = 'electron';
export const ELECTRON_REPO = 'electron';
export const NIGHTLY_REPO = 'nightlies';
export type ElectronReleaseRepo = 'electron' | 'nightlies';
export type VersionBumpType = 'nightly' | 'alpha' | 'beta' | 'minor' | 'stable';
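Centralizing these strings gives every script the same literal types; any function typed against ElectronReleaseRepo now rejects arbitrary strings at compile time. A usage sketch (repoUrl is a hypothetical helper, not part of this diff):

import { ELECTRON_REPO, NIGHTLY_REPO, ElectronReleaseRepo } from './types';

function repoUrl (repo: ElectronReleaseRepo): string {
  return `https://github.com/electron/${repo}`;
}

repoUrl(ELECTRON_REPO); // ok
repoUrl(NIGHTLY_REPO);  // ok
// repoUrl('nightly');  // compile error: not assignable to ElectronReleaseRepo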
View file
@@ -2,8 +2,6 @@ import { Octokit } from '@octokit/rest';
import * as fs from 'node:fs'; import * as fs from 'node:fs';
import { createGitHubTokenStrategy } from '../github-token'; import { createGitHubTokenStrategy } from '../github-token';
if (!process.env.CI) require('dotenv-safe').load();
if (process.argv.length < 6) { if (process.argv.length < 6) {
console.log('Usage: upload-to-github filePath fileName releaseId'); console.log('Usage: upload-to-github filePath fileName releaseId');
process.exit(1); process.exit(1);
View file
@@ -387,12 +387,12 @@ def upload_sha256_checksum(version, file_path, key_prefix=None):
def get_release(version): def get_release(version):
script_path = os.path.join( script_path = os.path.join(
ELECTRON_DIR, 'script', 'release', 'find-github-release.js') ELECTRON_DIR, 'script', 'release', 'find-github-release.ts')
# Strip warnings from stdout to ensure the only output is the desired object # Strip warnings from stdout to ensure the only output is the desired object
release_env = os.environ.copy() release_env = os.environ.copy()
release_env['NODE_NO_WARNINGS'] = '1' release_env['NODE_NO_WARNINGS'] = '1'
release_info = execute(['node', script_path, version], release_env) release_info = execute([TS_NODE, script_path, version], release_env)
if is_verbose_mode(): if is_verbose_mode():
print('Release info for version: {}:\n'.format(version)) print('Release info for version: {}:\n'.format(version))
print(release_info) print(release_info)
View file
@@ -1,99 +0,0 @@
#!/usr/bin/env node
const semver = require('semver');
const minimist = require('minimist');
const { getElectronVersion } = require('../lib/get-version');
const versionUtils = require('./version-utils');
function parseCommandLine () {
let help;
const opts = minimist(process.argv.slice(2), {
string: ['bump', 'version'],
boolean: ['dryRun', 'help'],
alias: { version: ['v'] },
unknown: () => { help = true; }
});
if (help || opts.help || !opts.bump) {
console.log(`
Bump release version number. Possible arguments:\n
--bump=patch to increment patch version\n
--version={version} to set version number directly\n
--dryRun to print the next version without updating files
Note that you can use both --bump and --stable simultaneously.
`);
process.exit(0);
}
return opts;
}
// run the script
async function main () {
const opts = parseCommandLine();
const currentVersion = getElectronVersion();
const version = await nextVersion(opts.bump, currentVersion);
// print would-be new version and exit early
if (opts.dryRun) {
console.log(`new version number would be: ${version}\n`);
return 0;
}
console.log(`Bumped to version: ${version}`);
}
// get next version for release based on [nightly, alpha, beta, stable]
async function nextVersion (bumpType, version) {
if (
versionUtils.isNightly(version) ||
versionUtils.isAlpha(version) ||
versionUtils.isBeta(version)
) {
switch (bumpType) {
case 'nightly':
version = await versionUtils.nextNightly(version);
break;
case 'alpha':
version = await versionUtils.nextAlpha(version);
break;
case 'beta':
version = await versionUtils.nextBeta(version);
break;
case 'stable':
version = semver.valid(semver.coerce(version));
break;
default:
throw new Error('Invalid bump type.');
}
} else if (versionUtils.isStable(version)) {
switch (bumpType) {
case 'nightly':
version = versionUtils.nextNightly(version);
break;
case 'alpha':
throw new Error('Cannot bump to alpha from stable.');
case 'beta':
throw new Error('Cannot bump to beta from stable.');
case 'minor':
version = semver.inc(version, 'minor');
break;
case 'stable':
version = semver.inc(version, 'patch');
break;
default:
throw new Error('Invalid bump type.');
}
} else {
throw new Error(`Invalid current version: ${version}`);
}
return version;
}
if (require.main === module) {
main().catch((error) => {
console.error(error);
process.exit(1);
});
}
module.exports = { nextVersion };
View file
@@ -0,0 +1,106 @@
#!/usr/bin/env node
import { valid, coerce, inc } from 'semver';
import { getElectronVersion } from '../lib/get-version';
import {
isNightly,
isAlpha,
isBeta,
nextNightly,
nextAlpha,
nextBeta,
isStable
} from './version-utils';
import { VersionBumpType } from './types';
import { parseArgs } from 'node:util';
// run the script
async function main () {
const { values: { bump, dryRun, help } } = parseArgs({
options: {
bump: {
type: 'string'
},
dryRun: {
type: 'boolean'
},
help: {
type: 'boolean'
}
}
});
if (!bump || help) {
console.log(`
Bump release version number. Possible arguments:\n
--bump=patch to increment patch version\n
--version={version} to set version number directly\n
--dryRun to print the next version without updating files
Note that you can use both --bump and --stable simultaneously.
`);
if (!bump) process.exit(0);
else process.exit(1);
}
const currentVersion = getElectronVersion();
const version = await nextVersion(bump as VersionBumpType, currentVersion);
// print would-be new version and exit early
if (dryRun) {
console.log(`new version number would be: ${version}\n`);
return 0;
}
console.log(`Bumped to version: ${version}`);
}
// get next version for release based on [nightly, alpha, beta, stable]
export async function nextVersion (bumpType: VersionBumpType, version: string) {
if (isNightly(version) || isAlpha(version) || isBeta(version)) {
switch (bumpType) {
case 'nightly':
version = await nextNightly(version);
break;
case 'alpha':
version = await nextAlpha(version);
break;
case 'beta':
version = await nextBeta(version);
break;
case 'stable':
version = valid(coerce(version))!;
break;
default:
throw new Error('Invalid bump type.');
}
} else if (isStable(version)) {
switch (bumpType) {
case 'nightly':
version = await nextNightly(version);
break;
case 'alpha':
throw new Error('Cannot bump to alpha from stable.');
case 'beta':
throw new Error('Cannot bump to beta from stable.');
case 'minor':
version = inc(version, 'minor')!;
break;
case 'stable':
version = inc(version, 'patch')!;
break;
default:
throw new Error('Invalid bump type.');
}
} else {
throw new Error(`Invalid current version: ${version}`);
}
return version;
}
if (require.main === module) {
main().catch((error) => {
console.error(error);
process.exit(1);
});
}
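Since nextVersion is now a named export (the old file exposed it via module.exports), it can be driven directly as an API rather than only through the CLI; for example, following the stable-version branch above:

import { nextVersion } from './version-bumper';

// On a stable current version, 'minor' increments the middle component:
// 30.0.0 -> 30.1.0; 'stable' would instead bump the patch: 30.0.0 -> 30.0.1.
nextVersion('minor', '30.0.0').then((v) => console.log(v));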
View file
@@ -1,13 +1,13 @@
const semver = require('semver'); import * as semver from 'semver';
const { GitProcess } = require('dugite'); import { GitProcess } from 'dugite';
const { ELECTRON_DIR } = require('../lib/utils'); import { ELECTRON_DIR } from '../lib/utils';
const preType = { export enum PreType {
NONE: 'none', NONE = 'none',
PARTIAL: 'partial', PARTIAL = 'partial',
FULL: 'full' FULL = 'full',
}; }
const getCurrentDate = () => { const getCurrentDate = () => {
const d = new Date(); const d = new Date();
@@ -17,53 +17,43 @@ const getCurrentDate = () => {
return `${yyyy}${mm}${dd}`; return `${yyyy}${mm}${dd}`;
}; };
const isNightly = v => v.includes('nightly'); export const isNightly = (v: string) => v.includes('nightly');
const isAlpha = v => v.includes('alpha'); export const isAlpha = (v: string) => v.includes('alpha');
const isBeta = v => v.includes('beta'); export const isBeta = (v: string) => v.includes('beta');
const isStable = v => { export const isStable = (v: string) => {
const parsed = semver.parse(v); const parsed = semver.parse(v);
return !!(parsed && parsed.prerelease.length === 0); return !!(parsed && parsed.prerelease.length === 0);
}; };
const makeVersion = (components, delim, pre = preType.NONE) => { export async function nextAlpha (v: string) {
let version = [components.major, components.minor, components.patch].join(delim);
if (pre === preType.PARTIAL) {
version += `${delim}${components.pre[1] || 0}`;
} else if (pre === preType.FULL) {
version += `-${components.pre[0]}${delim}${components.pre[1]}`;
}
return version;
};
async function nextAlpha (v) {
const next = semver.coerce(semver.clean(v)); const next = semver.coerce(semver.clean(v));
const tagBlob = await GitProcess.exec(['tag', '--list', '-l', `v${next}-alpha.*`], ELECTRON_DIR); const tagBlob = await GitProcess.exec(['tag', '--list', '-l', `v${next}-alpha.*`], ELECTRON_DIR);
const tags = tagBlob.stdout.split('\n').filter(e => e !== ''); const tags = tagBlob.stdout.split('\n').filter(e => e !== '');
tags.sort((t1, t2) => { tags.sort((t1, t2) => {
const a = parseInt(t1.split('.').pop(), 10); const a = parseInt(t1.split('.').pop()!, 10);
const b = parseInt(t2.split('.').pop(), 10); const b = parseInt(t2.split('.').pop()!, 10);
return a - b; return a - b;
}); });
// increment the latest existing alpha tag or start at alpha.1 if it's a new alpha line // increment the latest existing alpha tag or start at alpha.1 if it's a new alpha line
return tags.length === 0 ? `${next}-alpha.1` : semver.inc(tags.pop(), 'prerelease'); return tags.length === 0 ? `${next}-alpha.1` : semver.inc(tags.pop()!, 'prerelease')!;
} }
async function nextBeta (v) { export async function nextBeta (v: string) {
const next = semver.coerce(semver.clean(v)); const next = semver.coerce(semver.clean(v));
const tagBlob = await GitProcess.exec(['tag', '--list', '-l', `v${next}-beta.*`], ELECTRON_DIR); const tagBlob = await GitProcess.exec(['tag', '--list', '-l', `v${next}-beta.*`], ELECTRON_DIR);
const tags = tagBlob.stdout.split('\n').filter(e => e !== ''); const tags = tagBlob.stdout.split('\n').filter(e => e !== '');
tags.sort((t1, t2) => { tags.sort((t1, t2) => {
const a = parseInt(t1.split('.').pop(), 10); const a = parseInt(t1.split('.').pop()!, 10);
const b = parseInt(t2.split('.').pop(), 10); const b = parseInt(t2.split('.').pop()!, 10);
return a - b; return a - b;
}); });
// increment the latest existing beta tag or start at beta.1 if it's a new beta line // increment the latest existing beta tag or start at beta.1 if it's a new beta line
return tags.length === 0 ? `${next}-beta.1` : semver.inc(tags.pop(), 'prerelease'); return tags.length === 0 ? `${next}-beta.1` : semver.inc(tags.pop()!, 'prerelease')!;
} }
async function nextNightly (v) { export async function nextNightly (v: string) {
let next = semver.valid(semver.coerce(v)); let next = semver.valid(semver.coerce(v));
const pre = `nightly.${getCurrentDate()}`; const pre = `nightly.${getCurrentDate()}`;
@@ -71,7 +61,7 @@ async function nextNightly (v) {
if (branch === 'main') { if (branch === 'main') {
next = semver.inc(await getLastMajorForMain(), 'major'); next = semver.inc(await getLastMajorForMain(), 'major');
} else if (isStable(v)) { } else if (isStable(v)) {
next = semver.inc(next, 'patch'); next = semver.inc(next!, 'patch');
} }
return `${next}-${pre}`; return `${next}-${pre}`;
@@ -89,19 +79,7 @@ async function getLastMajorForMain () {
} }
} }
function getNextReleaseBranch (branches) { function getNextReleaseBranch (branches: string[]) {
const converted = branches.map(b => b.replace(/-/g, '.').replace('x', '0').replace('y', '0')); const converted = branches.map(b => b.replace(/-/g, '.').replace('x', '0').replace('y', '0'));
return converted.reduce((v1, v2) => semver.gt(v1, v2) ? v1 : v2); return converted.reduce((v1, v2) => semver.gt(v1, v2) ? v1 : v2);
} }
module.exports = {
isStable,
isAlpha,
isBeta,
isNightly,
nextAlpha,
nextBeta,
makeVersion,
nextNightly,
preType
};
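getNextReleaseBranch (module-internal, shown only to trace the transform) works by normalizing release-branch names into comparable semver strings before reducing to the greatest:

// '30-x-y' -> '30.0.0', '31-x-y' -> '31.0.0'; semver.gt keeps the larger.
getNextReleaseBranch(['30-x-y', '31-x-y']); // returns '31.0.0'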
View file
@@ -1,6 +1,6 @@
import { GitProcess, IGitExecutionOptions, IGitResult } from 'dugite'; import { GitProcess, IGitExecutionOptions, IGitResult } from 'dugite';
import { expect } from 'chai'; import { expect } from 'chai';
import * as notes from '../script/release/notes/notes.js'; import * as notes from '../script/release/notes/notes';
import * as path from 'node:path'; import * as path from 'node:path';
import * as sinon from 'sinon'; import * as sinon from 'sinon';
View file
@@ -1,7 +1,6 @@
import { expect } from 'chai'; import { expect } from 'chai';
import { GitProcess, IGitExecutionOptions, IGitResult } from 'dugite'; import { GitProcess, IGitExecutionOptions, IGitResult } from 'dugite';
import { nextVersion } from '../script/release/version-bumper'; import { nextVersion } from '../script/release/version-bumper';
import * as utils from '../script/release/version-utils';
import * as sinon from 'sinon'; import * as sinon from 'sinon';
import { ifdescribe } from './lib/spec-helpers'; import { ifdescribe } from './lib/spec-helpers';
@@ -53,43 +52,6 @@ class GitFake {
} }
describe('version-bumper', () => { describe('version-bumper', () => {
describe('makeVersion', () => {
it('makes a version with a period delimiter', () => {
const components = {
major: 2,
minor: 0,
patch: 0
};
const version = utils.makeVersion(components, '.');
expect(version).to.equal('2.0.0');
});
it('makes a version with a period delimiter and a partial pre', () => {
const components = {
major: 2,
minor: 0,
patch: 0,
pre: ['nightly', 12345678]
};
const version = utils.makeVersion(components, '.', utils.preType.PARTIAL);
expect(version).to.equal('2.0.0.12345678');
});
it('makes a version with a period delimiter and a full pre', () => {
const components = {
major: 2,
minor: 0,
patch: 0,
pre: ['nightly', 12345678]
};
const version = utils.makeVersion(components, '.', utils.preType.FULL);
expect(version).to.equal('2.0.0-nightly.12345678');
});
});
ifdescribe(!(process.platform === 'linux' && process.arch.indexOf('arm') === 0) && process.platform !== 'darwin')('nextVersion', () => { ifdescribe(!(process.platform === 'linux' && process.arch.indexOf('arm') === 0) && process.platform !== 'darwin')('nextVersion', () => {
describe('bump versions', () => { describe('bump versions', () => {
const nightlyPattern = /[0-9.]*(-nightly.(\d{4})(\d{2})(\d{2}))$/g; const nightlyPattern = /[0-9.]*(-nightly.(\d{4})(\d{2})(\d{2}))$/g;
@@ -183,6 +145,7 @@ describe('version-bumper', () => {
it('throws on an invalid bump type', () => { it('throws on an invalid bump type', () => {
const version = 'v2.0.0'; const version = 'v2.0.0';
return expect( return expect(
// @ts-expect-error 'WRONG' is not a valid bump type
nextVersion('WRONG', version) nextVersion('WRONG', version)
).to.be.rejectedWith('Invalid bump type.'); ).to.be.rejectedWith('Invalid bump type.');
}); });
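The @ts-expect-error is needed because the converted nextVersion now constrains its first argument at compile time; a sketch of the idea with an assumed union type (the real one lives in the version-bumper module and may differ):

// Assumed shape, for illustration only.
type BumpType = 'nightly' | 'alpha' | 'beta' | 'minor' | 'stable';
declare function nextVersion (bumpType: BumpType, version: string): Promise<string>;

// nextVersion('WRONG', 'v2.0.0'); // fails to compile without @ts-expect-error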

yarn.lock (139 changes)

@@ -442,18 +442,10 @@
dependencies: dependencies:
"@octokit/types" "^9.0.0" "@octokit/types" "^9.0.0"
"@octokit/core@^4.1.0": "@octokit/auth-token@^4.0.0":
version "4.2.0" version "4.0.0"
resolved "https://registry.yarnpkg.com/@octokit/core/-/core-4.2.0.tgz#8c253ba9605aca605bc46187c34fcccae6a96648" resolved "https://registry.yarnpkg.com/@octokit/auth-token/-/auth-token-4.0.0.tgz#40d203ea827b9f17f42a29c6afb93b7745ef80c7"
integrity sha512-AgvDRUg3COpR82P7PBdGZF/NNqGmtMq2NiPqeSsDIeCfYFOZ9gddqWNQHnFdEUf+YwOj4aZYmJnlPp7OXmDIDg== integrity sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA==
dependencies:
"@octokit/auth-token" "^3.0.0"
"@octokit/graphql" "^5.0.0"
"@octokit/request" "^6.0.0"
"@octokit/request-error" "^3.0.0"
"@octokit/types" "^9.0.0"
before-after-hook "^2.2.0"
universal-user-agent "^6.0.0"
"@octokit/core@^4.2.1": "@octokit/core@^4.2.1":
version "4.2.1" version "4.2.1"
@@ -468,6 +460,19 @@
before-after-hook "^2.2.0" before-after-hook "^2.2.0"
universal-user-agent "^6.0.0" universal-user-agent "^6.0.0"
"@octokit/core@^5.0.2":
version "5.2.0"
resolved "https://registry.yarnpkg.com/@octokit/core/-/core-5.2.0.tgz#ddbeaefc6b44a39834e1bb2e58a49a117672a7ea"
integrity sha512-1LFfa/qnMQvEOAdzlQymH0ulepxbxnCYAKJZfMci/5XJyIHWgEYnDmgnKakbTh7CH2tFQ5O60oYDvns4i9RAIg==
dependencies:
"@octokit/auth-token" "^4.0.0"
"@octokit/graphql" "^7.1.0"
"@octokit/request" "^8.3.1"
"@octokit/request-error" "^5.1.0"
"@octokit/types" "^13.0.0"
before-after-hook "^2.2.0"
universal-user-agent "^6.0.0"
"@octokit/endpoint@^7.0.0": "@octokit/endpoint@^7.0.0":
version "7.0.3" version "7.0.3"
resolved "https://registry.yarnpkg.com/@octokit/endpoint/-/endpoint-7.0.3.tgz#0b96035673a9e3bedf8bab8f7335de424a2147ed" resolved "https://registry.yarnpkg.com/@octokit/endpoint/-/endpoint-7.0.3.tgz#0b96035673a9e3bedf8bab8f7335de424a2147ed"
@@ -477,6 +482,14 @@
is-plain-object "^5.0.0" is-plain-object "^5.0.0"
universal-user-agent "^6.0.0" universal-user-agent "^6.0.0"
"@octokit/endpoint@^9.0.1":
version "9.0.5"
resolved "https://registry.yarnpkg.com/@octokit/endpoint/-/endpoint-9.0.5.tgz#e6c0ee684e307614c02fc6ac12274c50da465c44"
integrity sha512-ekqR4/+PCLkEBF6qgj8WqJfvDq65RH85OAgrtnVp1mSxaXF03u2xW/hUdweGS5654IlC0wkNYC18Z50tSYTAFw==
dependencies:
"@octokit/types" "^13.1.0"
universal-user-agent "^6.0.0"
"@octokit/graphql@^5.0.0": "@octokit/graphql@^5.0.0":
version "5.0.5" version "5.0.5"
resolved "https://registry.yarnpkg.com/@octokit/graphql/-/graphql-5.0.5.tgz#a4cb3ea73f83b861893a6370ee82abb36e81afd2" resolved "https://registry.yarnpkg.com/@octokit/graphql/-/graphql-5.0.5.tgz#a4cb3ea73f83b861893a6370ee82abb36e81afd2"
@@ -486,6 +499,15 @@
"@octokit/types" "^9.0.0" "@octokit/types" "^9.0.0"
universal-user-agent "^6.0.0" universal-user-agent "^6.0.0"
"@octokit/graphql@^7.1.0":
version "7.1.0"
resolved "https://registry.yarnpkg.com/@octokit/graphql/-/graphql-7.1.0.tgz#9bc1c5de92f026648131f04101cab949eeffe4e0"
integrity sha512-r+oZUH7aMFui1ypZnAvZmn0KSqAUgE1/tUXIWaqUCa1758ts/Jio84GZuzsvUkme98kv0WFY8//n0J1Z+vsIsQ==
dependencies:
"@octokit/request" "^8.3.0"
"@octokit/types" "^13.0.0"
universal-user-agent "^6.0.0"
"@octokit/oauth-authorization-url@^5.0.0": "@octokit/oauth-authorization-url@^5.0.0":
version "5.0.0" version "5.0.0"
resolved "https://registry.yarnpkg.com/@octokit/oauth-authorization-url/-/oauth-authorization-url-5.0.0.tgz#029626ce87f3b31addb98cd0d2355c2381a1c5a1" resolved "https://registry.yarnpkg.com/@octokit/oauth-authorization-url/-/oauth-authorization-url-5.0.0.tgz#029626ce87f3b31addb98cd0d2355c2381a1c5a1"
@@ -517,12 +539,17 @@
resolved "https://registry.yarnpkg.com/@octokit/openapi-types/-/openapi-types-17.2.0.tgz#f1800b5f9652b8e1b85cc6dfb1e0dc888810bdb5" resolved "https://registry.yarnpkg.com/@octokit/openapi-types/-/openapi-types-17.2.0.tgz#f1800b5f9652b8e1b85cc6dfb1e0dc888810bdb5"
integrity sha512-MazrFNx4plbLsGl+LFesMo96eIXkFgEtaKbnNpdh4aQ0VM10aoylFsTYP1AEjkeoRNZiiPe3T6Gl2Hr8dJWdlQ== integrity sha512-MazrFNx4plbLsGl+LFesMo96eIXkFgEtaKbnNpdh4aQ0VM10aoylFsTYP1AEjkeoRNZiiPe3T6Gl2Hr8dJWdlQ==
"@octokit/plugin-paginate-rest@^6.0.0": "@octokit/openapi-types@^22.2.0":
version "6.0.0" version "22.2.0"
resolved "https://registry.yarnpkg.com/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-6.0.0.tgz#f34b5a7d9416019126042cd7d7b811e006c0d561" resolved "https://registry.yarnpkg.com/@octokit/openapi-types/-/openapi-types-22.2.0.tgz#75aa7dcd440821d99def6a60b5f014207ae4968e"
integrity sha512-Sq5VU1PfT6/JyuXPyt04KZNVsFOSBaYOAq2QRZUwzVlI10KFvcbUo8lR258AAQL1Et60b0WuVik+zOWKLuDZxw== integrity sha512-QBhVjcUa9W7Wwhm6DBFu6ZZ+1/t/oYxqc2tp81Pi41YNuJinbFRx8B133qVOrAaBbF7D/m0Et6f9/pZt9Rc+tg==
"@octokit/plugin-paginate-rest@11.3.1":
version "11.3.1"
resolved "https://registry.yarnpkg.com/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-11.3.1.tgz#fe92d04b49f134165d6fbb716e765c2f313ad364"
integrity sha512-ryqobs26cLtM1kQxqeZui4v8FeznirUsksiA+RYemMPJ7Micju0WSkv50dBksTuZks9O5cg4wp+t8fZ/cLY56g==
dependencies: dependencies:
"@octokit/types" "^9.0.0" "@octokit/types" "^13.5.0"
"@octokit/plugin-paginate-rest@^6.1.2": "@octokit/plugin-paginate-rest@^6.1.2":
version "6.1.2" version "6.1.2"
@@ -537,13 +564,17 @@
resolved "https://registry.yarnpkg.com/@octokit/plugin-request-log/-/plugin-request-log-1.0.4.tgz#5e50ed7083a613816b1e4a28aeec5fb7f1462e85" resolved "https://registry.yarnpkg.com/@octokit/plugin-request-log/-/plugin-request-log-1.0.4.tgz#5e50ed7083a613816b1e4a28aeec5fb7f1462e85"
integrity sha512-mLUsMkgP7K/cnFEw07kWqXGF5LKrOkD+lhCrKvPHXWDywAwuDUeDwWBpc69XK3pNX0uKiVt8g5z96PJ6z9xCFA== integrity sha512-mLUsMkgP7K/cnFEw07kWqXGF5LKrOkD+lhCrKvPHXWDywAwuDUeDwWBpc69XK3pNX0uKiVt8g5z96PJ6z9xCFA==
"@octokit/plugin-rest-endpoint-methods@^7.0.0": "@octokit/plugin-request-log@^4.0.0":
version "7.0.1" version "4.0.1"
resolved "https://registry.yarnpkg.com/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-7.0.1.tgz#f7ebe18144fd89460f98f35a587b056646e84502" resolved "https://registry.yarnpkg.com/@octokit/plugin-request-log/-/plugin-request-log-4.0.1.tgz#98a3ca96e0b107380664708111864cb96551f958"
integrity sha512-pnCaLwZBudK5xCdrR823xHGNgqOzRnJ/mpC/76YPpNP7DybdsJtP7mdOwh+wYZxK5jqeQuhu59ogMI4NRlBUvA== integrity sha512-GihNqNpGHorUrO7Qa9JbAl0dbLnqJVrV8OXe2Zm5/Y4wFkZQDfTreBzVmiRfJVfE4mClXdihHnbpyyO9FSX4HA==
"@octokit/plugin-rest-endpoint-methods@13.2.2":
version "13.2.2"
resolved "https://registry.yarnpkg.com/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-13.2.2.tgz#af8e5dd2cddfea576f92ffaf9cb84659f302a638"
integrity sha512-EI7kXWidkt3Xlok5uN43suK99VWqc8OaIMktY9d9+RNKl69juoTyxmLoWPIZgJYzi41qj/9zU7G/ljnNOJ5AFA==
dependencies: dependencies:
"@octokit/types" "^9.0.0" "@octokit/types" "^13.5.0"
deprecation "^2.3.1"
"@octokit/plugin-rest-endpoint-methods@^7.1.2": "@octokit/plugin-rest-endpoint-methods@^7.1.2":
version "7.1.2" version "7.1.2"
@@ -562,6 +593,15 @@
deprecation "^2.0.0" deprecation "^2.0.0"
once "^1.4.0" once "^1.4.0"
"@octokit/request-error@^5.1.0":
version "5.1.0"
resolved "https://registry.yarnpkg.com/@octokit/request-error/-/request-error-5.1.0.tgz#ee4138538d08c81a60be3f320cd71063064a3b30"
integrity sha512-GETXfE05J0+7H2STzekpKObFe765O5dlAKUTLNGeH+x47z7JjXHfsHKo5z21D/o/IOZTUEI6nyWyR+bZVP/n5Q==
dependencies:
"@octokit/types" "^13.1.0"
deprecation "^2.0.0"
once "^1.4.0"
"@octokit/request@^6.0.0": "@octokit/request@^6.0.0":
version "6.2.4" version "6.2.4"
resolved "https://registry.yarnpkg.com/@octokit/request/-/request-6.2.4.tgz#b00a7185865c72bdd432e63168b1e900953ded0c" resolved "https://registry.yarnpkg.com/@octokit/request/-/request-6.2.4.tgz#b00a7185865c72bdd432e63168b1e900953ded0c"
@@ -574,6 +614,16 @@
node-fetch "^2.6.7" node-fetch "^2.6.7"
universal-user-agent "^6.0.0" universal-user-agent "^6.0.0"
"@octokit/request@^8.3.0", "@octokit/request@^8.3.1":
version "8.4.0"
resolved "https://registry.yarnpkg.com/@octokit/request/-/request-8.4.0.tgz#7f4b7b1daa3d1f48c0977ad8fffa2c18adef8974"
integrity sha512-9Bb014e+m2TgBeEJGEbdplMVWwPmL1FPtggHQRkV+WVsMggPtEkLKPlcVYm/o8xKLkpJ7B+6N8WfQMtDLX2Dpw==
dependencies:
"@octokit/endpoint" "^9.0.1"
"@octokit/request-error" "^5.1.0"
"@octokit/types" "^13.1.0"
universal-user-agent "^6.0.0"
"@octokit/rest@^19.0.11": "@octokit/rest@^19.0.11":
version "19.0.11" version "19.0.11"
resolved "https://registry.yarnpkg.com/@octokit/rest/-/rest-19.0.11.tgz#2ae01634fed4bd1fca5b642767205ed3fd36177c" resolved "https://registry.yarnpkg.com/@octokit/rest/-/rest-19.0.11.tgz#2ae01634fed4bd1fca5b642767205ed3fd36177c"
@@ -584,21 +634,28 @@
"@octokit/plugin-request-log" "^1.0.4" "@octokit/plugin-request-log" "^1.0.4"
"@octokit/plugin-rest-endpoint-methods" "^7.1.2" "@octokit/plugin-rest-endpoint-methods" "^7.1.2"
"@octokit/rest@^19.0.7": "@octokit/rest@^20.0.2":
version "19.0.7" version "20.1.1"
resolved "https://registry.yarnpkg.com/@octokit/rest/-/rest-19.0.7.tgz#d2e21b4995ab96ae5bfae50b4969da7e04e0bb70" resolved "https://registry.yarnpkg.com/@octokit/rest/-/rest-20.1.1.tgz#ec775864f53fb42037a954b9a40d4f5275b3dc95"
integrity sha512-HRtSfjrWmWVNp2uAkEpQnuGMJsu/+dBr47dRc5QVgsCbnIc1+GFEaoKBWkYG+zjrsHpSqcAElMio+n10c0b5JA== integrity sha512-MB4AYDsM5jhIHro/dq4ix1iWTLGToIGk6cWF5L6vanFaMble5jTX/UBQyiv05HsWnwUtY8JrfHy2LWfKwihqMw==
dependencies: dependencies:
"@octokit/core" "^4.1.0" "@octokit/core" "^5.0.2"
"@octokit/plugin-paginate-rest" "^6.0.0" "@octokit/plugin-paginate-rest" "11.3.1"
"@octokit/plugin-request-log" "^1.0.4" "@octokit/plugin-request-log" "^4.0.0"
"@octokit/plugin-rest-endpoint-methods" "^7.0.0" "@octokit/plugin-rest-endpoint-methods" "13.2.2"
"@octokit/tsconfig@^1.0.2": "@octokit/tsconfig@^1.0.2":
version "1.0.2" version "1.0.2"
resolved "https://registry.yarnpkg.com/@octokit/tsconfig/-/tsconfig-1.0.2.tgz#59b024d6f3c0ed82f00d08ead5b3750469125af7" resolved "https://registry.yarnpkg.com/@octokit/tsconfig/-/tsconfig-1.0.2.tgz#59b024d6f3c0ed82f00d08ead5b3750469125af7"
integrity sha512-I0vDR0rdtP8p2lGMzvsJzbhdOWy405HcGovrspJ8RRibHnyRgggUSNO5AIox5LmqiwmatHKYsvj6VGFHkqS7lA== integrity sha512-I0vDR0rdtP8p2lGMzvsJzbhdOWy405HcGovrspJ8RRibHnyRgggUSNO5AIox5LmqiwmatHKYsvj6VGFHkqS7lA==
"@octokit/types@^13.0.0", "@octokit/types@^13.1.0", "@octokit/types@^13.5.0":
version "13.6.0"
resolved "https://registry.yarnpkg.com/@octokit/types/-/types-13.6.0.tgz#db13d345cc3fe1a0f7c07171c724d90f2b55f410"
integrity sha512-CrooV/vKCXqwLa+osmHLIMUb87brpgUqlqkPGc6iE2wCkUvTrHiXFMhAKoDDaAAYJrtKtrFTgSQTg5nObBEaew==
dependencies:
"@octokit/openapi-types" "^22.2.0"
"@octokit/types@^8.0.0": "@octokit/types@^8.0.0":
version "8.0.0" version "8.0.0"
resolved "https://registry.yarnpkg.com/@octokit/types/-/types-8.0.0.tgz#93f0b865786c4153f0f6924da067fe0bb7426a9f" resolved "https://registry.yarnpkg.com/@octokit/types/-/types-8.0.0.tgz#93f0b865786c4153f0f6924da067fe0bb7426a9f"
@@ -2183,18 +2240,6 @@ doctrine@^3.0.0:
dependencies: dependencies:
esutils "^2.0.2" esutils "^2.0.2"
dotenv-safe@^4.0.4:
version "4.0.4"
resolved "https://registry.yarnpkg.com/dotenv-safe/-/dotenv-safe-4.0.4.tgz#8b0e7ced8e70b1d3c5d874ef9420e406f39425b3"
integrity sha1-iw587Y5wsdPF2HTvlCDkBvOUJbM=
dependencies:
dotenv "^4.0.0"
dotenv@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-4.0.0.tgz#864ef1379aced55ce6f95debecdce179f7a0cd1d"
integrity sha1-hk7xN5rO1Vzm+V3r7NzhefegzR0=
dugite@^2.3.0: dugite@^2.3.0:
version "2.3.0" version "2.3.0"
resolved "https://registry.yarnpkg.com/dugite/-/dugite-2.3.0.tgz#ff6fdb4c899f84ed6695c9e01eaf4364a6211f13" resolved "https://registry.yarnpkg.com/dugite/-/dugite-2.3.0.tgz#ff6fdb4c899f84ed6695c9e01eaf4364a6211f13"
@@ -6741,10 +6786,10 @@ typedarray@^0.0.6:
resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777"
integrity sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c= integrity sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c=
typescript@^5.1.2: typescript@^5.6.2:
version "5.1.3" version "5.6.2"
resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.1.3.tgz#8d84219244a6b40b6fb2b33cc1c062f715b9e826" resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.6.2.tgz#d1de67b6bef77c41823f822df8f0b3bcff60a5a0"
integrity sha512-XH627E9vkeqhlZFQuL+UsyAXEnibT0kWR2FWONlr4sTjvxyJYnyefgrkyECLzM5NenmKzRAy2rR/OlYLA1HkZw== integrity sha512-NW8ByodCSNCwZeghjN3o+JX5OFH0Ojg6sadjEKY4huZ52TqbJTJnDo5+Tw98lSy63NZvi4n+ez5m2u5d4PkZyw==
uc.micro@^1.0.1, uc.micro@^1.0.5: uc.micro@^1.0.1, uc.micro@^1.0.5:
version "1.0.6" version "1.0.6"