build: convert all release scripts to typescript (#44060)
* build: run gha on tag not branch (#42490)
* build: convert all release scripts to typescript (#44035)
* build: convert all release scripts to typescript
* fix test imports
* build: fix version bumper export
* refactor: use as const
* spec: fix bad type spec
* build: use ts-node to spawn the version-bumper (#44057)
  Missed this in the tsification, we should probably call this via API instead of spawning a sub-proc?
* build: still colors
Parent: 956677b66a
Commit: 2e84985439
22 changed files with 1173 additions and 779 deletions
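The squash message above leaves open whether the version bumper should be invoked through its module API rather than spawned through ts-node. A minimal sketch of the API-based alternative — the './version-bumper' path and the nextVersion(bumpType, version) export are assumptions for illustration, not confirmed exports of this repo:

    // Sketch only: assumes the bumper exposes nextVersion(bumpType, version).
    import { nextVersion } from './version-bumper';

    async function bumpDirectly (bumpType: string, currentVersion: string): Promise<string> {
      // An in-process call avoids the ts-node child process entirely and
      // lets the compiler check the call site.
      return nextVersion(bumpType, currentVersion);
    }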
@@ -11,7 +11,7 @@
     "@electron/github-app-auth": "^2.0.0",
     "@electron/lint-roller": "^2.3.0",
     "@electron/typescript-definitions": "^8.15.2",
-    "@octokit/rest": "^19.0.7",
+    "@octokit/rest": "^20.0.2",
     "@primer/octicons": "^10.0.0",
     "@types/basic-auth": "^1.1.3",
     "@types/busboy": "^1.5.0",
@@ -36,7 +36,6 @@
     "buffer": "^6.0.3",
     "check-for-leaks": "^1.2.1",
     "colors": "1.4.0",
-    "dotenv-safe": "^4.0.4",
     "dugite": "^2.3.0",
     "eslint": "^8.41.0",
     "eslint-config-standard": "^14.1.1",
@@ -68,7 +67,7 @@
     "timers-browserify": "1.4.2",
     "ts-loader": "^8.0.2",
     "ts-node": "6.2.0",
-    "typescript": "^5.1.2",
+    "typescript": "^5.6.2",
     "url": "^0.11.0",
     "webpack": "^5.76.0",
     "webpack-cli": "^4.10.0",
@@ -1,5 +1,3 @@
 if (!process.env.CI) require('dotenv-safe').load();

-const assert = require('node:assert');
-const fs = require('node:fs');
 const got = require('got');
@@ -1,19 +1,19 @@
 if (!process.env.CI) require('dotenv-safe').load();

-const assert = require('node:assert');
-const got = require('got');
+import { Octokit } from '@octokit/rest';
+import got, { OptionsOfTextResponseBody } from 'got';
+import * as assert from 'node:assert';

-const { Octokit } = require('@octokit/rest');
-const { createGitHubTokenStrategy } = require('./github-token');
+import { createGitHubTokenStrategy } from './github-token';
+import { ELECTRON_ORG, ELECTRON_REPO } from './types';
+import { parseArgs } from 'node:util';

 const octokit = new Octokit({
   authStrategy: createGitHubTokenStrategy('electron')
 });

 const BUILD_APPVEYOR_URL = 'https://ci.appveyor.com/api/builds';
 const GH_ACTIONS_PIPELINE_URL = 'https://github.com/electron/electron/actions';
 const GH_ACTIONS_API_URL = '/repos/electron/electron/actions';

-const GH_ACTIONS_WAIT_TIME = process.env.GH_ACTIONS_WAIT_TIME || 30000;
+const GH_ACTIONS_WAIT_TIME = process.env.GH_ACTIONS_WAIT_TIME ? parseInt(process.env.GH_ACTIONS_WAIT_TIME, 10) : 30000;

 const appVeyorJobs = {
   'electron-x64': 'electron-x64-release',
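The GH_ACTIONS_WAIT_TIME change fixes a quiet typing problem: environment variables are always strings, so the old `|| 30000` fallback could leave the constant holding a string. A small standalone illustration of the difference (not from the diff itself):

    process.env.GH_ACTIONS_WAIT_TIME = '45000';

    // Old pattern: looks numeric, but `loose` holds the string '45000'.
    const loose = process.env.GH_ACTIONS_WAIT_TIME || 30000;

    // New pattern: always a number, so arithmetic and setTimeout behave.
    const wait = process.env.GH_ACTIONS_WAIT_TIME
      ? parseInt(process.env.GH_ACTIONS_WAIT_TIME, 10)
      : 30000;

    console.log(typeof loose, typeof wait); // 'string' 'number'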
@@ -22,13 +22,23 @@ const appVeyorJobs = {
 };

 const ghActionsPublishWorkflows = [
-  'macos-publish',
-  'linux-publish'
-];
+  'linux-publish',
+  'macos-publish'
+] as const;

 let jobRequestedCount = 0;

-async function makeRequest ({ auth, username, password, url, headers, body, method }) {
+type ReleaseBuildRequestOptions = {
+  auth?: {
+    bearer?: string;
+  };
+  url: string;
+  headers: Record<string, string>;
+  body: string,
+  method: 'GET' | 'POST';
+}
+
+async function makeRequest ({ auth, url, headers, body, method }: ReleaseBuildRequestOptions) {
   const clonedHeaders = {
     ...(headers || {})
   };
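The `as const` here is what makes the later `typeof ghActionsPublishWorkflows[number]` type work: the array becomes a readonly tuple of string literals instead of `string[]`. A standalone sketch of the idiom:

    const workflows = ['linux-publish', 'macos-publish'] as const;

    // Without `as const` this would be plain `string`;
    // with it, the union 'linux-publish' | 'macos-publish'.
    type WorkflowName = typeof workflows[number];

    function trigger (job: WorkflowName): void {
      console.log(`dispatching ${job}`);
    }

    trigger('macos-publish'); // OK
    // trigger('win-publish'); // compile error: not in the tuple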
@@ -36,17 +46,12 @@ async function makeRequest ({ auth, username, password, url, headers, body, method }) {
     clonedHeaders.Authorization = `Bearer ${auth.bearer}`;
   }

-  const options = {
+  const options: OptionsOfTextResponseBody = {
     headers: clonedHeaders,
     body,
     method
   };

-  if (username || password) {
-    options.username = username;
-    options.password = password;
-  }
-
   const response = await got(url, options);

   if (response.statusCode < 200 || response.statusCode >= 300) {
@@ -56,11 +61,17 @@ async function makeRequest ({ auth, username, password, url, headers, body, method }) {
   return JSON.parse(response.body);
 }

-async function githubActionsCall (targetBranch, workflowName, options) {
+type GitHubActionsCallOptions = {
+  ghRelease?: boolean;
+  newVersion: string;
+  runningPublishWorkflows?: boolean;
+}
+
+async function githubActionsCall (targetBranch: string, workflowName: string, options: GitHubActionsCallOptions) {
   console.log(`Triggering GitHub Actions to run build job: ${workflowName} on branch: ${targetBranch} with release flag.`);
   const buildRequest = {
     branch: targetBranch,
-    parameters: {}
+    parameters: {} as Record<string, string | boolean>
   };
   if (options.ghRelease) {
     buildRequest.parameters['upload-to-storage'] = '0';
@@ -81,13 +92,13 @@ async function githubActionsCall (targetBranch, workflowName, options) {
     console.error('Could not fetch most recent commits for GitHub Actions, returning early');
   }

-  await octokit.request(`POST ${GH_ACTIONS_API_URL}/workflows/${workflowName}.yml/dispatches`, {
-    ref: buildRequest.branch,
+  await octokit.actions.createWorkflowDispatch({
+    repo: ELECTRON_REPO,
+    owner: ELECTRON_ORG,
+    workflow_id: `${workflowName}.yml`,
+    ref: `refs/tags/${options.newVersion}`,
     inputs: {
       ...buildRequest.parameters
     },
     headers: {
       'X-GitHub-Api-Version': '2022-11-28'
     }
   });
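Replacing the stringly-typed `octokit.request('POST …')` with the generated `octokit.actions.createWorkflowDispatch` method means the parameter names are checked by @octokit/rest's types. A self-contained sketch of the same call shape (the owner/repo/tag values are illustrative, not from the diff):

    import { Octokit } from '@octokit/rest';

    const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

    async function dispatchExample (): Promise<void> {
      await octokit.actions.createWorkflowDispatch({
        owner: 'electron',
        repo: 'electron',
        workflow_id: 'linux-publish.yml',
        ref: 'refs/tags/v33.0.0', // illustrative tag, same refs/tags/<newVersion> shape as above
        inputs: { 'upload-to-storage': '0' }
      });
    }

    dispatchExample().catch(console.error);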
@@ -110,17 +121,18 @@ async function githubActionsCall (targetBranch, workflowName, options) {
     }
   }

-async function getGitHubActionsRun (workflowId, headCommit) {
+async function getGitHubActionsRun (workflowName: string, headCommit: string) {
   let runNumber = 0;
   let actionRun;
   while (runNumber === 0) {
-    const actionsRuns = await octokit.request(`GET ${GH_ACTIONS_API_URL}/workflows/${workflowId}.yml/runs`, {
-      headers: {
-        'X-GitHub-Api-Version': '2022-11-28'
-      }
+    const actionsRuns = await octokit.actions.listWorkflowRuns({
+      repo: ELECTRON_REPO,
+      owner: ELECTRON_ORG,
+      workflow_id: `${workflowName}.yml`
     });

     if (!actionsRuns.data.workflow_runs.length) {
-      console.log(`No current workflow_runs found for ${workflowId}, response was: ${actionsRuns.data.workflow_runs}`);
+      console.log(`No current workflow_runs found for ${workflowName}, response was: ${actionsRuns.data.workflow_runs}`);
       runNumber = -1;
       break;
     }
@@ -163,9 +175,14 @@ async function getGitHubActionsRun (workflowId, headCommit) {
   return runNumber;
 }

-async function callAppVeyor (targetBranch, job, options) {
+type AppVeyorCallOptions = {
+  ghRelease?: boolean;
+  commit?: string;
+}
+
+async function callAppVeyor (targetBranch: string, job: keyof typeof appVeyorJobs, options: AppVeyorCallOptions) {
   console.log(`Triggering AppVeyor to run build job: ${job} on branch: ${targetBranch} with release flag.`);
-  const environmentVariables = {
+  const environmentVariables: Record<string, string | number> = {
     ELECTRON_RELEASE: 1,
     APPVEYOR_BUILD_WORKER_CLOUD: 'electronhq-16-core'
   };
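Typing `job` as `keyof typeof appVeyorJobs` derives the legal job names directly from the job map, so an arbitrary CLI string has to be narrowed before it can index the map. A standalone sketch of that pattern:

    const appVeyorJobs = {
      'electron-x64': 'electron-x64-release'
    } as const;

    type JobName = keyof typeof appVeyorJobs; // 'electron-x64'

    // Narrow an arbitrary CLI string to JobName before indexing the map.
    function asJobName (raw: string): JobName {
      if (raw in appVeyorJobs) return raw as JobName;
      throw new Error(`Unknown AppVeyor CI job name: ${raw}`);
    }

    console.log(appVeyorJobs[asJobName('electron-x64')]); // 'electron-x64-release'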
@@ -190,14 +207,14 @@ async function callAppVeyor (targetBranch, job, options) {
       environmentVariables
     }),
     method: 'POST'
-  };
+  } as const;
   jobRequestedCount++;

   try {
-    const { version } = await makeRequest(requestOpts, true);
+    const { version } = await makeRequest(requestOpts);
     const buildUrl = `https://ci.appveyor.com/project/electron-bot/${appVeyorJobs[job]}/build/${version}`;
     console.log(`AppVeyor release build request for ${job} successful. Check build status at ${buildUrl}`);
-  } catch (err) {
+  } catch (err: any) {
     if (err.response?.body) {
       console.error('Could not call AppVeyor: ', {
         statusCode: err.response.statusCode,
@@ -209,68 +226,120 @@ async function callAppVeyor (targetBranch, job, options) {
   }
 }

-function buildAppVeyor (targetBranch, options) {
-  const validJobs = Object.keys(appVeyorJobs);
+type BuildAppVeyorOptions = {
+  job?: keyof typeof appVeyorJobs;
+} & AppVeyorCallOptions;
+
+async function buildAppVeyor (targetBranch: string, options: BuildAppVeyorOptions) {
+  const validJobs = Object.keys(appVeyorJobs) as (keyof typeof appVeyorJobs)[];
   if (options.job) {
     assert(validJobs.includes(options.job), `Unknown AppVeyor CI job name: ${options.job}. Valid values are: ${validJobs}.`);
-    callAppVeyor(targetBranch, options.job, options);
+    await callAppVeyor(targetBranch, options.job, options);
   } else {
     for (const job of validJobs) {
-      callAppVeyor(targetBranch, job, options);
+      await callAppVeyor(targetBranch, job, options);
     }
   }
 }

-// eslint-disable-next-line @typescript-eslint/no-unused-vars
-function buildGHActions (targetBranch, options) {
+type BuildGHActionsOptions = {
+  job?: typeof ghActionsPublishWorkflows[number];
+  arch?: string;
+} & GitHubActionsCallOptions;
+
+async function buildGHActions (targetBranch: string, options: BuildGHActionsOptions) {
   if (options.job) {
     assert(ghActionsPublishWorkflows.includes(options.job), `Unknown GitHub Actions workflow name: ${options.job}. Valid values are: ${ghActionsPublishWorkflows}.`);
-    githubActionsCall(targetBranch, options.job, options);
+    await githubActionsCall(targetBranch, options.job, options);
   } else {
     assert(!options.arch, 'Cannot provide a single architecture while building all workflows, please specify a single workflow via --workflow');
     options.runningPublishWorkflows = true;
     for (const job of ghActionsPublishWorkflows) {
-      githubActionsCall(targetBranch, job, options);
+      await githubActionsCall(targetBranch, job, options);
     }
   }
 }

-function runRelease (targetBranch, options) {
+type RunReleaseOptions = ({
+  ci: 'GitHubActions'
+} & BuildGHActionsOptions) | ({
+  ci: 'AppVeyor'
+} & BuildAppVeyorOptions) | ({
+  ci: undefined,
+} & BuildAppVeyorOptions & BuildGHActionsOptions);
+
+async function runRelease (targetBranch: string, options: RunReleaseOptions) {
   if (options.ci) {
     switch (options.ci) {
       case 'GitHubActions': {
-        buildGHActions(targetBranch, options);
+        await buildGHActions(targetBranch, options);
         break;
       }
       case 'AppVeyor': {
-        buildAppVeyor(targetBranch, options);
+        await buildAppVeyor(targetBranch, options);
         break;
       }
       default: {
-        console.log(`Error! Unknown CI: ${options.ci}.`);
+        console.log(`Error! Unknown CI: ${(options as any).ci}.`);
         process.exit(1);
       }
     }
   } else {
-    buildAppVeyor(targetBranch, options);
-    buildGHActions(targetBranch, options);
+    await Promise.all([
+      buildAppVeyor(targetBranch, options),
+      buildGHActions(targetBranch, options)
+    ]);
   }
   console.log(`${jobRequestedCount} jobs were requested.`);
 }

-module.exports = runRelease;
+export default runRelease;

 if (require.main === module) {
-  const args = require('minimist')(process.argv.slice(2), {
-    boolean: ['ghRelease']
+  const { values: { ghRelease, job, arch, ci, commit, newVersion }, positionals } = parseArgs({
+    options: {
+      ghRelease: {
+        type: 'boolean'
+      },
+      job: {
+        type: 'string'
+      },
+      arch: {
+        type: 'string'
+      },
+      ci: {
+        type: 'string'
+      },
+      commit: {
+        type: 'string'
+      },
+      newVersion: {
+        type: 'string'
+      }
+    },
+    allowPositionals: true
   });
-  const targetBranch = args._[0];
-  if (args._.length < 1) {
+  const targetBranch = positionals[0];
+  if (positionals.length < 1) {
     console.log(`Trigger CI to build release builds of electron.
   Usage: ci-release-build.js [--job=CI_JOB_NAME] [--arch=INDIVIDUAL_ARCH] [--ci=AppVeyor|GitHubActions]
-  [--ghRelease] [--appveyorJobId=xxx] [--commit=sha] TARGET_BRANCH
+  [--ghRelease] [--commit=sha] [--newVersion=version_tag] TARGET_BRANCH
   `);
     process.exit(0);
   }
-  runRelease(targetBranch, args);
+
+  if (ci === 'GitHubActions' || !ci) {
+    if (!newVersion) {
+      console.error('--newVersion is required for GitHubActions');
+      process.exit(1);
+    }
+  }
+
+  runRelease(targetBranch, {
+    ci: ci as 'GitHubActions' | 'AppVeyor',
+    ghRelease,
+    job: job as any,
+    arch,
+    newVersion: newVersion!,
+    commit
+  });
 }
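Every converted script swaps minimist for Node's built-in util.parseArgs, which declares its options up front and rejects unknown flags by default. A self-contained example of the pattern used here (requires Node 18.3+):

    import { parseArgs } from 'node:util';

    const { values, positionals } = parseArgs({
      // args defaults to process.argv.slice(2); pinned here so the example is runnable.
      args: ['--ghRelease', '--ci', 'GitHubActions', 'main'],
      options: {
        ghRelease: { type: 'boolean' },
        ci: { type: 'string' }
      },
      allowPositionals: true
    });

    console.log(values.ghRelease, values.ci, positionals[0]);
    // => true GitHubActions main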
@@ -1,7 +1,6 @@
 if (!process.env.CI) require('dotenv-safe').load();

-const { Octokit } = require('@octokit/rest');
-const { createGitHubTokenStrategy } = require('./github-token');
+import { Octokit } from '@octokit/rest';
+import { createGitHubTokenStrategy } from './github-token';
+import { ELECTRON_ORG, ELECTRON_REPO, ElectronReleaseRepo, NIGHTLY_REPO } from './types';

 if (process.argv.length < 3) {
   console.log('Usage: find-release version');
@@ -15,13 +14,13 @@ const octokit = new Octokit({
   authStrategy: createGitHubTokenStrategy(targetRepo)
 });

-function findRepo () {
-  return version.indexOf('nightly') > 0 ? 'nightlies' : 'electron';
+function findRepo (): ElectronReleaseRepo {
+  return version.indexOf('nightly') > 0 ? NIGHTLY_REPO : ELECTRON_REPO;
 }

 async function findRelease () {
   const releases = await octokit.repos.listReleases({
-    owner: 'electron',
+    owner: ELECTRON_ORG,
     repo: targetRepo
   });
@@ -43,4 +42,8 @@ async function findRelease () {
   console.log(JSON.stringify(returnObject));
 }

-findRelease();
+findRelease()
+  .catch((err) => {
+    console.error(err);
+    process.exit(1);
+  });
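The `.catch` added to `findRelease()` is the entry-point pattern used across the converted scripts: an async main that fails the process on rejection instead of leaving only an unhandled-rejection warning. In isolation:

    async function main (): Promise<void> {
      // ... script body ...
    }

    main().catch((err) => {
      console.error(err);
      process.exit(1); // make CI fail visibly on any rejection
    });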
@@ -1,8 +1,9 @@
-const { Octokit } = require('@octokit/rest');
-const got = require('got');
-const { createGitHubTokenStrategy } = require('./github-token');
+import { Octokit } from '@octokit/rest';
+import got from 'got';
+import { createGitHubTokenStrategy } from './github-token';
+import { ElectronReleaseRepo } from './types';

-async function getAssetContents (repo, assetId) {
+export async function getAssetContents (repo: ElectronReleaseRepo, assetId: number) {
   const octokit = new Octokit({
     userAgent: 'electron-asset-fetcher',
     authStrategy: createGitHubTokenStrategy(repo)
@@ -18,12 +19,12 @@ async function getAssetContents (repo, assetId) {
   });

   const { url, headers } = requestOptions;
-  headers.authorization = `token ${(await octokit.auth()).token}`;
+  headers.authorization = `token ${(await octokit.auth() as { token: string }).token}`;

   const response = await got(url, {
     followRedirect: false,
     method: 'HEAD',
-    headers,
+    headers: headers as Record<string, string>,
     throwHttpErrors: false
   });
@@ -48,7 +49,3 @@ async function getAssetContents (repo, assetId) {

   return fileResponse.body;
 }
-
-module.exports = {
-  getAssetContents
-};
@@ -1,17 +1,20 @@
-const got = require('got');
-const url = require('node:url');
+import got from 'got';
+import * as url from 'node:url';

-module.exports = async function getUrlHash (targetUrl, algorithm = 'sha256', attempts = 3) {
+const HASHER_FUNCTION_HOST = 'electron-artifact-hasher.azurewebsites.net';
+const HASHER_FUNCTION_ROUTE = '/api/HashArtifact';
+
+export async function getUrlHash (targetUrl: string, algorithm = 'sha256', attempts = 3) {
   const options = {
-    code: process.env.ELECTRON_ARTIFACT_HASHER_FUNCTION_KEY,
+    code: process.env.ELECTRON_ARTIFACT_HASHER_FUNCTION_KEY!,
     targetUrl,
     algorithm
   };
   const search = new url.URLSearchParams(options);
   const functionUrl = url.format({
     protocol: 'https:',
-    hostname: 'electron-artifact-hasher.azurewebsites.net',
-    pathname: '/api/HashArtifact',
+    hostname: HASHER_FUNCTION_HOST,
+    pathname: HASHER_FUNCTION_ROUTE,
     search: search.toString()
   });
   try {
@@ -27,10 +30,11 @@ module.exports = async function getUrlHash (targetUrl, algorithm = 'sha256', attempts = 3) {
     return resp.body.trim();
   } catch (err) {
     if (attempts > 1) {
-      if (err.response?.body) {
+      const { response } = err as any;
+      if (response?.body) {
         console.error(`Failed to get URL hash for ${targetUrl} - we will retry`, {
-          statusCode: err.response.statusCode,
-          body: JSON.parse(err.response.body)
+          statusCode: response.statusCode,
+          body: JSON.parse(response.body)
         });
       } else {
         console.error(`Failed to get URL hash for ${targetUrl} - we will retry`, err);
@@ -1,9 +1,11 @@
-const { createTokenAuth } = require('@octokit/auth-token');
-const got = require('got').default;
+import { createTokenAuth } from '@octokit/auth-token';
+import got from 'got';
+
+import { ElectronReleaseRepo } from './types';

 const cachedTokens = Object.create(null);

-async function ensureToken (repo) {
+async function ensureToken (repo: ElectronReleaseRepo) {
   if (!cachedTokens[repo]) {
     cachedTokens[repo] = await (async () => {
       const { ELECTRON_GITHUB_TOKEN, SUDOWOODO_EXCHANGE_URL, SUDOWOODO_EXCHANGE_TOKEN } = process.env;
@@ -35,23 +37,24 @@ async function ensureToken (repo) {
   }
 }

-module.exports.createGitHubTokenStrategy = (repo) => () => {
-  let tokenAuth = null;
+export const createGitHubTokenStrategy = (repo: ElectronReleaseRepo) => () => {
+  let tokenAuth: ReturnType<typeof createTokenAuth> | null = null;

-  async function ensureTokenAuth () {
+  async function ensureTokenAuth (): Promise<ReturnType<typeof createTokenAuth>> {
     if (!tokenAuth) {
       await ensureToken(repo);
       tokenAuth = createTokenAuth(cachedTokens[repo]);
     }
+    return tokenAuth;
   }

   async function auth () {
-    await ensureTokenAuth();
-    return await tokenAuth();
+    return await (await ensureTokenAuth())();
   }
-  auth.hook = async (...args) => {
-    await ensureTokenAuth();
-    return await tokenAuth.hook(...args);
+  const hook: ReturnType<typeof createTokenAuth>['hook'] = async (...args) => {
+    const a = (await ensureTokenAuth());
+    return (a as any).hook(...args);
   };
+  auth.hook = hook;
   return auth;
 };
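Octokit's `authStrategy` option takes a factory that returns an auth function carrying a request `hook`, which is the contract `createGitHubTokenStrategy` fulfils lazily above. A simplified sketch of that contract with a static token (no Sudowoodo token exchange):

    import { Octokit } from '@octokit/rest';
    import { createTokenAuth } from '@octokit/auth-token';

    // createTokenAuth returns an auth() function that already has the .hook
    // Octokit needs, so a strategy can simply close over a token.
    // Assumes ELECTRON_GITHUB_TOKEN is set in the environment.
    const staticTokenStrategy = (token: string) => () => createTokenAuth(token);

    const octokit = new Octokit({
      authStrategy: staticTokenStrategy(process.env.ELECTRON_GITHUB_TOKEN!)
    });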
@@ -1,22 +1,22 @@
 #!/usr/bin/env node

-const { GitProcess } = require('dugite');
-const minimist = require('minimist');
-const path = require('node:path');
-const semver = require('semver');
+import { GitProcess } from 'dugite';
+import { basename } from 'node:path';
+import { valid, compare, gte, lte } from 'semver';

-const { ELECTRON_DIR } = require('../../lib/utils');
-const notesGenerator = require('./notes.js');
+import { ELECTRON_DIR } from '../../lib/utils';
+import { get, render } from './notes';

-const { Octokit } = require('@octokit/rest');
-const { createGitHubTokenStrategy } = require('../github-token');
+import { Octokit } from '@octokit/rest';
+import { createGitHubTokenStrategy } from '../github-token';
+import { parseArgs } from 'node:util';
 const octokit = new Octokit({
   authStrategy: createGitHubTokenStrategy('electron')
 });

-const semverify = version => version.replace(/^origin\//, '').replace(/[xy]/g, '0').replace(/-/g, '.');
+const semverify = (version: string) => version.replace(/^origin\//, '').replace(/[xy]/g, '0').replace(/-/g, '.');

-const runGit = async (args) => {
+const runGit = async (args: string[]) => {
   console.info(`Running: git ${args.join(' ')}`);
   const response = await GitProcess.exec(args, ELECTRON_DIR);
   if (response.exitCode !== 0) {
@@ -25,25 +25,25 @@ const runGit = async (args) => {
   return response.stdout.trim();
 };

-const tagIsSupported = tag => tag && !tag.includes('nightly') && !tag.includes('unsupported');
-const tagIsAlpha = tag => tag && tag.includes('alpha');
-const tagIsBeta = tag => tag && tag.includes('beta');
-const tagIsStable = tag => tagIsSupported(tag) && !tagIsBeta(tag) && !tagIsAlpha(tag);
+const tagIsSupported = (tag: string) => !!tag && !tag.includes('nightly') && !tag.includes('unsupported');
+const tagIsAlpha = (tag: string) => !!tag && tag.includes('alpha');
+const tagIsBeta = (tag: string) => !!tag && tag.includes('beta');
+const tagIsStable = (tag: string) => tagIsSupported(tag) && !tagIsBeta(tag) && !tagIsAlpha(tag);

-const getTagsOf = async (point) => {
+const getTagsOf = async (point: string) => {
   try {
     const tags = await runGit(['tag', '--merged', point]);
     return tags.split('\n')
       .map(tag => tag.trim())
-      .filter(tag => semver.valid(tag))
-      .sort(semver.compare);
+      .filter(tag => valid(tag))
+      .sort(compare);
   } catch (err) {
     console.error(`Failed to fetch tags for point ${point}`);
     throw err;
   }
 };

-const getTagsOnBranch = async (point) => {
+const getTagsOnBranch = async (point: string) => {
   const { data: { default_branch: defaultBranch } } = await octokit.repos.get({
     owner: 'electron',
     repo: 'electron'
@@ -57,7 +57,7 @@ const getTagsOnBranch = async (point) => {
   return (await getTagsOf(point)).filter(tag => !mainTagsSet.has(tag));
 };

-const getBranchOf = async (point) => {
+const getBranchOf = async (point: string) => {
   try {
     const branches = (await runGit(['branch', '-a', '--contains', point]))
       .split('\n')
@@ -89,11 +89,11 @@ const getStabilizationBranches = async () => {
   return (await getAllBranches()).filter(branch => /^origin\/\d+-x-y$/.test(branch));
 };

-const getPreviousStabilizationBranch = async (current) => {
+const getPreviousStabilizationBranch = async (current: string) => {
   const stabilizationBranches = (await getStabilizationBranches())
     .filter(branch => branch !== current && branch !== `origin/${current}`);

-  if (!semver.valid(current)) {
+  if (!valid(current)) {
     // since we don't seem to be on a stabilization branch right now,
     // pick a placeholder name that will yield the newest branch
     // as a comparison point.
@@ -102,20 +102,20 @@ const getPreviousStabilizationBranch = async (current) => {

   let newestMatch = null;
   for (const branch of stabilizationBranches) {
-    if (semver.gte(semverify(branch), semverify(current))) {
+    if (gte(semverify(branch), semverify(current))) {
       continue;
     }
-    if (newestMatch && semver.lte(semverify(branch), semverify(newestMatch))) {
+    if (newestMatch && lte(semverify(branch), semverify(newestMatch))) {
       continue;
     }
     newestMatch = branch;
   }
-  return newestMatch;
+  return newestMatch!;
 };

-const getPreviousPoint = async (point) => {
+const getPreviousPoint = async (point: string) => {
   const currentBranch = await getBranchOf(point);
-  const currentTag = (await getTagsOf(point)).filter(tag => tagIsSupported(tag)).pop();
+  const currentTag = (await getTagsOf(point)).filter(tag => tagIsSupported(tag)).pop()!;
   const currentIsStable = tagIsStable(currentTag);

   try {
@@ -146,18 +146,18 @@ const getPreviousPoint = async (point) => {
   }
 };

-async function getReleaseNotes (range, newVersion, unique) {
+async function getReleaseNotes (range: string, newVersion?: string, unique?: boolean) {
   const rangeList = range.split('..') || ['HEAD'];
-  const to = rangeList.pop();
-  const from = rangeList.pop() || (await getPreviousPoint(to));
+  const to = rangeList.pop()!;
+  const from = rangeList.pop() || (await getPreviousPoint(to))!;

   if (!newVersion) {
     newVersion = to;
   }

-  const notes = await notesGenerator.get(from, to, newVersion);
-  const ret = {
-    text: notesGenerator.render(notes, unique)
+  const notes = await get(from, to, newVersion);
+  const ret: { text: string; warning?: string; } = {
+    text: render(notes, unique)
   };

   if (notes.unknown.length) {
@@ -168,13 +168,24 @@ async function getReleaseNotes (range, newVersion, unique) {
 }

 async function main () {
-  const opts = minimist(process.argv.slice(2), {
-    boolean: ['help', 'unique'],
-    string: ['version']
+  const { values: { help, unique, version }, positionals } = parseArgs({
+    options: {
+      help: {
+        type: 'boolean'
+      },
+      unique: {
+        type: 'boolean'
+      },
+      version: {
+        type: 'string'
+      }
+    },
+    allowPositionals: true
   });
-  opts.range = opts._.shift();
-  if (opts.help || !opts.range) {
-    const name = path.basename(process.argv[1]);
+
+  const range = positionals.shift();
+  if (help || !range) {
+    const name = basename(process.argv[1]);
     console.log(`
 easy usage: ${name} version
@@ -194,7 +205,7 @@ For example, these invocations are equivalent:
     return 0;
   }

-  const notes = await getReleaseNotes(opts.range, opts.version, opts.unique);
+  const notes = await getReleaseNotes(range, version, unique);
   console.log(notes.text);
   if (notes.warning) {
     throw new Error(notes.warning);
@@ -208,4 +219,4 @@ if (require.main === module) {
   });
 }

-module.exports = getReleaseNotes;
+export default getReleaseNotes;
@@ -1,16 +1,13 @@
 #!/usr/bin/env node

-'use strict';
+import { existsSync, readFileSync, writeFileSync, mkdirSync } from 'node:fs';
+import { resolve as _resolve } from 'node:path';

-const fs = require('node:fs');
-const path = require('node:path');
+import { Octokit } from '@octokit/rest';
+import { GitProcess } from 'dugite';

-const { GitProcess } = require('dugite');
-
-const { Octokit } = require('@octokit/rest');
-
-const { ELECTRON_DIR } = require('../../lib/utils');
-const { createGitHubTokenStrategy } = require('../github-token');
+import { ELECTRON_DIR } from '../../lib/utils';
+import { createGitHubTokenStrategy } from '../github-token';

 const octokit = new Octokit({
   authStrategy: createGitHubTokenStrategy('electron')
@@ -26,24 +23,52 @@ const NO_NOTES = 'No notes';
 const docTypes = new Set(['doc', 'docs']);
 const featTypes = new Set(['feat', 'feature']);
 const fixTypes = new Set(['fix']);
-const otherTypes = new Set(['spec', 'build', 'test', 'chore', 'deps', 'refactor', 'tools', 'perf', 'style', 'ci']);
-const knownTypes = new Set([...docTypes.keys(), ...featTypes.keys(), ...fixTypes.keys(), ...otherTypes.keys()]);
+const otherTypes = new Set([
+  'spec',
+  'build',
+  'test',
+  'chore',
+  'deps',
+  'refactor',
+  'tools',
+  'perf',
+  'style',
+  'ci'
+]);
+const knownTypes = new Set([
+  ...docTypes.keys(),
+  ...featTypes.keys(),
+  ...fixTypes.keys(),
+  ...otherTypes.keys()
+]);

-const getCacheDir = () => process.env.NOTES_CACHE_PATH || path.resolve(__dirname, '.cache');
+const getCacheDir = () =>
+  process.env.NOTES_CACHE_PATH || _resolve(__dirname, '.cache');

 /**
-***
-**/
+ ***
+ **/

+type MinimalPR = {
+  title: string;
+  body: string | null;
+  number: number;
+  labels: {
+    name: string;
+  }[];
+  base: { repo: { name: string; owner: { login: string } } };
+};
+
 // link to a GitHub item, e.g. an issue or pull request
 class GHKey {
-  constructor (owner, repo, number) {
-    this.owner = owner;
-    this.repo = repo;
-    this.number = number;
-  }
+  // eslint-disable-next-line no-useless-constructor
+  constructor (
+    public readonly owner: string,
+    public readonly repo: string,
+    public readonly number: number
+  ) {}

-  static NewFromPull (pull) {
+  static NewFromPull (pull: MinimalPR) {
     const owner = pull.base.repo.owner.login;
     const repo = pull.base.repo.name;
     const number = pull.number;
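`MinimalPR` leans on TypeScript's structural typing: the script declares only the fields it actually reads, and any richer object — such as a full @octokit/rest pull payload — satisfies the type without casts. An illustration (the sample values are invented):

    type MinimalPR = {
      title: string;
      body: string | null;
      number: number;
      labels: { name: string }[];
      base: { repo: { name: string; owner: { login: string } } };
    };

    const fromApi = {
      title: 'fix: example',
      body: null,
      number: 12345,
      labels: [{ name: 'merged/30-x-y' }],
      base: { repo: { name: 'electron', owner: { login: 'electron' } } },
      merged_at: '2024-10-01T00:00:00Z' // extra API fields are fine
    };

    const pr: MinimalPR = fromApi; // accepted structurally, no cast needed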
@@ -52,38 +77,33 @@ class GHKey {
   }
 }

 class Commit {
-  constructor (hash, owner, repo) {
-    this.hash = hash; // string
-    this.owner = owner; // string
-    this.repo = repo; // string
+  public isBreakingChange = false;
+  public note: string | null = null;
+  public trops = new Map<string, GHKey>();
+  public readonly prKeys = new Set<GHKey>();
+  public revertHash: string | null = null;
+  public semanticType: string | null = null;
+  public subject: string | null = null;

-    this.isBreakingChange = false;
-    this.note = null; // string
-
-    // A set of branches to which this change has been merged.
-    // '8-x-y' => GHKey { owner: 'electron', repo: 'electron', number: 23714 }
-    this.trops = new Map(); // Map<string,GHKey>
-
-    this.prKeys = new Set(); // GHKey
-    this.revertHash = null; // string
-    this.semanticType = null; // string
-    this.subject = null; // string
-  }
+  // eslint-disable-next-line no-useless-constructor
+  constructor (
+    public readonly hash: string,
+    public readonly owner: string,
+    public readonly repo: string
+  ) {}
 }

 class Pool {
-  constructor () {
-    this.commits = []; // Array<Commit>
-    this.processedHashes = new Set();
-    this.pulls = {}; // GHKey.number => octokit pull object
-  }
+  public commits: Commit[] = [];
+  public processedHashes = new Set<string>();
+  public pulls: Record<number, MinimalPR> = Object.create(null);
 }

 /**
-***
-**/
+ ***
+ **/

-const runGit = async (dir, args) => {
+const runGit = async (dir: string, args: string[]) => {
   const response = await GitProcess.exec(args, dir);
   if (response.exitCode !== 0) {
     throw new Error(response.stderr.trim());
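The rewritten `GHKey` and `Commit` constructors use TypeScript parameter properties: `public readonly` in the parameter list both declares and assigns the field, which is why the bodies are empty and the no-useless-constructor lint rule has to be silenced. In miniature:

    class GHKey {
      // eslint-disable-next-line no-useless-constructor
      constructor (
        public readonly owner: string,
        public readonly repo: string,
        public readonly number: number
      ) {}
    }

    const key = new GHKey('electron', 'electron', 23714);
    console.log(key.owner, key.number); // electron 23714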
@@ -91,11 +111,15 @@ const runGit = async (dir, args) => {
   return response.stdout.trim();
 };

-const getCommonAncestor = async (dir, point1, point2) => {
+const getCommonAncestor = async (
+  dir: string,
+  point1: string,
+  point2: string
+) => {
   return runGit(dir, ['merge-base', point1, point2]);
 };

-const getNoteFromClerk = async (ghKey) => {
+const getNoteFromClerk = async (ghKey: GHKey) => {
   const comments = await getComments(ghKey);
   if (!comments || !comments.data) return;
@@ -105,28 +129,29 @@ const getNoteFromClerk = async (ghKey) => {
   const QUOTE_LEAD = '> ';

   for (const comment of comments.data.reverse()) {
-    if (comment.user.login !== CLERK_LOGIN) {
+    if (comment.user?.login !== CLERK_LOGIN) {
       continue;
     }
     if (comment.body === CLERK_NO_NOTES) {
       return NO_NOTES;
     }
-    if (comment.body.startsWith(PERSIST_LEAD)) {
+    if (comment.body?.startsWith(PERSIST_LEAD)) {
       let lines = comment.body
-        .slice(PERSIST_LEAD.length).trim() // remove PERSIST_LEAD
+        .slice(PERSIST_LEAD.length)
+        .trim() // remove PERSIST_LEAD
         .split(/\r?\n/) // split into lines
-        .map(line => line.trim())
-        .map(line => line.replace('&lt;', '<'))
-        .map(line => line.replace('&gt;', '>'))
-        .filter(line => line.startsWith(QUOTE_LEAD)) // notes are quoted
-        .map(line => line.slice(QUOTE_LEAD.length)); // unquote the lines
+        .map((line) => line.trim())
+        .map((line) => line.replace('&lt;', '<'))
+        .map((line) => line.replace('&gt;', '>'))
+        .filter((line) => line.startsWith(QUOTE_LEAD)) // notes are quoted
+        .map((line) => line.slice(QUOTE_LEAD.length)); // unquote the lines

       const firstLine = lines.shift();
       // indent anything after the first line to ensure that
       // multiline notes with their own sub-lists don't get
       // parsed in the markdown as part of the top-level list
       // (example: https://github.com/electron/electron/pull/25216)
-      lines = lines.map(line => ' ' + line);
+      lines = lines.map((line) => ' ' + line);
       return [firstLine, ...lines]
         .join('\n') // join the lines
         .trim();
@@ -146,7 +171,7 @@ const getNoteFromClerk = async (ghKey) => {
  * line starting with 'BREAKING CHANGE' in body -- sets isBreakingChange
  * 'Backport of #99999' -- sets pr
  */
-const parseCommitMessage = (commitMessage, commit) => {
+const parseCommitMessage = (commitMessage: string, commit: Commit) => {
   const { owner, repo } = commit;

   // split commitMessage into subject & body
@@ -180,23 +205,32 @@ const parseCommitMessage = (commitMessage, commit) => {
   }

   // Check for a comment that indicates a PR
-  const backportPattern = /(?:^|\n)(?:manual |manually )?backport.*(?:#(\d+)|\/pull\/(\d+))/im;
+  const backportPattern =
+    /(?:^|\n)(?:manual |manually )?backport.*(?:#(\d+)|\/pull\/(\d+))/im;
   if ((match = commitMessage.match(backportPattern))) {
     // This might be the first or second capture group depending on if it's a link or not.
-    const backportNumber = match[1] ? parseInt(match[1], 10) : parseInt(match[2], 10);
+    const backportNumber = match[1]
+      ? parseInt(match[1], 10)
+      : parseInt(match[2], 10);
     commit.prKeys.add(new GHKey(owner, repo, backportNumber));
   }

   // https://help.github.com/articles/closing-issues-using-keywords/
-  if (body.match(/\b(?:close|closes|closed|fix|fixes|fixed|resolve|resolves|resolved|for)\s#(\d+)\b/i)) {
+  if (
+    body.match(
+      /\b(?:close|closes|closed|fix|fixes|fixed|resolve|resolves|resolved|for)\s#(\d+)\b/i
+    )
+  ) {
     commit.semanticType = commit.semanticType || 'fix';
   }

   // https://www.conventionalcommits.org/en
-  if (commitMessage
-    .split(/\r?\n/) // split into lines
-    .map(line => line.trim())
-    .some(line => line.startsWith('BREAKING CHANGE'))) {
+  if (
+    commitMessage
+      .split(/\r?\n/) // split into lines
+      .map((line) => line.trim())
+      .some((line) => line.startsWith('BREAKING CHANGE'))
+  ) {
     commit.isBreakingChange = true;
   }
@@ -209,76 +243,109 @@ const parseCommitMessage = (commitMessage, commit) => {
   return commit;
 };

-const parsePullText = (pull, commit) => parseCommitMessage(`${pull.data.title}\n\n${pull.data.body}`, commit);
+const parsePullText = (pull: MinimalPR, commit: Commit) =>
+  parseCommitMessage(`${pull.title}\n\n${pull.body}`, commit);

-const getLocalCommitHashes = async (dir, ref) => {
+const getLocalCommitHashes = async (dir: string, ref: string) => {
   const args = ['log', '--format=%H', ref];
   return (await runGit(dir, args))
     .split(/\r?\n/) // split into lines
-    .map(hash => hash.trim());
+    .map((hash) => hash.trim());
 };

 // return an array of Commits
-const getLocalCommits = async (module, point1, point2) => {
+const getLocalCommits = async (
+  module: LocalRepo,
+  point1: string,
+  point2: string
+) => {
   const { owner, repo, dir } = module;

   const fieldSep = ',';
   const format = ['%H', '%s'].join(fieldSep);
-  const args = ['log', '--cherry-pick', '--right-only', '--first-parent', `--format=${format}`, `${point1}..${point2}`];
+  const args = [
+    'log',
+    '--cherry-pick',
+    '--right-only',
+    '--first-parent',
+    `--format=${format}`,
+    `${point1}..${point2}`
+  ];
   const logs = (await runGit(dir, args))
     .split(/\r?\n/) // split into lines
-    .map(field => field.trim());
+    .map((field) => field.trim());

   const commits = [];
   for (const log of logs) {
     if (!log) {
       continue;
     }
-    const [hash, subject] = log.split(fieldSep, 2).map(field => field.trim());
+    const [hash, subject] = log.split(fieldSep, 2).map((field) => field.trim());
     commits.push(parseCommitMessage(subject, new Commit(hash, owner, repo)));
   }
   return commits;
 };

-const checkCache = async (name, operation) => {
-  const filename = path.resolve(getCacheDir(), name);
-  if (fs.existsSync(filename)) {
-    return JSON.parse(fs.readFileSync(filename, 'utf8'));
+const checkCache = async <T>(
+  name: string,
+  operation: () => Promise<T>
+): Promise<T> => {
+  const filename = _resolve(getCacheDir(), name);
+  if (existsSync(filename)) {
+    return JSON.parse(readFileSync(filename, 'utf8'));
   }
   process.stdout.write('.');
   const response = await operation();
   if (response) {
-    fs.writeFileSync(filename, JSON.stringify(response));
+    writeFileSync(filename, JSON.stringify(response));
   }
   return response;
 };

 // helper function to add some resiliency to volatile GH api endpoints
-async function runRetryable (fn, maxRetries) {
-  let lastError;
+async function runRetryable<T> (
+  fn: () => Promise<T>,
+  maxRetries: number
+): Promise<T | null> {
+  let lastError: Error & { status?: number };
   for (let i = 0; i < maxRetries; i++) {
     try {
       return await fn();
     } catch (error) {
-      await new Promise(resolve => setTimeout(resolve, CHECK_INTERVAL));
-      lastError = error;
+      await new Promise((resolve) => setTimeout(resolve, CHECK_INTERVAL));
+      lastError = error as any;
     }
   }
   // Silently eat 404s.
   // Silently eat 422s, which come from "No commit found for SHA"
-  if (lastError.status !== 404 && lastError.status !== 422) throw lastError;
+  // eslint-disable-next-line no-throw-literal
+  if (lastError!.status !== 404 && lastError!.status !== 422) throw lastError!;

   return null;
 }

-const getPullCacheFilename = ghKey => `${ghKey.owner}-${ghKey.repo}-pull-${ghKey.number}`;
+const getPullCacheFilename = (ghKey: GHKey) =>
+  `${ghKey.owner}-${ghKey.repo}-pull-${ghKey.number}`;

-const getCommitPulls = async (owner, repo, hash) => {
+const getCommitPulls = async (owner: string, repo: string, hash: string) => {
   const name = `${owner}-${repo}-commit-${hash}`;
-  const retryableFunc = () => octokit.repos.listPullRequestsAssociatedWithCommit({ owner, repo, commit_sha: hash });
-  let ret = await checkCache(name, () => runRetryable(retryableFunc, MAX_FAIL_COUNT));
+  const retryableFunc = async () => {
+    const { data } = await octokit.repos.listPullRequestsAssociatedWithCommit({
+      owner,
+      repo,
+      commit_sha: hash
+    });
+    return {
+      data
+    };
+  };
+  let ret = await checkCache(name, () =>
+    runRetryable(retryableFunc, MAX_FAIL_COUNT)
+  );

   // only merged pulls belong in release notes
   if (ret && ret.data) {
-    ret.data = ret.data.filter(pull => pull.merged_at);
+    ret.data = ret.data.filter((pull) => pull.merged_at);
   }

   // cache the pulls
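Making `checkCache` and `runRetryable` generic keeps the cached or retried result typed at each call site instead of collapsing to `any`. A minimal standalone version of the retry helper under the same 404/422 rule, with the delay made a parameter rather than the module-level CHECK_INTERVAL:

    async function retry<T> (fn: () => Promise<T>, maxRetries: number, delayMs = 5000): Promise<T | null> {
      let lastError: (Error & { status?: number }) | undefined;
      for (let i = 0; i < maxRetries; i++) {
        try {
          return await fn();
        } catch (error) {
          lastError = error as any;
          await new Promise((resolve) => setTimeout(resolve, delayMs));
        }
      }
      // mirror the "silently eat 404s/422s" rule from the diff above
      if (lastError!.status !== 404 && lastError!.status !== 422) throw lastError;
      return null;
    }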
@@ -286,7 +353,7 @@ const getCommitPulls = async (owner, repo, hash) => {
     for (const pull of ret.data) {
       const cachefile = getPullCacheFilename(GHKey.NewFromPull(pull));
       const payload = { ...ret, data: pull };
-      await checkCache(cachefile, () => payload);
+      await checkCache(cachefile, async () => payload);
     }
   }
@@ -298,21 +365,39 @@ const getCommitPulls = async (owner, repo, hash) => {
   return ret;
 };

-const getPullRequest = async (ghKey) => {
+const getPullRequest = async (ghKey: GHKey) => {
   const { number, owner, repo } = ghKey;
   const name = getPullCacheFilename(ghKey);
-  const retryableFunc = () => octokit.pulls.get({ pull_number: number, owner, repo });
+  const retryableFunc = () =>
+    octokit.pulls.get({ pull_number: number, owner, repo });
   return checkCache(name, () => runRetryable(retryableFunc, MAX_FAIL_COUNT));
 };

-const getComments = async (ghKey) => {
+const getComments = async (ghKey: GHKey) => {
   const { number, owner, repo } = ghKey;
   const name = `${owner}-${repo}-issue-${number}-comments`;
-  const retryableFunc = () => octokit.issues.listComments({ issue_number: number, owner, repo, per_page: 100 });
+  const retryableFunc = () =>
+    octokit.issues.listComments({
+      issue_number: number,
+      owner,
+      repo,
+      per_page: 100
+    });
   return checkCache(name, () => runRetryable(retryableFunc, MAX_FAIL_COUNT));
 };

-const addRepoToPool = async (pool, repo, from, to) => {
+type LocalRepo = {
+  owner: string;
+  repo: string;
+  dir: string;
+};
+
+const addRepoToPool = async (
+  pool: Pool,
+  repo: LocalRepo,
+  from: string,
+  to: string
+) => {
   const commonAncestor = await getCommonAncestor(repo.dir, from, to);

   // mark the old branch's commits as old news
@@ -337,42 +422,59 @@ const addRepoToPool = async (pool, repo, from, to) => {
     for (prKey of commit.prKeys.values()) {
       const pull = await getPullRequest(prKey);
       if (!pull || !pull.data) continue; // couldn't get it
-      pool.pulls[prKey.number] = pull;
-      parsePullText(pull, commit);
+      pool.pulls[prKey.number] = pull.data;
+      parsePullText(pull.data, commit);
     }
   }
 };

+type MinimalComment = {
+  user: {
+    login: string;
+  } | null;
+  body?: string;
+};
+
 // @return Map<string,GHKey>
 //   where the key is a branch name (e.g. '7-1-x' or '8-x-y')
 //   and the value is a GHKey to the PR
-async function getMergedTrops (commit, pool) {
+async function getMergedTrops (commit: Commit, pool: Pool) {
   const branches = new Map();

   for (const prKey of commit.prKeys.values()) {
     const pull = pool.pulls[prKey.number];
     const mergedBranches = new Set(
-      ((pull && pull.data && pull.data.labels) ? pull.data.labels : [])
-        .map(label => ((label && label.name) ? label.name : '').match(/merged\/([0-9]+-[x0-9]-[xy0-9])/))
-        .filter(match => match)
-        .map(match => match[1])
+      (pull && pull && pull.labels ? pull.labels : [])
+        .map((label) =>
+          (label && label.name ? label.name : '').match(
+            /merged\/([0-9]+-[x0-9]-[xy0-9])/
+          )
+        )
+        .filter((match) => !!match)
+        .map((match) => match[1])
     );

     if (mergedBranches.size > 0) {
-      const isTropComment = (comment) => comment && comment.user && comment.user.login === TROP_LOGIN;
+      const isTropComment = (comment: MinimalComment | null) =>
+        comment && comment.user && comment.user.login === TROP_LOGIN;

-      const ghKey = GHKey.NewFromPull(pull.data);
-      const backportRegex = /backported this PR to "(.*)",\s+please check out #(\d+)/;
-      const getBranchNameAndPullKey = (comment) => {
-        const match = ((comment && comment.body) ? comment.body : '').match(backportRegex);
-        return match ? [match[1], new GHKey(ghKey.owner, ghKey.repo, parseInt(match[2]))] : null;
+      const ghKey = GHKey.NewFromPull(pull);
+      const backportRegex =
+        /backported this PR to "(.*)",\s+please check out #(\d+)/;
+      const getBranchNameAndPullKey = (comment: MinimalComment) => {
+        const match = (comment && comment.body ? comment.body : '').match(
+          backportRegex
+        );
+        return match
+          ? <const>[match[1], new GHKey(ghKey.owner, ghKey.repo, parseInt(match[2]))]
+          : null;
       };

       const comments = await getComments(ghKey);
-      ((comments && comments.data) ? comments.data : [])
+      (comments && comments.data ? comments.data : [])
         .filter(isTropComment)
         .map(getBranchNameAndPullKey)
-        .filter(pair => pair)
+        .filter((pair) => !!pair)
         .filter(([branch]) => mergedBranches.has(branch))
         .forEach(([branch, key]) => branches.set(branch, key));
     }
@@ -383,36 +485,48 @@ async function getMergedTrops (commit, pool) {

 // @return the shorthand name of the branch that `ref` is on,
 //   e.g. a ref of '10.0.0-beta.1' will return '10-x-y'
-async function getBranchNameOfRef (ref, dir) {
-  return (await runGit(dir, ['branch', '--all', '--contains', ref, '--sort', 'version:refname']))
+async function getBranchNameOfRef (ref: string, dir: string) {
+  const result = await runGit(dir, [
+    'branch',
+    '--all',
+    '--contains',
+    ref,
+    '--sort',
+    'version:refname'
+  ]);
+  return result
     .split(/\r?\n/) // split into lines
-    .shift() // we sorted by refname and want the first result
-    .match(/(?:\s?\*\s){0,1}(.*)/)[1] // if present, remove leading '* ' in case we're currently in that branch
-    .match(/(?:.*\/)?(.*)/)[1] // 'remote/origins/10-x-y' -> '10-x-y'
+    .shift()! // we sorted by refname and want the first result
+    .match(/(?:\s?\*\s){0,1}(.*)/)![1] // if present, remove leading '* ' in case we're currently in that branch
+    .match(/(?:.*\/)?(.*)/)![1] // 'remote/origins/10-x-y' -> '10-x-y'
     .trim();
 }

 /***
-**** Main
-***/
+ **** Main
+ ***/

-const getNotes = async (fromRef, toRef, newVersion) => {
+const getNotes = async (fromRef: string, toRef: string, newVersion: string) => {
   const cacheDir = getCacheDir();
-  if (!fs.existsSync(cacheDir)) {
-    fs.mkdirSync(cacheDir);
+  if (!existsSync(cacheDir)) {
+    mkdirSync(cacheDir);
   }

   const pool = new Pool();
   const toBranch = await getBranchNameOfRef(toRef, ELECTRON_DIR);

-  console.log(`Generating release notes between '${fromRef}' and '${toRef}' for version '${newVersion}' in branch '${toBranch}'`);
+  console.log(
+    `Generating release notes between '${fromRef}' and '${toRef}' for version '${newVersion}' in branch '${toBranch}'`
+  );

   // get the electron/electron commits
   const electron = { owner: 'electron', repo: 'electron', dir: ELECTRON_DIR };
   await addRepoToPool(pool, electron, fromRef, toRef);

   // remove any old commits
-  pool.commits = pool.commits.filter(commit => !pool.processedHashes.has(commit.hash));
+  pool.commits = pool.commits.filter(
+    (commit) => !pool.processedHashes.has(commit.hash)
+  );

   // if a commit _and_ revert occurred in the unprocessed set, skip them both
   for (const commit of pool.commits) {
@@ -421,7 +535,7 @@ const getNotes = async (fromRef, toRef, newVersion) => {
       continue;
     }

-    const revert = pool.commits.find(commit => commit.hash === revertHash);
+    const revert = pool.commits.find((commit) => commit.hash === revertHash);
     if (!revert) {
       continue;
     }
@@ -438,15 +552,15 @@ const getNotes = async (fromRef, toRef, newVersion) => {
       if (commit.note) {
         break;
       }
-      commit.note = await getNoteFromClerk(prKey);
+      commit.note = await getNoteFromClerk(prKey) || null;
     }
   }

   // remove non-user-facing commits
   pool.commits = pool.commits
-    .filter(commit => commit && commit.note)
-    .filter(commit => commit.note !== NO_NOTES)
-    .filter(commit => commit.note.match(/^[Bb]ump v\d+\.\d+\.\d+/) === null);
+    .filter((commit) => commit && commit.note)
+    .filter((commit) => commit.note !== NO_NOTES)
+    .filter((commit) => commit.note!.match(/^[Bb]ump v\d+\.\d+\.\d+/) === null);

   for (const commit of pool.commits) {
     commit.trops = await getMergedTrops(commit, pool);
@@ -455,12 +569,12 @@ const getNotes = async (fromRef, toRef, newVersion) => {
   pool.commits = removeSupercededStackUpdates(pool.commits);

   const notes = {
-    breaking: [],
-    docs: [],
-    feat: [],
-    fix: [],
-    other: [],
-    unknown: [],
+    breaking: [] as Commit[],
+    docs: [] as Commit[],
+    feat: [] as Commit[],
+    fix: [] as Commit[],
+    other: [] as Commit[],
+    unknown: [] as Commit[],
     name: newVersion,
     toBranch
   };
@@ -487,11 +601,13 @@ const getNotes = async (fromRef, toRef, newVersion) => {
   return notes;
 };

-const compareVersions = (v1, v2) => {
+const compareVersions = (v1: string, v2: string) => {
   const [split1, split2] = [v1.split('.'), v2.split('.')];

   if (split1.length !== split2.length) {
-    throw new Error(`Expected version strings to have same number of sections: ${split1} and ${split2}`);
+    throw new Error(
+      `Expected version strings to have same number of sections: ${split1} and ${split2}`
+    );
   }
   for (let i = 0; i < split1.length; i++) {
     const p1 = parseInt(split1[i], 10);
@@ -505,13 +621,13 @@ const compareVersions = (v1, v2) => {
   return 0;
 };

-const removeSupercededStackUpdates = (commits) => {
+const removeSupercededStackUpdates = (commits: Commit[]) => {
   const updateRegex = /^Updated ([a-zA-Z.]+) to v?([\d.]+)/;
   const notupdates = [];

-  const newest = {};
+  const newest: Record<string, { commit: Commit; version: string }> = Object.create(null);
   for (const commit of commits) {
-    const match = (commit.note || commit.subject).match(updateRegex);
+    const match = (commit.note || commit.subject)?.match(updateRegex);
     if (!match) {
       notupdates.push(commit);
       continue;
@@ -523,48 +639,56 @@ const removeSupercededStackUpdates = (commits) => {
     }
   }

-  return [...notupdates, ...Object.values(newest).map(o => o.commit)];
+  return [...notupdates, ...Object.values(newest).map((o) => o.commit)];
 };

 /***
-**** Render
-***/
+ **** Render
+ ***/

 // @return the pull request's GitHub URL
-const buildPullURL = ghKey => `https://github.com/${ghKey.owner}/${ghKey.repo}/pull/${ghKey.number}`;
+const buildPullURL = (ghKey: GHKey) =>
+  `https://github.com/${ghKey.owner}/${ghKey.repo}/pull/${ghKey.number}`;

-const renderPull = ghKey => `[#${ghKey.number}](${buildPullURL(ghKey)})`;
+const renderPull = (ghKey: GHKey) =>
+  `[#${ghKey.number}](${buildPullURL(ghKey)})`;

 // @return the commit's GitHub URL
-const buildCommitURL = commit => `https://github.com/${commit.owner}/${commit.repo}/commit/${commit.hash}`;
+const buildCommitURL = (commit: Commit) =>
+  `https://github.com/${commit.owner}/${commit.repo}/commit/${commit.hash}`;

-const renderCommit = commit => `[${commit.hash.slice(0, 8)}](${buildCommitURL(commit)})`;
+const renderCommit = (commit: Commit) =>
+  `[${commit.hash.slice(0, 8)}](${buildCommitURL(commit)})`;

 // @return a markdown link to the PR if available; otherwise, the git commit
-function renderLink (commit) {
+function renderLink (commit: Commit) {
   const maybePull = commit.prKeys.values().next();
   return maybePull.value ? renderPull(maybePull.value) : renderCommit(commit);
 }

 // @return a terser branch name,
 //   e.g. '7-2-x' -> '7.2' and '8-x-y' -> '8'
-const renderBranchName = name => name.replace(/-[a-zA-Z]/g, '').replace('-', '.');
+const renderBranchName = (name: string) =>
+  name.replace(/-[a-zA-Z]/g, '').replace('-', '.');

-const renderTrop = (branch, ghKey) => `[${renderBranchName(branch)}](${buildPullURL(ghKey)})`;
+const renderTrop = (branch: string, ghKey: GHKey) =>
+  `[${renderBranchName(branch)}](${buildPullURL(ghKey)})`;

 // @return markdown-formatted links to other branches' trops,
 //   e.g. "(Also in 7.2, 8, 9)"
-function renderTrops (commit, excludeBranch) {
+function renderTrops (commit: Commit, excludeBranch: string) {
   const body = [...commit.trops.entries()]
     .filter(([branch]) => branch !== excludeBranch)
     .sort(([branchA], [branchB]) => parseInt(branchA) - parseInt(branchB)) // sort by semver major
     .map(([branch, key]) => renderTrop(branch, key))
     .join(', ');
-  return body ? `<span style="font-size:small;">(Also in ${body})</span>` : body;
+  return body
+    ? `<span style="font-size:small;">(Also in ${body})</span>`
+    : body;
 }

 // @return a slightly cleaned-up human-readable change description
-function renderDescription (commit) {
+function renderDescription (commit: Commit) {
   let note = commit.note || commit.subject || '';
   note = note.trim();
@@ -616,21 +740,26 @@ function renderDescription (commit) {

 // @return markdown-formatted release note line item,
 //   e.g. '* Fixed a foo. #12345 (Also in 7.2, 8, 9)'
-const renderNote = (commit, excludeBranch) =>
-  `* ${renderDescription(commit)} ${renderLink(commit)} ${renderTrops(commit, excludeBranch)}\n`;
+const renderNote = (commit: Commit, excludeBranch: string) =>
+  `* ${renderDescription(commit)} ${renderLink(commit)} ${renderTrops(
+    commit,
+    excludeBranch
+  )}\n`;

-const renderNotes = (notes, unique = false) => {
+const renderNotes = (notes: Awaited<ReturnType<typeof getNotes>>, unique = false) => {
   const rendered = [`# Release Notes for ${notes.name}\n\n`];

-  const renderSection = (title, commits, unique) => {
+  const renderSection = (title: string, commits: Commit[], unique: boolean) => {
     if (unique) {
       // omit changes that also landed in other branches
-      commits = commits.filter((commit) => renderTrops(commit, notes.toBranch).length === 0);
+      commits = commits.filter(
+        (commit) => renderTrops(commit, notes.toBranch).length === 0
+      );
     }
     if (commits.length > 0) {
       rendered.push(
         `## ${title}\n\n`,
-        ...(commits.map(commit => renderNote(commit, notes.toBranch)).sort())
+        ...commits.map((commit) => renderNote(commit, notes.toBranch)).sort()
       );
     }
   };
@@ -641,8 +770,12 @@ const renderNotes = (notes, unique = false) => {
   renderSection('Other Changes', notes.other, unique);

   if (notes.docs.length) {
-    const docs = notes.docs.map(commit => renderLink(commit)).sort();
-    rendered.push('## Documentation\n\n', ` * Documentation changes: ${docs.join(', ')}\n`, '\n');
+    const docs = notes.docs.map((commit) => renderLink(commit)).sort();
+    rendered.push(
+      '## Documentation\n\n',
+      ` * Documentation changes: ${docs.join(', ')}\n`,
+      '\n'
+    );
   }

   renderSection('Unknown', notes.unknown, unique);
@ -651,10 +784,8 @@ const renderNotes = (notes, unique = false) => {
|
|||
};
|
||||
|
||||
/***
|
||||
**** Module
|
||||
***/
|
||||
**** Module
|
||||
***/
|
||||
|
||||
module.exports = {
|
||||
get: getNotes,
|
||||
render: renderNotes
|
||||
};
|
||||
export const get = getNotes;
|
||||
export const render = renderNotes;
|
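For orientation, the renderers above emit plain markdown. A document produced by renderNotes would look roughly like the following (version number and entry invented for illustration; the bullet shape comes straight from renderNote's template and the trailing span from renderTrops):

    # Release Notes for v28.0.0

    ## Fixes

    * Fixed a foo. #12345 <span style="font-size:small;">(Also in 7.2, 8, 9)</span>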
@@ -1,24 +1,44 @@
 #!/usr/bin/env node

-if (!process.env.CI) require('dotenv-safe').load();
-const args = require('minimist')(process.argv.slice(2), {
-  boolean: ['automaticRelease', 'notesOnly', 'stable']
-});
-const ciReleaseBuild = require('./ci-release-build');
-const { Octokit } = require('@octokit/rest');
-const { execSync } = require('node:child_process');
-const { GitProcess } = require('dugite');
+import { Octokit } from '@octokit/rest';
+import { GitProcess } from 'dugite';
+import { execSync } from 'node:child_process';
+import { join } from 'node:path';
+import { createInterface } from 'node:readline';
+import { parseArgs } from 'node:util';

-const path = require('node:path');
-const readline = require('node:readline');
-const releaseNotesGenerator = require('./notes/index.js');
-const { getCurrentBranch, ELECTRON_DIR } = require('../lib/utils.js');
-const { createGitHubTokenStrategy } = require('./github-token');
-const bumpType = args._[0];
+import ciReleaseBuild from './ci-release-build';
+import releaseNotesGenerator from './notes';
+import { getCurrentBranch, ELECTRON_DIR } from '../lib/utils.js';
+import { createGitHubTokenStrategy } from './github-token';
+import { ELECTRON_REPO, ElectronReleaseRepo, NIGHTLY_REPO } from './types';
+
+const { values: { notesOnly, dryRun: dryRunArg, stable: isStableArg, branch: branchArg, automaticRelease }, positionals } = parseArgs({
+  options: {
+    notesOnly: {
+      type: 'boolean'
+    },
+    dryRun: {
+      type: 'boolean'
+    },
+    stable: {
+      type: 'boolean'
+    },
+    branch: {
+      type: 'string'
+    },
+    automaticRelease: {
+      type: 'boolean'
+    }
+  },
+  allowPositionals: true
+});
+
+const bumpType = positionals[0];
 const targetRepo = getRepo();

-function getRepo () {
-  return bumpType === 'nightly' ? 'nightlies' : 'electron';
+function getRepo (): ElectronReleaseRepo {
+  return bumpType === 'nightly' ? NIGHTLY_REPO : ELECTRON_REPO;
 }

 const octokit = new Octokit({

@@ -29,24 +49,34 @@ require('colors');
 const pass = '✓'.green;
 const fail = '✗'.red;

-if (!bumpType && !args.notesOnly) {
+if (!bumpType && !notesOnly) {
   console.log('Usage: prepare-release [stable | minor | beta | alpha | nightly]' +
     ' (--stable) (--notesOnly) (--automaticRelease) (--branch)');
   process.exit(1);
 }

-async function getNewVersion (dryRun) {
-  if (!dryRun) {
+enum DryRunMode {
+  DRY_RUN,
+  REAL_RUN,
+}
+
+async function getNewVersion (dryRunMode: DryRunMode) {
+  if (dryRunMode === DryRunMode.REAL_RUN) {
     console.log(`Bumping for new "${bumpType}" version.`);
   }
-  const bumpScript = path.join(__dirname, 'version-bumper.js');
-  const scriptArgs = ['node', bumpScript, `--bump=${bumpType}`];
-  if (dryRun) scriptArgs.push('--dryRun');
+  const bumpScript = join(__dirname, 'version-bumper.ts');
+  const scriptArgs = [
+    'node',
+    'node_modules/.bin/ts-node',
+    bumpScript,
+    `--bump=${bumpType}`
+  ];
+  if (dryRunMode === DryRunMode.DRY_RUN) scriptArgs.push('--dryRun');
   try {
-    let bumpVersion = execSync(scriptArgs.join(' '), { encoding: 'UTF-8' });
+    let bumpVersion = execSync(scriptArgs.join(' '), { encoding: 'utf-8' });
     bumpVersion = bumpVersion.substr(bumpVersion.indexOf(':') + 1).trim();
     const newVersion = `v${bumpVersion}`;
-    if (!dryRun) {
+    if (dryRunMode === DryRunMode.REAL_RUN) {
       console.log(`${pass} Successfully bumped version to ${newVersion}`);
     }
     return newVersion;

@@ -56,7 +86,7 @@ async function getNewVersion (dryRun) {
   }
 }

-async function getReleaseNotes (currentBranch, newVersion) {
+async function getReleaseNotes (currentBranch: string, newVersion: string) {
   if (bumpType === 'nightly') {
     return { text: 'Nightlies do not get release notes, please compare tags for info.' };
   }

@@ -68,8 +98,8 @@ async function getReleaseNotes (currentBranch, newVersion) {
   return releaseNotes;
 }

-async function createRelease (branchToTarget, isBeta) {
-  const newVersion = await getNewVersion();
+async function createRelease (branchToTarget: string, isPreRelease: boolean) {
+  const newVersion = await getNewVersion(DryRunMode.REAL_RUN);
   const releaseNotes = await getReleaseNotes(branchToTarget, newVersion);
   await tagRelease(newVersion);

@@ -79,6 +109,7 @@ async function createRelease (branchToTarget, isBeta) {
     repo: targetRepo
   }).catch(err => {
     console.log(`${fail} Could not get releases. Error was: `, err);
+    throw err;
   });

   const drafts = releases.data.filter(release => release.draft &&

@@ -92,7 +123,7 @@ async function createRelease (branchToTarget, isBeta) {

   let releaseBody;
   let releaseIsPrelease = false;
-  if (isBeta) {
+  if (isPreRelease) {
     if (newVersion.indexOf('nightly') > 0) {
       releaseBody = 'Note: This is a nightly release. Please file new issues ' +
         'for any bugs you find in it.\n \n This release is published to npm ' +

@@ -132,7 +163,7 @@ async function createRelease (branchToTarget, isBeta) {
   console.log(`${pass} Draft release for ${newVersion} successful.`);
 }

-async function pushRelease (branch) {
+async function pushRelease (branch: string) {
   const pushDetails = await GitProcess.exec(['push', 'origin', `HEAD:${branch}`, '--follow-tags'], ELECTRON_DIR);
   if (pushDetails.exitCode === 0) {
     console.log(`${pass} Successfully pushed the release. Wait for ` +

@@ -143,13 +174,15 @@ async function pushRelease (branch) {
   }
 }

-async function runReleaseBuilds (branch) {
+async function runReleaseBuilds (branch: string, newVersion: string) {
   await ciReleaseBuild(branch, {
-    ghRelease: true
+    ci: undefined,
+    ghRelease: true,
+    newVersion
   });
 }

-async function tagRelease (version) {
+async function tagRelease (version: string) {
   console.log(`Tagging release ${version}.`);
   const checkoutDetails = await GitProcess.exec(['tag', '-a', '-m', version, version], ELECTRON_DIR);
   if (checkoutDetails.exitCode === 0) {

@@ -162,9 +195,9 @@ async function tagRelease (version) {
 }

 async function verifyNewVersion () {
-  const newVersion = await getNewVersion(true);
+  const newVersion = await getNewVersion(DryRunMode.DRY_RUN);
   let response;
-  if (args.automaticRelease) {
+  if (automaticRelease) {
     response = 'y';
   } else {
     response = await promptForVersion(newVersion);

@@ -175,11 +208,13 @@ async function verifyNewVersion () {
     console.log(`${fail} Aborting release of ${newVersion}`);
     process.exit();
   }
+
+  return newVersion;
 }

-async function promptForVersion (version) {
-  return new Promise(resolve => {
-    const rl = readline.createInterface({
+async function promptForVersion (version: string) {
+  return new Promise<string>(resolve => {
+    const rl = createInterface({
       input: process.stdin,
       output: process.stdout
     });

@@ -197,23 +232,23 @@ async function changesToRelease () {
   return !lastCommitWasRelease.test(lastCommit.stdout);
 }

-async function prepareRelease (isBeta, notesOnly) {
-  if (args.dryRun) {
-    const newVersion = await getNewVersion(true);
+async function prepareRelease (isPreRelease: boolean, dryRunMode: DryRunMode) {
+  if (dryRunMode === DryRunMode.DRY_RUN) {
+    const newVersion = await getNewVersion(DryRunMode.DRY_RUN);
     console.log(newVersion);
   } else {
-    const currentBranch = (args.branch) ? args.branch : await getCurrentBranch(ELECTRON_DIR);
+    const currentBranch = branchArg || await getCurrentBranch(ELECTRON_DIR);
     if (notesOnly) {
-      const newVersion = await getNewVersion(true);
+      const newVersion = await getNewVersion(DryRunMode.DRY_RUN);
       const releaseNotes = await getReleaseNotes(currentBranch, newVersion);
       console.log(`Draft release notes are: \n${releaseNotes.text}`);
     } else {
       const changes = await changesToRelease();
       if (changes) {
-        await verifyNewVersion();
-        await createRelease(currentBranch, isBeta);
+        const newVersion = await verifyNewVersion();
+        await createRelease(currentBranch, isPreRelease);
         await pushRelease(currentBranch);
-        await runReleaseBuilds(currentBranch);
+        await runReleaseBuilds(currentBranch, newVersion);
       } else {
         console.log('There are no new changes to this branch since the last release, aborting release.');
         process.exit(1);

@@ -222,7 +257,7 @@ async function prepareRelease (isBeta, notesOnly) {
   }
 }

-prepareRelease(!args.stable, args.notesOnly)
+prepareRelease(!isStableArg, dryRunArg ? DryRunMode.DRY_RUN : DryRunMode.REAL_RUN)
   .catch((err) => {
     console.error(err);
     process.exit(1);
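Every script in this PR swaps minimist for node:util's built-in parseArgs, as seen above. A minimal standalone sketch of the pattern (option names here are illustrative, not from the commit):

    import { parseArgs } from 'node:util';

    // `--dryRun` lands in values.dryRun; bare arguments land in positionals.
    const { values, positionals } = parseArgs({
      options: {
        dryRun: { type: 'boolean' },
        branch: { type: 'string' }
      },
      allowPositionals: true
    });

    console.log(values.dryRun ?? false, values.branch, positionals[0]);

Unlike minimist, parseArgs throws on unknown flags by default, which is why each converted script now declares every option it accepts.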
@@ -1,23 +1,25 @@
-const temp = require('temp');
-const fs = require('node:fs');
-const path = require('node:path');
-const childProcess = require('node:child_process');
-const semver = require('semver');
+import { Octokit } from '@octokit/rest';
+import * as childProcess from 'node:child_process';
+import * as fs from 'node:fs';
+import * as path from 'node:path';
+import * as semver from 'semver';
+import * as temp from 'temp';

-const { getCurrentBranch, ELECTRON_DIR } = require('../lib/utils');
-const { getElectronVersion } = require('../lib/get-version');
-const rootPackageJson = require('../../package.json');
+import { getCurrentBranch, ELECTRON_DIR } from '../lib/utils';
+import { getElectronVersion } from '../lib/get-version';

-const { Octokit } = require('@octokit/rest');
-const { getAssetContents } = require('./get-asset');
-const { createGitHubTokenStrategy } = require('./github-token');
+import { getAssetContents } from './get-asset';
+import { createGitHubTokenStrategy } from './github-token';
+import { ELECTRON_ORG, ELECTRON_REPO, ElectronReleaseRepo, NIGHTLY_REPO } from './types';
+
+const rootPackageJson = JSON.parse(fs.readFileSync(path.resolve(__dirname, '../../package.json'), 'utf-8'));

 if (!process.env.ELECTRON_NPM_OTP) {
   console.error('Please set ELECTRON_NPM_OTP');
   process.exit(1);
 }

-let tempDir;
+let tempDir: string;
 temp.track(); // track and cleanup files at exit

 const files = [

@@ -49,11 +51,11 @@ const octokit = new Octokit({
   authStrategy: createGitHubTokenStrategy(targetRepo)
 });

-function getRepo () {
-  return isNightlyElectronVersion ? 'nightlies' : 'electron';
+function getRepo (): ElectronReleaseRepo {
+  return isNightlyElectronVersion ? NIGHTLY_REPO : ELECTRON_REPO;
 }

-new Promise((resolve, reject) => {
+new Promise<string>((resolve, reject) => {
   temp.mkdir('electron-npm', (err, dirPath) => {
     if (err) {
       reject(err);

@@ -84,7 +86,7 @@ new Promise((resolve, reject) => {
     );

     return octokit.repos.listReleases({
-      owner: 'electron',
+      owner: ELECTRON_ORG,
       repo: targetRepo
     });
   })

@@ -124,7 +126,7 @@ new Promise((resolve, reject) => {
       checksumsAsset.id
     );

-    const checksumsObject = {};
+    const checksumsObject: Record<string, string> = Object.create(null);
     for (const line of checksumsContent.trim().split('\n')) {
       const [checksum, file] = line.split(' *');
       checksumsObject[file] = checksum;

@@ -203,7 +205,7 @@ new Promise((resolve, reject) => {
   })
   .then(() => {
     const currentTags = JSON.parse(childProcess.execSync('npm show electron dist-tags --json').toString());
-    const parsedLocalVersion = semver.parse(currentElectronVersion);
+    const parsedLocalVersion = semver.parse(currentElectronVersion)!;
     if (rootPackageJson.name === 'electron') {
       // We should only customly add dist tags for non-nightly releases where the package name is still
       // "electron"
@@ -1,25 +1,42 @@
 #!/usr/bin/env node

-if (!process.env.CI) require('dotenv-safe').load();
-const args = require('minimist')(process.argv.slice(2), {
-  string: ['tag', 'releaseID'],
-  default: { releaseID: '' }
+import { Octokit } from '@octokit/rest';
+import { parseArgs } from 'node:util';
+
+import { createGitHubTokenStrategy } from './github-token';
+import { ELECTRON_ORG, ELECTRON_REPO, ElectronReleaseRepo, NIGHTLY_REPO } from './types';
+
+const { values: { tag: _tag, releaseID } } = parseArgs({
+  options: {
+    tag: {
+      type: 'string'
+    },
+    releaseID: {
+      type: 'string',
+      default: ''
+    }
+  }
 });
-const { Octokit } = require('@octokit/rest');
-const { createGitHubTokenStrategy } = require('./github-token');
+
+if (!_tag) {
+  console.error('Missing --tag argument');
+  process.exit(1);
+}
+
+const tag = _tag;

 require('colors');
 const pass = '✓'.green;
 const fail = '✗'.red;

-async function deleteDraft (releaseId, targetRepo) {
+async function deleteDraft (releaseId: string, targetRepo: ElectronReleaseRepo) {
   const octokit = new Octokit({
     authStrategy: createGitHubTokenStrategy(targetRepo)
   });

   try {
     const result = await octokit.repos.getRelease({
-      owner: 'electron',
+      owner: ELECTRON_ORG,
       repo: targetRepo,
       release_id: parseInt(releaseId, 10)
     });

@@ -28,7 +45,7 @@ async function deleteDraft (releaseId, targetRepo) {
       return false;
     } else {
       await octokit.repos.deleteRelease({
-        owner: 'electron',
+        owner: ELECTRON_ORG,
         repo: targetRepo,
         release_id: result.data.id
       });

@@ -41,14 +58,14 @@ async function deleteDraft (releaseId, targetRepo) {
   }
 }

-async function deleteTag (tag, targetRepo) {
+async function deleteTag (tag: string, targetRepo: ElectronReleaseRepo) {
   const octokit = new Octokit({
     authStrategy: createGitHubTokenStrategy(targetRepo)
   });

   try {
     await octokit.git.deleteRef({
-      owner: 'electron',
+      owner: ELECTRON_ORG,
       repo: targetRepo,
       ref: `tags/${tag}`
     });

@@ -59,31 +76,35 @@ async function deleteTag (tag, targetRepo) {
 }

 async function cleanReleaseArtifacts () {
-  const releaseId = args.releaseID.length > 0 ? args.releaseID : null;
-  const isNightly = args.tag.includes('nightly');
+  const releaseId = releaseID && releaseID.length > 0 ? releaseID : null;
+  const isNightly = tag.includes('nightly');

   if (releaseId) {
     if (isNightly) {
-      await deleteDraft(releaseId, 'nightlies');
+      await deleteDraft(releaseId, NIGHTLY_REPO);

       // We only need to delete the Electron tag since the
       // nightly tag is only created at publish-time.
-      await deleteTag(args.tag, 'electron');
+      await deleteTag(tag, ELECTRON_REPO);
     } else {
-      const deletedElectronDraft = await deleteDraft(releaseId, 'electron');
+      const deletedElectronDraft = await deleteDraft(releaseId, ELECTRON_REPO);
       // don't delete tag unless draft deleted successfully
       if (deletedElectronDraft) {
-        await deleteTag(args.tag, 'electron');
+        await deleteTag(tag, ELECTRON_REPO);
       }
     }
   } else {
     await Promise.all([
-      deleteTag(args.tag, 'electron'),
-      deleteTag(args.tag, 'nightlies')
+      deleteTag(tag, ELECTRON_REPO),
+      deleteTag(tag, NIGHTLY_REPO)
     ]);
   }

   console.log(`${pass} failed release artifact cleanup complete`);
 }

-cleanReleaseArtifacts();
+cleanReleaseArtifacts()
+  .catch((err) => {
+    console.error(err);
+    process.exit(1);
+  });
@@ -1,36 +1,31 @@
 #!/usr/bin/env node

-if (!process.env.CI) require('dotenv-safe').load();
+import { BlobServiceClient } from '@azure/storage-blob';
+import { Octokit } from '@octokit/rest';
+import got from 'got';
+import { execSync, ExecSyncOptions } from 'node:child_process';
+import { statSync, createReadStream, writeFileSync, close } from 'node:fs';
+import { join } from 'node:path';
+import { gte } from 'semver';
+import { track as trackTemp } from 'temp';

-const args = require('minimist')(process.argv.slice(2), {
-  boolean: [
-    'validateRelease',
-    'verboseNugget'
-  ],
-  default: { verboseNugget: false }
-});
-const fs = require('node:fs');
-const { execSync } = require('node:child_process');
-const got = require('got');
-const path = require('node:path');
-const semver = require('semver');
-const temp = require('temp').track();
-const { BlobServiceClient } = require('@azure/storage-blob');
-const { Octokit } = require('@octokit/rest');
+import { ELECTRON_DIR } from '../lib/utils';
+import { getElectronVersion } from '../lib/get-version';
+import { getUrlHash } from './get-url-hash';
+import { createGitHubTokenStrategy } from './github-token';
+import { ELECTRON_REPO, ElectronReleaseRepo, NIGHTLY_REPO } from './types';
+import { parseArgs } from 'node:util';
+
+const temp = trackTemp();

 require('colors');
 const pass = '✓'.green;
 const fail = '✗'.red;

-const { ELECTRON_DIR } = require('../lib/utils');
-const { getElectronVersion } = require('../lib/get-version');
-const getUrlHash = require('./get-url-hash');
-const { createGitHubTokenStrategy } = require('./github-token');
-
 const pkgVersion = `v${getElectronVersion()}`;

-function getRepo () {
-  return pkgVersion.indexOf('nightly') > 0 ? 'nightlies' : 'electron';
+function getRepo (): ElectronReleaseRepo {
+  return pkgVersion.indexOf('nightly') > 0 ? NIGHTLY_REPO : ELECTRON_REPO;
 }

 const targetRepo = getRepo();

@@ -40,14 +35,17 @@ const octokit = new Octokit({
   authStrategy: createGitHubTokenStrategy(targetRepo)
 });

-async function getDraftRelease (version, skipValidation) {
+async function getDraftRelease (
+  version?: string,
+  skipValidation: boolean = false
+) {
   const releaseInfo = await octokit.repos.listReleases({
     owner: 'electron',
     repo: targetRepo
   });

   const versionToCheck = version || pkgVersion;
-  const drafts = releaseInfo.data.filter(release => {
+  const drafts = releaseInfo.data.filter((release) => {
     return release.tag_name === versionToCheck && release.draft === true;
   });

@@ -58,38 +56,66 @@ async function getDraftRelease (version, skipValidation) {
     if (versionToCheck.includes('beta')) {
       check(draft.prerelease, 'draft is a prerelease');
     }
-    check(draft.body.length > 50 && !draft.body.includes('(placeholder)'), 'draft has release notes');
-    check((failureCount === 0), 'Draft release looks good to go.', true);
+    check(
+      !!draft.body &&
+        draft.body.length > 50 &&
+        !draft.body.includes('(placeholder)'),
+      'draft has release notes'
+    );
+    check(failureCount === 0, 'Draft release looks good to go.', true);
   }
   return draft;
 }

-async function validateReleaseAssets (release, validatingRelease) {
-  const requiredAssets = assetsForVersion(release.tag_name, validatingRelease).sort();
-  const extantAssets = release.assets.map(asset => asset.name).sort();
-  const downloadUrls = release.assets.map(asset => ({ url: asset.browser_download_url, file: asset.name })).sort((a, b) => a.file.localeCompare(b.file));
+type MinimalRelease = {
+  id: number;
+  tag_name: string;
+  draft: boolean;
+  prerelease: boolean;
+  assets: {
+    name: string;
+    browser_download_url: string;
+    id: number;
+  }[];
+};
+
+async function validateReleaseAssets (
+  release: MinimalRelease,
+  validatingRelease: boolean = false
+) {
+  const requiredAssets = assetsForVersion(
+    release.tag_name,
+    validatingRelease
+  ).sort();
+  const extantAssets = release.assets.map((asset) => asset.name).sort();
+  const downloadUrls = release.assets
+    .map((asset) => ({ url: asset.browser_download_url, file: asset.name }))
+    .sort((a, b) => a.file.localeCompare(b.file));

   failureCount = 0;
   for (const asset of requiredAssets) {
     check(extantAssets.includes(asset), asset);
   }
-  check((failureCount === 0), 'All required GitHub assets exist for release', true);
+  check(
+    failureCount === 0,
+    'All required GitHub assets exist for release',
+    true
+  );

   if (!validatingRelease || !release.draft) {
     if (release.draft) {
       await verifyDraftGitHubReleaseAssets(release);
     } else {
-      await verifyShasumsForRemoteFiles(downloadUrls)
-        .catch(err => {
-          console.error(`${fail} error verifyingShasums`, err);
-        });
+      await verifyShasumsForRemoteFiles(downloadUrls).catch((err) => {
+        console.error(`${fail} error verifyingShasums`, err);
+      });
     }
     const azRemoteFiles = azRemoteFilesForVersion(release.tag_name);
     await verifyShasumsForRemoteFiles(azRemoteFiles, true);
   }
 }

-function check (condition, statement, exitIfFail = false) {
+function check (condition: boolean, statement: string, exitIfFail = false) {
   if (condition) {
     console.log(`${pass} ${statement}`);
   } else {

@@ -99,7 +125,7 @@ function check (condition, statement, exitIfFail = false) {
   }
 }

-function assetsForVersion (version, validatingRelease) {
+function assetsForVersion (version: string, validatingRelease: boolean) {
   const patterns = [
     `chromedriver-${version}-darwin-x64.zip`,
     `chromedriver-${version}-darwin-arm64.zip`,

@@ -181,7 +207,7 @@ function assetsForVersion (version, validatingRelease) {
   return patterns;
 }

-const cloudStoreFilePaths = (version) => [
+const cloudStoreFilePaths = (version: string) => [
   `iojs-${version}-headers.tar.gz`,
   `iojs-${version}.tar.gz`,
   `node-${version}.tar.gz`,

@@ -198,7 +224,7 @@ const cloudStoreFilePaths = (version) => [
   'SHASUMS256.txt'
 ];

-function azRemoteFilesForVersion (version) {
+function azRemoteFilesForVersion (version: string) {
   const azCDN = 'https://artifacts.electronjs.org/headers/';
   const versionPrefix = `${azCDN}dist/${version}/`;
   return cloudStoreFilePaths(version).map((filePath) => ({

@@ -207,10 +233,10 @@ function azRemoteFilesForVersion (version) {
   }));
 }

-function runScript (scriptName, scriptArgs, cwd) {
+function runScript (scriptName: string, scriptArgs: string[], cwd?: string) {
   const scriptCommand = `${scriptName} ${scriptArgs.join(' ')}`;
-  const scriptOptions = {
-    encoding: 'UTF-8'
+  const scriptOptions: ExecSyncOptions = {
+    encoding: 'utf-8'
   };
   if (cwd) scriptOptions.cwd = cwd;
   try {

@@ -223,29 +249,48 @@ function runScript (scriptName, scriptArgs, cwd) {

 function uploadNodeShasums () {
   console.log('Uploading Node SHASUMS file to artifacts.electronjs.org.');
-  const scriptPath = path.join(ELECTRON_DIR, 'script', 'release', 'uploaders', 'upload-node-checksums.py');
+  const scriptPath = join(
+    ELECTRON_DIR,
+    'script',
+    'release',
+    'uploaders',
+    'upload-node-checksums.py'
+  );
   runScript(scriptPath, ['-v', pkgVersion]);
-  console.log(`${pass} Done uploading Node SHASUMS file to artifacts.electronjs.org.`);
+  console.log(
+    `${pass} Done uploading Node SHASUMS file to artifacts.electronjs.org.`
+  );
 }

 function uploadIndexJson () {
   console.log('Uploading index.json to artifacts.electronjs.org.');
-  const scriptPath = path.join(ELECTRON_DIR, 'script', 'release', 'uploaders', 'upload-index-json.py');
+  const scriptPath = join(
+    ELECTRON_DIR,
+    'script',
+    'release',
+    'uploaders',
+    'upload-index-json.py'
+  );
   runScript(scriptPath, [pkgVersion]);
   console.log(`${pass} Done uploading index.json to artifacts.electronjs.org.`);
 }

-async function mergeShasums (pkgVersion) {
+async function mergeShasums (pkgVersion: string) {
   // Download individual checksum files for Electron zip files from artifact storage,
   // concatenate them, and upload to GitHub.

   const connectionString = process.env.ELECTRON_ARTIFACTS_BLOB_STORAGE;
   if (!connectionString) {
-    throw new Error('Please set the $ELECTRON_ARTIFACTS_BLOB_STORAGE environment variable');
+    throw new Error(
+      'Please set the $ELECTRON_ARTIFACTS_BLOB_STORAGE environment variable'
+    );
   }

-  const blobServiceClient = BlobServiceClient.fromConnectionString(connectionString);
-  const containerClient = blobServiceClient.getContainerClient('checksums-scratchpad');
+  const blobServiceClient =
+    BlobServiceClient.fromConnectionString(connectionString);
+  const containerClient = blobServiceClient.getContainerClient(
+    'checksums-scratchpad'
+  );
   const blobsIter = containerClient.listBlobsFlat({
     prefix: `${pkgVersion}/`
   });

@@ -260,19 +305,25 @@ async function mergeShasums (pkgVersion) {
   return shasums.join('\n');
 }

-async function createReleaseShasums (release) {
+async function createReleaseShasums (release: MinimalRelease) {
   const fileName = 'SHASUMS256.txt';
-  const existingAssets = release.assets.filter(asset => asset.name === fileName);
+  const existingAssets = release.assets.filter(
+    (asset) => asset.name === fileName
+  );
   if (existingAssets.length > 0) {
-    console.log(`${fileName} already exists on GitHub; deleting before creating new file.`);
-    await octokit.repos.deleteReleaseAsset({
-      owner: 'electron',
-      repo: targetRepo,
-      asset_id: existingAssets[0].id
-    }).catch(err => {
-      console.error(`${fail} Error deleting ${fileName} on GitHub:`, err);
-      process.exit(1);
-    });
+    console.log(
+      `${fileName} already exists on GitHub; deleting before creating new file.`
+    );
+    await octokit.repos
+      .deleteReleaseAsset({
+        owner: 'electron',
+        repo: targetRepo,
+        asset_id: existingAssets[0].id
+      })
+      .catch((err) => {
+        console.error(`${fail} Error deleting ${fileName} on GitHub:`, err);
+        process.exit(1);
+      });
   }
   console.log(`Creating and uploading the release ${fileName}.`);
   const checksums = await mergeShasums(pkgVersion);

@@ -286,31 +337,37 @@ async function createReleaseShasums (release) {
   console.log(`${pass} Successfully uploaded ${fileName} to GitHub.`);
 }

-async function uploadShasumFile (filePath, fileName, releaseId) {
+async function uploadShasumFile (
+  filePath: string,
+  fileName: string,
+  releaseId: number
+) {
   const uploadUrl = `https://uploads.github.com/repos/electron/${targetRepo}/releases/${releaseId}/assets{?name,label}`;
-  return octokit.repos.uploadReleaseAsset({
-    url: uploadUrl,
-    headers: {
-      'content-type': 'text/plain',
-      'content-length': fs.statSync(filePath).size
-    },
-    data: fs.createReadStream(filePath),
-    name: fileName
-  }).catch(err => {
-    console.error(`${fail} Error uploading ${filePath} to GitHub:`, err);
-    process.exit(1);
-  });
+  return octokit.repos
+    .uploadReleaseAsset({
+      url: uploadUrl,
+      headers: {
+        'content-type': 'text/plain',
+        'content-length': statSync(filePath).size
+      },
+      data: createReadStream(filePath),
+      name: fileName
+    } as any)
+    .catch((err) => {
+      console.error(`${fail} Error uploading ${filePath} to GitHub:`, err);
+      process.exit(1);
+    });
 }

-function saveShaSumFile (checksums, fileName) {
-  return new Promise(resolve => {
+function saveShaSumFile (checksums: string, fileName: string) {
+  return new Promise<string>((resolve) => {
     temp.open(fileName, (err, info) => {
       if (err) {
         console.error(`${fail} Could not create ${fileName} file`);
         process.exit(1);
       } else {
-        fs.writeFileSync(info.fd, checksums);
-        fs.close(info.fd, (err) => {
+        writeFileSync(info.fd, checksums);
+        close(info.fd, (err) => {
           if (err) {
             console.error(`${fail} Could close ${fileName} file`);
             process.exit(1);

@@ -322,7 +379,7 @@ function saveShaSumFile (checksums, fileName) {
   });
 }

-async function publishRelease (release) {
+async function publishRelease (release: MinimalRelease) {
   let makeLatest = false;
   if (!release.prerelease) {
     const currentLatest = await octokit.repos.getLatestRelease({

@@ -330,23 +387,25 @@ async function publishRelease (release) {
       repo: targetRepo
     });

-    makeLatest = semver.gte(release.tag_name, currentLatest.data.tag_name);
+    makeLatest = gte(release.tag_name, currentLatest.data.tag_name);
   }

-  return octokit.repos.updateRelease({
-    owner: 'electron',
-    repo: targetRepo,
-    release_id: release.id,
-    tag_name: release.tag_name,
-    draft: false,
-    make_latest: makeLatest ? 'true' : 'false'
-  }).catch(err => {
-    console.error(`${fail} Error publishing release:`, err);
-    process.exit(1);
-  });
+  return octokit.repos
+    .updateRelease({
+      owner: 'electron',
+      repo: targetRepo,
+      release_id: release.id,
+      tag_name: release.tag_name,
+      draft: false,
+      make_latest: makeLatest ? 'true' : 'false'
+    })
+    .catch((err) => {
+      console.error(`${fail} Error publishing release:`, err);
+      process.exit(1);
+    });
 }

-async function makeRelease (releaseToValidate) {
+async function makeRelease (releaseToValidate: string | boolean) {
   if (releaseToValidate) {
     if (releaseToValidate === true) {
       releaseToValidate = pkgVersion;

@@ -371,44 +430,52 @@ async function makeRelease (releaseToValidate) {
     // in index.json, which causes other problems in downstream projects
     uploadIndexJson();
     await publishRelease(draftRelease);
-    console.log(`${pass} SUCCESS!!! Release has been published. Please run ` +
-      '"npm run publish-to-npm" to publish release to npm.');
+    console.log(
+      `${pass} SUCCESS!!! Release has been published. Please run ` +
+        '"npm run publish-to-npm" to publish release to npm.'
+    );
   }
 }

 const SHASUM_256_FILENAME = 'SHASUMS256.txt';
 const SHASUM_1_FILENAME = 'SHASUMS.txt';

-async function verifyDraftGitHubReleaseAssets (release) {
+async function verifyDraftGitHubReleaseAssets (release: MinimalRelease) {
   console.log('Fetching authenticated GitHub artifact URLs to verify shasums');

-  const remoteFilesToHash = await Promise.all(release.assets.map(async asset => {
-    const requestOptions = octokit.repos.getReleaseAsset.endpoint({
-      owner: 'electron',
-      repo: targetRepo,
-      asset_id: asset.id,
-      headers: {
-        Accept: 'application/octet-stream'
-      }
-    });
-
-    const { url, headers } = requestOptions;
-    headers.authorization = `token ${(await octokit.auth()).token}`;
-
-    const response = await got(url, {
-      followRedirect: false,
-      method: 'HEAD',
-      headers,
-      throwHttpErrors: false
-    });
-
-    if (response.statusCode !== 302 && response.statusCode !== 301) {
-      console.error('Failed to HEAD github asset: ' + url);
-      throw new Error('Unexpected status HEAD\'ing github asset: ' + response.statusCode);
-    }
-
-    return { url: response.headers.location, file: asset.name };
-  })).catch(err => {
+  const remoteFilesToHash = await Promise.all(
+    release.assets.map(async (asset) => {
+      const requestOptions = octokit.repos.getReleaseAsset.endpoint({
+        owner: 'electron',
+        repo: targetRepo,
+        asset_id: asset.id,
+        headers: {
+          Accept: 'application/octet-stream'
+        }
+      });
+
+      const { url, headers } = requestOptions;
+      headers.authorization = `token ${
+        ((await octokit.auth()) as { token: string }).token
+      }`;
+
+      const response = await got(url, {
+        followRedirect: false,
+        method: 'HEAD',
+        headers: headers as any,
+        throwHttpErrors: false
+      });
+
+      if (response.statusCode !== 302 && response.statusCode !== 301) {
+        console.error('Failed to HEAD github asset: ' + url);
+        throw new Error(
+          "Unexpected status HEAD'ing github asset: " + response.statusCode
+        );
+      }
+
+      return { url: response.headers.location!, file: asset.name };
+    })
+  ).catch((err) => {
     console.error(`${fail} Error downloading files from GitHub`, err);
     process.exit(1);
   });

@@ -416,7 +483,10 @@ async function verifyDraftGitHubReleaseAssets (release) {
   await verifyShasumsForRemoteFiles(remoteFilesToHash);
 }

-async function getShaSumMappingFromUrl (shaSumFileUrl, fileNamePrefix) {
+async function getShaSumMappingFromUrl (
+  shaSumFileUrl: string,
+  fileNamePrefix: string
+) {
   const response = await got(shaSumFileUrl, {
     throwHttpErrors: false
   });

@@ -424,55 +494,115 @@ async function getShaSumMappingFromUrl (shaSumFileUrl, fileNamePrefix) {
   if (response.statusCode !== 200) {
     console.error('Failed to fetch SHASUM mapping: ' + shaSumFileUrl);
     console.error('Bad SHASUM mapping response: ' + response.body.trim());
-    throw new Error('Unexpected status fetching SHASUM mapping: ' + response.statusCode);
+    throw new Error(
+      'Unexpected status fetching SHASUM mapping: ' + response.statusCode
+    );
   }

   const raw = response.body;
-  return raw.split('\n').map(line => line.trim()).filter(Boolean).reduce((map, line) => {
-    const [sha, file] = line.replace('  ', ' ').split(' ');
-    map[file.slice(fileNamePrefix.length)] = sha;
-    return map;
-  }, {});
+  return raw
+    .split('\n')
+    .map((line) => line.trim())
+    .filter(Boolean)
+    .reduce((map, line) => {
+      const [sha, file] = line.replace('  ', ' ').split(' ');
+      map[file.slice(fileNamePrefix.length)] = sha;
+      return map;
+    }, Object.create(null) as Record<string, string>);
 }

-async function validateFileHashesAgainstShaSumMapping (remoteFilesWithHashes, mapping) {
+type HashedFile = HashableFile & {
+  hash: string;
+};
+
+type HashableFile = {
+  file: string;
+  url: string;
+};
+
+async function validateFileHashesAgainstShaSumMapping (
+  remoteFilesWithHashes: HashedFile[],
+  mapping: Record<string, string>
+) {
   for (const remoteFileWithHash of remoteFilesWithHashes) {
-    check(remoteFileWithHash.hash === mapping[remoteFileWithHash.file], `Release asset ${remoteFileWithHash.file} should have hash of ${mapping[remoteFileWithHash.file]} but found ${remoteFileWithHash.hash}`, true);
+    check(
+      remoteFileWithHash.hash === mapping[remoteFileWithHash.file],
+      `Release asset ${remoteFileWithHash.file} should have hash of ${
+        mapping[remoteFileWithHash.file]
+      } but found ${remoteFileWithHash.hash}`,
+      true
+    );
   }
 }

-async function verifyShasumsForRemoteFiles (remoteFilesToHash, filesAreNodeJSArtifacts = false) {
-  console.log(`Generating SHAs for ${remoteFilesToHash.length} files to verify shasums`);
+async function verifyShasumsForRemoteFiles (
+  remoteFilesToHash: HashableFile[],
+  filesAreNodeJSArtifacts = false
+) {
+  console.log(
+    `Generating SHAs for ${remoteFilesToHash.length} files to verify shasums`
+  );

   // Only used for node.js artifact uploads
-  const shaSum1File = remoteFilesToHash.find(({ file }) => file === SHASUM_1_FILENAME);
+  const shaSum1File = remoteFilesToHash.find(
+    ({ file }) => file === SHASUM_1_FILENAME
+  )!;
   // Used for both node.js artifact uploads and normal electron artifacts
-  const shaSum256File = remoteFilesToHash.find(({ file }) => file === SHASUM_256_FILENAME);
-  remoteFilesToHash = remoteFilesToHash.filter(({ file }) => file !== SHASUM_1_FILENAME && file !== SHASUM_256_FILENAME);
+  const shaSum256File = remoteFilesToHash.find(
+    ({ file }) => file === SHASUM_256_FILENAME
+  )!;
+  remoteFilesToHash = remoteFilesToHash.filter(
+    ({ file }) => file !== SHASUM_1_FILENAME && file !== SHASUM_256_FILENAME
+  );

-  const remoteFilesWithHashes = await Promise.all(remoteFilesToHash.map(async (file) => {
-    return {
-      hash: await getUrlHash(file.url, 'sha256'),
-      ...file
-    };
-  }));
+  const remoteFilesWithHashes = await Promise.all(
+    remoteFilesToHash.map(async (file) => {
+      return {
+        hash: await getUrlHash(file.url, 'sha256'),
+        ...file
+      };
+    })
+  );

-  await validateFileHashesAgainstShaSumMapping(remoteFilesWithHashes, await getShaSumMappingFromUrl(shaSum256File.url, filesAreNodeJSArtifacts ? '' : '*'));
+  await validateFileHashesAgainstShaSumMapping(
+    remoteFilesWithHashes,
+    await getShaSumMappingFromUrl(
+      shaSum256File.url,
+      filesAreNodeJSArtifacts ? '' : '*'
+    )
+  );

   if (filesAreNodeJSArtifacts) {
-    const remoteFilesWithSha1Hashes = await Promise.all(remoteFilesToHash.map(async (file) => {
-      return {
-        hash: await getUrlHash(file.url, 'sha1'),
-        ...file
-      };
-    }));
+    const remoteFilesWithSha1Hashes = await Promise.all(
+      remoteFilesToHash.map(async (file) => {
+        return {
+          hash: await getUrlHash(file.url, 'sha1'),
+          ...file
+        };
+      })
+    );

-    await validateFileHashesAgainstShaSumMapping(remoteFilesWithSha1Hashes, await getShaSumMappingFromUrl(shaSum1File.url, filesAreNodeJSArtifacts ? '' : '*'));
+    await validateFileHashesAgainstShaSumMapping(
+      remoteFilesWithSha1Hashes,
+      await getShaSumMappingFromUrl(
+        shaSum1File.url,
+        filesAreNodeJSArtifacts ? '' : '*'
+      )
+    );
   }
 }

-makeRelease(args.validateRelease)
-  .catch((err) => {
-    console.error('Error occurred while making release:', err);
-    process.exit(1);
-  });
+const {
+  values: { validateRelease }
+} = parseArgs({
+  options: {
+    validateRelease: {
+      type: 'boolean'
+    }
+  }
+});
+
+makeRelease(!!validateRelease).catch((err) => {
+  console.error('Error occurred while making release:', err);
+  process.exit(1);
+});

script/release/types.ts (new file, 7 lines)

@@ -0,0 +1,7 @@
+export const ELECTRON_ORG = 'electron';
+export const ELECTRON_REPO = 'electron';
+export const NIGHTLY_REPO = 'nightlies';
+
+export type ElectronReleaseRepo = 'electron' | 'nightlies';
+
+export type VersionBumpType = 'nightly' | 'alpha' | 'beta' | 'minor' | 'stable';
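These shared constants and unions exist so repo names and bump types are checked at compile time instead of compared as loose strings. A minimal illustrative sketch of how the unions behave (the assertBumpType helper is hypothetical, not part of the commit):

    import { VersionBumpType, NIGHTLY_REPO, ElectronReleaseRepo } from './types';

    // Narrow an arbitrary CLI string to the VersionBumpType union at runtime.
    function assertBumpType (value: string): VersionBumpType {
      const valid = ['nightly', 'alpha', 'beta', 'minor', 'stable'] as const;
      if (!(valid as readonly string[]).includes(value)) {
        throw new Error(`Invalid bump type: ${value}`);
      }
      return value as VersionBumpType;
    }

    const repo: ElectronReleaseRepo = NIGHTLY_REPO; // ok
    // const bad: ElectronReleaseRepo = 'nightly';  // tsc error: not 'electron' | 'nightlies'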
@@ -2,8 +2,6 @@ import { Octokit } from '@octokit/rest';
 import * as fs from 'node:fs';
 import { createGitHubTokenStrategy } from '../github-token';

-if (!process.env.CI) require('dotenv-safe').load();
-
 if (process.argv.length < 6) {
   console.log('Usage: upload-to-github filePath fileName releaseId');
   process.exit(1);

@@ -385,12 +385,12 @@ def upload_sha256_checksum(version, file_path, key_prefix=None):

 def get_release(version):
   script_path = os.path.join(
-    ELECTRON_DIR, 'script', 'release', 'find-github-release.js')
+    ELECTRON_DIR, 'script', 'release', 'find-github-release.ts')

   # Strip warnings from stdout to ensure the only output is the desired object
   release_env = os.environ.copy()
   release_env['NODE_NO_WARNINGS'] = '1'
-  release_info = execute(['node', script_path, version], release_env)
+  release_info = execute([TS_NODE, script_path, version], release_env)
   if is_verbose_mode():
     print(f'Release info for version: {version}:\n')
     print(release_info)

@@ -1,99 +0,0 @@
-#!/usr/bin/env node
-
-const semver = require('semver');
-const minimist = require('minimist');
-
-const { getElectronVersion } = require('../lib/get-version');
-const versionUtils = require('./version-utils');
-
-function parseCommandLine () {
-  let help;
-  const opts = minimist(process.argv.slice(2), {
-    string: ['bump', 'version'],
-    boolean: ['dryRun', 'help'],
-    alias: { version: ['v'] },
-    unknown: () => { help = true; }
-  });
-  if (help || opts.help || !opts.bump) {
-    console.log(`
-      Bump release version number. Possible arguments:\n
-      --bump=patch to increment patch version\n
-      --version={version} to set version number directly\n
-      --dryRun to print the next version without updating files
-    Note that you can use both --bump and --stable simultaneously.
-    `);
-    process.exit(0);
-  }
-  return opts;
-}
-
-// run the script
-async function main () {
-  const opts = parseCommandLine();
-  const currentVersion = getElectronVersion();
-  const version = await nextVersion(opts.bump, currentVersion);
-
-  // print would-be new version and exit early
-  if (opts.dryRun) {
-    console.log(`new version number would be: ${version}\n`);
-    return 0;
-  }
-
-  console.log(`Bumped to version: ${version}`);
-}
-
-// get next version for release based on [nightly, alpha, beta, stable]
-async function nextVersion (bumpType, version) {
-  if (
-    versionUtils.isNightly(version) ||
-    versionUtils.isAlpha(version) ||
-    versionUtils.isBeta(version)
-  ) {
-    switch (bumpType) {
-      case 'nightly':
-        version = await versionUtils.nextNightly(version);
-        break;
-      case 'alpha':
-        version = await versionUtils.nextAlpha(version);
-        break;
-      case 'beta':
-        version = await versionUtils.nextBeta(version);
-        break;
-      case 'stable':
-        version = semver.valid(semver.coerce(version));
-        break;
-      default:
-        throw new Error('Invalid bump type.');
-    }
-  } else if (versionUtils.isStable(version)) {
-    switch (bumpType) {
-      case 'nightly':
-        version = versionUtils.nextNightly(version);
-        break;
-      case 'alpha':
-        throw new Error('Cannot bump to alpha from stable.');
-      case 'beta':
-        throw new Error('Cannot bump to beta from stable.');
-      case 'minor':
-        version = semver.inc(version, 'minor');
-        break;
-      case 'stable':
-        version = semver.inc(version, 'patch');
-        break;
-      default:
-        throw new Error('Invalid bump type.');
-    }
-  } else {
-    throw new Error(`Invalid current version: ${version}`);
-  }
-  return version;
-}
-
-if (require.main === module) {
-  main().catch((error) => {
-    console.error(error);
-    process.exit(1);
-  });
-}
-
-module.exports = { nextVersion };

script/release/version-bumper.ts (new file, 106 lines)

@@ -0,0 +1,106 @@
+#!/usr/bin/env node
+
+import { valid, coerce, inc } from 'semver';
+
+import { getElectronVersion } from '../lib/get-version';
+import {
+  isNightly,
+  isAlpha,
+  isBeta,
+  nextNightly,
+  nextAlpha,
+  nextBeta,
+  isStable
+} from './version-utils';
+import { VersionBumpType } from './types';
+import { parseArgs } from 'node:util';
+
+// run the script
+async function main () {
+  const { values: { bump, dryRun, help } } = parseArgs({
+    options: {
+      bump: {
+        type: 'string'
+      },
+      dryRun: {
+        type: 'boolean'
+      },
+      help: {
+        type: 'boolean'
+      }
+    }
+  });
+
+  if (!bump || help) {
+    console.log(`
+      Bump release version number. Possible arguments:\n
+      --bump=patch to increment patch version\n
+      --version={version} to set version number directly\n
+      --dryRun to print the next version without updating files
+    Note that you can use both --bump and --stable simultaneously.
+    `);
+    if (!bump) process.exit(0);
+    else process.exit(1);
+  }
+
+  const currentVersion = getElectronVersion();
+  const version = await nextVersion(bump as VersionBumpType, currentVersion);
+
+  // print would-be new version and exit early
+  if (dryRun) {
+    console.log(`new version number would be: ${version}\n`);
+    return 0;
+  }
+
+  console.log(`Bumped to version: ${version}`);
+}
+
+// get next version for release based on [nightly, alpha, beta, stable]
+export async function nextVersion (bumpType: VersionBumpType, version: string) {
+  if (isNightly(version) || isAlpha(version) || isBeta(version)) {
+    switch (bumpType) {
+      case 'nightly':
+        version = await nextNightly(version);
+        break;
+      case 'alpha':
+        version = await nextAlpha(version);
+        break;
+      case 'beta':
+        version = await nextBeta(version);
+        break;
+      case 'stable':
+        version = valid(coerce(version))!;
+        break;
+      default:
+        throw new Error('Invalid bump type.');
+    }
+  } else if (isStable(version)) {
+    switch (bumpType) {
+      case 'nightly':
+        version = await nextNightly(version);
+        break;
+      case 'alpha':
+        throw new Error('Cannot bump to alpha from stable.');
+      case 'beta':
+        throw new Error('Cannot bump to beta from stable.');
+      case 'minor':
+        version = inc(version, 'minor')!;
+        break;
+      case 'stable':
+        version = inc(version, 'patch')!;
+        break;
+      default:
+        throw new Error('Invalid bump type.');
+    }
+  } else {
+    throw new Error(`Invalid current version: ${version}`);
+  }
+  return version;
+}
+
+if (require.main === module) {
+  main().catch((error) => {
+    console.error(error);
+    process.exit(1);
+  });
+}

@@ -1,13 +1,13 @@
-const semver = require('semver');
-const { GitProcess } = require('dugite');
+import * as semver from 'semver';
+import { GitProcess } from 'dugite';

-const { ELECTRON_DIR } = require('../lib/utils');
+import { ELECTRON_DIR } from '../lib/utils';

-const preType = {
-  NONE: 'none',
-  PARTIAL: 'partial',
-  FULL: 'full'
-};
+export enum PreType {
+  NONE = 'none',
+  PARTIAL = ' partial',
+  FULL = 'full',
+}

 const getCurrentDate = () => {
   const d = new Date();

@@ -17,53 +17,43 @@ const getCurrentDate = () => {
   return `${yyyy}${mm}${dd}`;
 };

-const isNightly = v => v.includes('nightly');
-const isAlpha = v => v.includes('alpha');
-const isBeta = v => v.includes('beta');
-const isStable = v => {
+export const isNightly = (v: string) => v.includes('nightly');
+export const isAlpha = (v: string) => v.includes('alpha');
+export const isBeta = (v: string) => v.includes('beta');
+export const isStable = (v: string) => {
   const parsed = semver.parse(v);
   return !!(parsed && parsed.prerelease.length === 0);
 };

-const makeVersion = (components, delim, pre = preType.NONE) => {
-  let version = [components.major, components.minor, components.patch].join(delim);
-  if (pre === preType.PARTIAL) {
-    version += `${delim}${components.pre[1] || 0}`;
-  } else if (pre === preType.FULL) {
-    version += `-${components.pre[0]}${delim}${components.pre[1]}`;
-  }
-  return version;
-};
-
-async function nextAlpha (v) {
+export async function nextAlpha (v: string) {
   const next = semver.coerce(semver.clean(v));
   const tagBlob = await GitProcess.exec(['tag', '--list', '-l', `v${next}-alpha.*`], ELECTRON_DIR);
   const tags = tagBlob.stdout.split('\n').filter(e => e !== '');
   tags.sort((t1, t2) => {
-    const a = parseInt(t1.split('.').pop(), 10);
-    const b = parseInt(t2.split('.').pop(), 10);
+    const a = parseInt(t1.split('.').pop()!, 10);
+    const b = parseInt(t2.split('.').pop()!, 10);
     return a - b;
   });

   // increment the latest existing alpha tag or start at alpha.1 if it's a new alpha line
-  return tags.length === 0 ? `${next}-alpha.1` : semver.inc(tags.pop(), 'prerelease');
+  return tags.length === 0 ? `${next}-alpha.1` : semver.inc(tags.pop()!, 'prerelease')!;
 }

-async function nextBeta (v) {
+export async function nextBeta (v: string) {
   const next = semver.coerce(semver.clean(v));
   const tagBlob = await GitProcess.exec(['tag', '--list', '-l', `v${next}-beta.*`], ELECTRON_DIR);
   const tags = tagBlob.stdout.split('\n').filter(e => e !== '');
   tags.sort((t1, t2) => {
-    const a = parseInt(t1.split('.').pop(), 10);
-    const b = parseInt(t2.split('.').pop(), 10);
+    const a = parseInt(t1.split('.').pop()!, 10);
+    const b = parseInt(t2.split('.').pop()!, 10);
     return a - b;
   });

   // increment the latest existing beta tag or start at beta.1 if it's a new beta line
-  return tags.length === 0 ? `${next}-beta.1` : semver.inc(tags.pop(), 'prerelease');
+  return tags.length === 0 ? `${next}-beta.1` : semver.inc(tags.pop()!, 'prerelease')!;
 }

-async function nextNightly (v) {
+export async function nextNightly (v: string) {
   let next = semver.valid(semver.coerce(v));
   const pre = `nightly.${getCurrentDate()}`;

@@ -71,7 +61,7 @@ async function nextNightly (v) {
   if (branch === 'main') {
     next = semver.inc(await getLastMajorForMain(), 'major');
   } else if (isStable(v)) {
-    next = semver.inc(next, 'patch');
+    next = semver.inc(next!, 'patch');
   }

   return `${next}-${pre}`;

@@ -89,19 +79,7 @@ async function getLastMajorForMain () {
   }
 }

-function getNextReleaseBranch (branches) {
+function getNextReleaseBranch (branches: string[]) {
   const converted = branches.map(b => b.replace(/-/g, '.').replace('x', '0').replace('y', '0'));
   return converted.reduce((v1, v2) => semver.gt(v1, v2) ? v1 : v2);
 }
-
-module.exports = {
-  isStable,
-  isAlpha,
-  isBeta,
-  isNightly,
-  nextAlpha,
-  nextBeta,
-  makeVersion,
-  nextNightly,
-  preType
-};

@@ -1,6 +1,6 @@
 import { GitProcess, IGitExecutionOptions, IGitResult } from 'dugite';
 import { expect } from 'chai';
-import * as notes from '../script/release/notes/notes.js';
+import * as notes from '../script/release/notes/notes';
 import * as path from 'node:path';
 import * as sinon from 'sinon';

@@ -1,7 +1,6 @@
 import { expect } from 'chai';
 import { GitProcess, IGitExecutionOptions, IGitResult } from 'dugite';
 import { nextVersion } from '../script/release/version-bumper';
-import * as utils from '../script/release/version-utils';
 import * as sinon from 'sinon';
 import { ifdescribe } from './lib/spec-helpers';

@@ -53,43 +52,6 @@ class GitFake {
 }

 describe('version-bumper', () => {
-  describe('makeVersion', () => {
-    it('makes a version with a period delimiter', () => {
-      const components = {
-        major: 2,
-        minor: 0,
-        patch: 0
-      };
-
-      const version = utils.makeVersion(components, '.');
-      expect(version).to.equal('2.0.0');
-    });
-
-    it('makes a version with a period delimiter and a partial pre', () => {
-      const components = {
-        major: 2,
-        minor: 0,
-        patch: 0,
-        pre: ['nightly', 12345678]
-      };
-
-      const version = utils.makeVersion(components, '.', utils.preType.PARTIAL);
-      expect(version).to.equal('2.0.0.12345678');
-    });
-
-    it('makes a version with a period delimiter and a full pre', () => {
-      const components = {
-        major: 2,
-        minor: 0,
-        patch: 0,
-        pre: ['nightly', 12345678]
-      };
-
-      const version = utils.makeVersion(components, '.', utils.preType.FULL);
-      expect(version).to.equal('2.0.0-nightly.12345678');
-    });
-  });
-
   ifdescribe(!(process.platform === 'linux' && process.arch.indexOf('arm') === 0) && process.platform !== 'darwin')('nextVersion', () => {
     describe('bump versions', () => {
       const nightlyPattern = /[0-9.]*(-nightly.(\d{4})(\d{2})(\d{2}))$/g;

@@ -183,6 +145,7 @@ describe('version-bumper', () => {
     it('throws on an invalid bump type', () => {
       const version = 'v2.0.0';
       return expect(
+        // @ts-expect-error 'WRONG' is not a valid bump type
        nextVersion('WRONG', version)
       ).to.be.rejectedWith('Invalid bump type.');
     });
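Per the prepare-release change earlier in this diff, the bumper is now spawned through ts-node rather than plain node. A dry-run invocation from the repository root would look roughly like this (path and flags as wired up in scriptArgs; output wording from version-bumper.ts):

    node node_modules/.bin/ts-node script/release/version-bumper.ts --bump=minor --dryRun
    # prints: new version number would be: <next minor version>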
148
yarn.lock
148
yarn.lock
|
@@ -456,18 +456,10 @@
   dependencies:
     "@octokit/types" "^9.0.0"
 
-"@octokit/core@^4.1.0":
-  version "4.2.0"
-  resolved "https://registry.yarnpkg.com/@octokit/core/-/core-4.2.0.tgz#8c253ba9605aca605bc46187c34fcccae6a96648"
-  integrity sha512-AgvDRUg3COpR82P7PBdGZF/NNqGmtMq2NiPqeSsDIeCfYFOZ9gddqWNQHnFdEUf+YwOj4aZYmJnlPp7OXmDIDg==
-  dependencies:
-    "@octokit/auth-token" "^3.0.0"
-    "@octokit/graphql" "^5.0.0"
-    "@octokit/request" "^6.0.0"
-    "@octokit/request-error" "^3.0.0"
-    "@octokit/types" "^9.0.0"
-    before-after-hook "^2.2.0"
-    universal-user-agent "^6.0.0"
+"@octokit/auth-token@^4.0.0":
+  version "4.0.0"
+  resolved "https://registry.yarnpkg.com/@octokit/auth-token/-/auth-token-4.0.0.tgz#40d203ea827b9f17f42a29c6afb93b7745ef80c7"
+  integrity sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA==
 
 "@octokit/core@^4.2.1":
   version "4.2.1"
@@ -482,6 +474,19 @@
     before-after-hook "^2.2.0"
     universal-user-agent "^6.0.0"
 
+"@octokit/core@^5.0.2":
+  version "5.2.0"
+  resolved "https://registry.yarnpkg.com/@octokit/core/-/core-5.2.0.tgz#ddbeaefc6b44a39834e1bb2e58a49a117672a7ea"
+  integrity sha512-1LFfa/qnMQvEOAdzlQymH0ulepxbxnCYAKJZfMci/5XJyIHWgEYnDmgnKakbTh7CH2tFQ5O60oYDvns4i9RAIg==
+  dependencies:
+    "@octokit/auth-token" "^4.0.0"
+    "@octokit/graphql" "^7.1.0"
+    "@octokit/request" "^8.3.1"
+    "@octokit/request-error" "^5.1.0"
+    "@octokit/types" "^13.0.0"
+    before-after-hook "^2.2.0"
+    universal-user-agent "^6.0.0"
+
 "@octokit/endpoint@^7.0.0":
   version "7.0.3"
   resolved "https://registry.yarnpkg.com/@octokit/endpoint/-/endpoint-7.0.3.tgz#0b96035673a9e3bedf8bab8f7335de424a2147ed"
@@ -491,6 +496,14 @@
     is-plain-object "^5.0.0"
     universal-user-agent "^6.0.0"
 
+"@octokit/endpoint@^9.0.1":
+  version "9.0.5"
+  resolved "https://registry.yarnpkg.com/@octokit/endpoint/-/endpoint-9.0.5.tgz#e6c0ee684e307614c02fc6ac12274c50da465c44"
+  integrity sha512-ekqR4/+PCLkEBF6qgj8WqJfvDq65RH85OAgrtnVp1mSxaXF03u2xW/hUdweGS5654IlC0wkNYC18Z50tSYTAFw==
+  dependencies:
+    "@octokit/types" "^13.1.0"
+    universal-user-agent "^6.0.0"
+
 "@octokit/graphql@^5.0.0":
   version "5.0.5"
   resolved "https://registry.yarnpkg.com/@octokit/graphql/-/graphql-5.0.5.tgz#a4cb3ea73f83b861893a6370ee82abb36e81afd2"
@@ -500,6 +513,15 @@
     "@octokit/types" "^9.0.0"
     universal-user-agent "^6.0.0"
 
+"@octokit/graphql@^7.1.0":
+  version "7.1.0"
+  resolved "https://registry.yarnpkg.com/@octokit/graphql/-/graphql-7.1.0.tgz#9bc1c5de92f026648131f04101cab949eeffe4e0"
+  integrity sha512-r+oZUH7aMFui1ypZnAvZmn0KSqAUgE1/tUXIWaqUCa1758ts/Jio84GZuzsvUkme98kv0WFY8//n0J1Z+vsIsQ==
+  dependencies:
+    "@octokit/request" "^8.3.0"
+    "@octokit/types" "^13.0.0"
+    universal-user-agent "^6.0.0"
+
 "@octokit/oauth-authorization-url@^5.0.0":
   version "5.0.0"
   resolved "https://registry.yarnpkg.com/@octokit/oauth-authorization-url/-/oauth-authorization-url-5.0.0.tgz#029626ce87f3b31addb98cd0d2355c2381a1c5a1"
@@ -531,12 +553,17 @@
   resolved "https://registry.yarnpkg.com/@octokit/openapi-types/-/openapi-types-17.2.0.tgz#f1800b5f9652b8e1b85cc6dfb1e0dc888810bdb5"
   integrity sha512-MazrFNx4plbLsGl+LFesMo96eIXkFgEtaKbnNpdh4aQ0VM10aoylFsTYP1AEjkeoRNZiiPe3T6Gl2Hr8dJWdlQ==
 
-"@octokit/plugin-paginate-rest@^6.0.0":
-  version "6.0.0"
-  resolved "https://registry.yarnpkg.com/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-6.0.0.tgz#f34b5a7d9416019126042cd7d7b811e006c0d561"
-  integrity sha512-Sq5VU1PfT6/JyuXPyt04KZNVsFOSBaYOAq2QRZUwzVlI10KFvcbUo8lR258AAQL1Et60b0WuVik+zOWKLuDZxw==
+"@octokit/openapi-types@^22.2.0":
+  version "22.2.0"
+  resolved "https://registry.yarnpkg.com/@octokit/openapi-types/-/openapi-types-22.2.0.tgz#75aa7dcd440821d99def6a60b5f014207ae4968e"
+  integrity sha512-QBhVjcUa9W7Wwhm6DBFu6ZZ+1/t/oYxqc2tp81Pi41YNuJinbFRx8B133qVOrAaBbF7D/m0Et6f9/pZt9Rc+tg==
+
+"@octokit/plugin-paginate-rest@11.3.1":
+  version "11.3.1"
+  resolved "https://registry.yarnpkg.com/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-11.3.1.tgz#fe92d04b49f134165d6fbb716e765c2f313ad364"
+  integrity sha512-ryqobs26cLtM1kQxqeZui4v8FeznirUsksiA+RYemMPJ7Micju0WSkv50dBksTuZks9O5cg4wp+t8fZ/cLY56g==
   dependencies:
-    "@octokit/types" "^9.0.0"
+    "@octokit/types" "^13.5.0"
 
 "@octokit/plugin-paginate-rest@^6.1.2":
   version "6.1.2"
@@ -551,13 +578,17 @@
   resolved "https://registry.yarnpkg.com/@octokit/plugin-request-log/-/plugin-request-log-1.0.4.tgz#5e50ed7083a613816b1e4a28aeec5fb7f1462e85"
   integrity sha512-mLUsMkgP7K/cnFEw07kWqXGF5LKrOkD+lhCrKvPHXWDywAwuDUeDwWBpc69XK3pNX0uKiVt8g5z96PJ6z9xCFA==
 
-"@octokit/plugin-rest-endpoint-methods@^7.0.0":
-  version "7.0.1"
-  resolved "https://registry.yarnpkg.com/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-7.0.1.tgz#f7ebe18144fd89460f98f35a587b056646e84502"
-  integrity sha512-pnCaLwZBudK5xCdrR823xHGNgqOzRnJ/mpC/76YPpNP7DybdsJtP7mdOwh+wYZxK5jqeQuhu59ogMI4NRlBUvA==
+"@octokit/plugin-request-log@^4.0.0":
+  version "4.0.1"
+  resolved "https://registry.yarnpkg.com/@octokit/plugin-request-log/-/plugin-request-log-4.0.1.tgz#98a3ca96e0b107380664708111864cb96551f958"
+  integrity sha512-GihNqNpGHorUrO7Qa9JbAl0dbLnqJVrV8OXe2Zm5/Y4wFkZQDfTreBzVmiRfJVfE4mClXdihHnbpyyO9FSX4HA==
+
+"@octokit/plugin-rest-endpoint-methods@13.2.2":
+  version "13.2.2"
+  resolved "https://registry.yarnpkg.com/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-13.2.2.tgz#af8e5dd2cddfea576f92ffaf9cb84659f302a638"
+  integrity sha512-EI7kXWidkt3Xlok5uN43suK99VWqc8OaIMktY9d9+RNKl69juoTyxmLoWPIZgJYzi41qj/9zU7G/ljnNOJ5AFA==
   dependencies:
-    "@octokit/types" "^9.0.0"
-    deprecation "^2.3.1"
+    "@octokit/types" "^13.5.0"
 
 "@octokit/plugin-rest-endpoint-methods@^7.1.2":
   version "7.1.2"
@@ -576,6 +607,15 @@
     deprecation "^2.0.0"
     once "^1.4.0"
 
+"@octokit/request-error@^5.1.0":
+  version "5.1.0"
+  resolved "https://registry.yarnpkg.com/@octokit/request-error/-/request-error-5.1.0.tgz#ee4138538d08c81a60be3f320cd71063064a3b30"
+  integrity sha512-GETXfE05J0+7H2STzekpKObFe765O5dlAKUTLNGeH+x47z7JjXHfsHKo5z21D/o/IOZTUEI6nyWyR+bZVP/n5Q==
+  dependencies:
+    "@octokit/types" "^13.1.0"
+    deprecation "^2.0.0"
+    once "^1.4.0"
+
 "@octokit/request@^6.0.0":
   version "6.2.4"
   resolved "https://registry.yarnpkg.com/@octokit/request/-/request-6.2.4.tgz#b00a7185865c72bdd432e63168b1e900953ded0c"
@@ -588,6 +628,16 @@
     node-fetch "^2.6.7"
     universal-user-agent "^6.0.0"
 
+"@octokit/request@^8.3.0", "@octokit/request@^8.3.1":
+  version "8.4.0"
+  resolved "https://registry.yarnpkg.com/@octokit/request/-/request-8.4.0.tgz#7f4b7b1daa3d1f48c0977ad8fffa2c18adef8974"
+  integrity sha512-9Bb014e+m2TgBeEJGEbdplMVWwPmL1FPtggHQRkV+WVsMggPtEkLKPlcVYm/o8xKLkpJ7B+6N8WfQMtDLX2Dpw==
+  dependencies:
+    "@octokit/endpoint" "^9.0.1"
+    "@octokit/request-error" "^5.1.0"
+    "@octokit/types" "^13.1.0"
+    universal-user-agent "^6.0.0"
+
 "@octokit/rest@^19.0.11":
   version "19.0.11"
   resolved "https://registry.yarnpkg.com/@octokit/rest/-/rest-19.0.11.tgz#2ae01634fed4bd1fca5b642767205ed3fd36177c"
@@ -598,21 +648,28 @@
     "@octokit/plugin-request-log" "^1.0.4"
     "@octokit/plugin-rest-endpoint-methods" "^7.1.2"
 
-"@octokit/rest@^19.0.7":
-  version "19.0.7"
-  resolved "https://registry.yarnpkg.com/@octokit/rest/-/rest-19.0.7.tgz#d2e21b4995ab96ae5bfae50b4969da7e04e0bb70"
-  integrity sha512-HRtSfjrWmWVNp2uAkEpQnuGMJsu/+dBr47dRc5QVgsCbnIc1+GFEaoKBWkYG+zjrsHpSqcAElMio+n10c0b5JA==
+"@octokit/rest@^20.0.2":
+  version "20.1.1"
+  resolved "https://registry.yarnpkg.com/@octokit/rest/-/rest-20.1.1.tgz#ec775864f53fb42037a954b9a40d4f5275b3dc95"
+  integrity sha512-MB4AYDsM5jhIHro/dq4ix1iWTLGToIGk6cWF5L6vanFaMble5jTX/UBQyiv05HsWnwUtY8JrfHy2LWfKwihqMw==
   dependencies:
-    "@octokit/core" "^4.1.0"
-    "@octokit/plugin-paginate-rest" "^6.0.0"
-    "@octokit/plugin-request-log" "^1.0.4"
-    "@octokit/plugin-rest-endpoint-methods" "^7.0.0"
+    "@octokit/core" "^5.0.2"
+    "@octokit/plugin-paginate-rest" "11.3.1"
+    "@octokit/plugin-request-log" "^4.0.0"
+    "@octokit/plugin-rest-endpoint-methods" "13.2.2"
 
 "@octokit/tsconfig@^1.0.2":
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/@octokit/tsconfig/-/tsconfig-1.0.2.tgz#59b024d6f3c0ed82f00d08ead5b3750469125af7"
   integrity sha512-I0vDR0rdtP8p2lGMzvsJzbhdOWy405HcGovrspJ8RRibHnyRgggUSNO5AIox5LmqiwmatHKYsvj6VGFHkqS7lA==
 
+"@octokit/types@^13.0.0", "@octokit/types@^13.1.0", "@octokit/types@^13.5.0":
+  version "13.6.0"
+  resolved "https://registry.yarnpkg.com/@octokit/types/-/types-13.6.0.tgz#db13d345cc3fe1a0f7c07171c724d90f2b55f410"
+  integrity sha512-CrooV/vKCXqwLa+osmHLIMUb87brpgUqlqkPGc6iE2wCkUvTrHiXFMhAKoDDaAAYJrtKtrFTgSQTg5nObBEaew==
+  dependencies:
+    "@octokit/openapi-types" "^22.2.0"
+
 "@octokit/types@^8.0.0":
   version "8.0.0"
   resolved "https://registry.yarnpkg.com/@octokit/types/-/types-8.0.0.tgz#93f0b865786c4153f0f6924da067fe0bb7426a9f"
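This hunk is the lockfile side of the `@octokit/rest` v19 → v20 major bump. Day-to-day usage is largely unchanged; a hedged sketch of the pagination helper that the pinned `plugin-paginate-rest@11.3.1` supplies (repo and token wiring here are illustrative, not taken from this diff):

import { Octokit } from '@octokit/rest';

async function listRuns () {
  const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
  // paginate() follows Link headers until every page has been collected.
  const runs = await octokit.paginate(octokit.actions.listWorkflowRunsForRepo, {
    owner: 'electron',
    repo: 'electron',
    per_page: 100
  });
  return runs.length;
}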
@@ -2239,18 +2296,6 @@ doctrine@^3.0.0:
   dependencies:
     esutils "^2.0.2"
 
-dotenv-safe@^4.0.4:
-  version "4.0.4"
-  resolved "https://registry.yarnpkg.com/dotenv-safe/-/dotenv-safe-4.0.4.tgz#8b0e7ced8e70b1d3c5d874ef9420e406f39425b3"
-  integrity sha1-iw587Y5wsdPF2HTvlCDkBvOUJbM=
-  dependencies:
-    dotenv "^4.0.0"
-
-dotenv@^4.0.0:
-  version "4.0.0"
-  resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-4.0.0.tgz#864ef1379aced55ce6f95debecdce179f7a0cd1d"
-  integrity sha1-hk7xN5rO1Vzm+V3r7NzhefegzR0=
-
 dugite@^2.3.0:
   version "2.3.0"
   resolved "https://registry.yarnpkg.com/dugite/-/dugite-2.3.0.tgz#ff6fdb4c899f84ed6695c9e01eaf4364a6211f13"
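`dotenv-safe` and its `dotenv` dependency drop out of the lockfile now that the release scripts no longer depend on them. A minimal sketch of the fail-fast pattern that replaces a `.env`-file load, reading straight from the environment (the helper and variable name are hypothetical):

// Hypothetical helper: throw early instead of relying on dotenv-safe's
// .env.example validation.
function requireEnv (name: string): string {
  const value = process.env[name];
  if (!value) throw new Error(`Missing required environment variable: ${name}`);
  return value;
}

const token = requireEnv('ELECTRON_GITHUB_TOKEN'); // name is illustrative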
@@ -6686,7 +6731,7 @@ stringify-object@^3.3.0:
     is-obj "^1.0.1"
     is-regexp "^1.0.0"
 
-"strip-ansi-cjs@npm:strip-ansi@^6.0.1":
+"strip-ansi-cjs@npm:strip-ansi@^6.0.1", strip-ansi@^6.0.1:
   version "6.0.1"
   resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9"
   integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==
@@ -6707,13 +6752,6 @@ strip-ansi@^6.0.0:
   dependencies:
     ansi-regex "^5.0.0"
 
-strip-ansi@^6.0.1:
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9"
-  integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==
-  dependencies:
-    ansi-regex "^5.0.1"
-
 strip-ansi@^7.0.0:
   version "7.0.0"
   resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.0.0.tgz#1dc49b980c3a4100366617adac59327eefdefcb0"
@@ -7050,10 +7088,10 @@ typedarray@^0.0.6:
   resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777"
   integrity sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c=
 
-typescript@^5.1.2:
-  version "5.1.3"
-  resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.1.3.tgz#8d84219244a6b40b6fb2b33cc1c062f715b9e826"
-  integrity sha512-XH627E9vkeqhlZFQuL+UsyAXEnibT0kWR2FWONlr4sTjvxyJYnyefgrkyECLzM5NenmKzRAy2rR/OlYLA1HkZw==
+typescript@^5.6.2:
+  version "5.6.2"
+  resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.6.2.tgz#d1de67b6bef77c41823f822df8f0b3bcff60a5a0"
+  integrity sha512-NW8ByodCSNCwZeghjN3o+JX5OFH0Ojg6sadjEKY4huZ52TqbJTJnDo5+Tw98lSy63NZvi4n+ez5m2u5d4PkZyw==
 
 uc.micro@^1.0.1, uc.micro@^1.0.5:
   version "1.0.6"