build: convert all release scripts to typescript (#44060)
* build: run gha on tag not branch (#42490)
* build: convert all release scripts to typescript (#44035)
* build: convert all release scripts to typescript
* fix test imports
* build: fix version bumper export
* refactor: use as const
* spec: fix bad type spec
* build: use ts-node to spawn the version-bumper (#44057)
  Missed this in the tsification, we should probably call this via API instead of spawning a sub-proc?
* build: still colors
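One item above — "build: use ts-node to spawn the version-bumper" — leaves open whether the bumper should keep being launched as a subprocess or be called through its exported API. A minimal TypeScript sketch of the two options; the paths, flags, and export names here are illustrative assumptions, not values taken from this commit:

import { spawnSync } from 'node:child_process';

// Option A: keep spawning a subprocess, but run the TypeScript entry point through ts-node.
// (The script path and --bump flag are assumptions for the sake of the example.)
const result = spawnSync('npx', ['ts-node', 'script/release/version-bumper.ts', '--bump=minor'], {
  stdio: 'inherit'
});
if (result.status !== 0) {
  process.exit(result.status ?? 1);
}

// Option B: the "call this via API instead of spawning a sub-proc" idea — import the bumper
// and call it in-process, which skips the extra process and keeps type checking end to end:
//   import { nextVersion } from './version-bumper';
//   const version = await nextVersion('minor', currentVersion);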
This commit is contained in:
parent 956677b66a
commit 2e84985439

22 changed files with 1173 additions and 779 deletions
@@ -1,22 +1,22 @@
 #!/usr/bin/env node

-const { GitProcess } = require('dugite');
-const minimist = require('minimist');
-const path = require('node:path');
-const semver = require('semver');
+import { GitProcess } from 'dugite';
+import { basename } from 'node:path';
+import { valid, compare, gte, lte } from 'semver';

-const { ELECTRON_DIR } = require('../../lib/utils');
-const notesGenerator = require('./notes.js');
+import { ELECTRON_DIR } from '../../lib/utils';
+import { get, render } from './notes';

-const { Octokit } = require('@octokit/rest');
-const { createGitHubTokenStrategy } = require('../github-token');
+import { Octokit } from '@octokit/rest';
+import { createGitHubTokenStrategy } from '../github-token';
+import { parseArgs } from 'node:util';

 const octokit = new Octokit({
   authStrategy: createGitHubTokenStrategy('electron')
 });

-const semverify = version => version.replace(/^origin\//, '').replace(/[xy]/g, '0').replace(/-/g, '.');
+const semverify = (version: string) => version.replace(/^origin\//, '').replace(/[xy]/g, '0').replace(/-/g, '.');

-const runGit = async (args) => {
+const runGit = async (args: string[]) => {
   console.info(`Running: git ${args.join(' ')}`);
   const response = await GitProcess.exec(args, ELECTRON_DIR);
   if (response.exitCode !== 0) {
@@ -25,25 +25,25 @@ const runGit = async (args) => {
   return response.stdout.trim();
 };

-const tagIsSupported = tag => tag && !tag.includes('nightly') && !tag.includes('unsupported');
-const tagIsAlpha = tag => tag && tag.includes('alpha');
-const tagIsBeta = tag => tag && tag.includes('beta');
-const tagIsStable = tag => tagIsSupported(tag) && !tagIsBeta(tag) && !tagIsAlpha(tag);
+const tagIsSupported = (tag: string) => !!tag && !tag.includes('nightly') && !tag.includes('unsupported');
+const tagIsAlpha = (tag: string) => !!tag && tag.includes('alpha');
+const tagIsBeta = (tag: string) => !!tag && tag.includes('beta');
+const tagIsStable = (tag: string) => tagIsSupported(tag) && !tagIsBeta(tag) && !tagIsAlpha(tag);

-const getTagsOf = async (point) => {
+const getTagsOf = async (point: string) => {
   try {
     const tags = await runGit(['tag', '--merged', point]);
     return tags.split('\n')
       .map(tag => tag.trim())
-      .filter(tag => semver.valid(tag))
-      .sort(semver.compare);
+      .filter(tag => valid(tag))
+      .sort(compare);
   } catch (err) {
     console.error(`Failed to fetch tags for point ${point}`);
     throw err;
   }
 };

-const getTagsOnBranch = async (point) => {
+const getTagsOnBranch = async (point: string) => {
   const { data: { default_branch: defaultBranch } } = await octokit.repos.get({
     owner: 'electron',
     repo: 'electron'
@@ -57,7 +57,7 @@ const getTagsOnBranch = async (point) => {
   return (await getTagsOf(point)).filter(tag => !mainTagsSet.has(tag));
 };

-const getBranchOf = async (point) => {
+const getBranchOf = async (point: string) => {
   try {
     const branches = (await runGit(['branch', '-a', '--contains', point]))
       .split('\n')
@@ -89,11 +89,11 @@ const getStabilizationBranches = async () => {
   return (await getAllBranches()).filter(branch => /^origin\/\d+-x-y$/.test(branch));
 };

-const getPreviousStabilizationBranch = async (current) => {
+const getPreviousStabilizationBranch = async (current: string) => {
   const stabilizationBranches = (await getStabilizationBranches())
     .filter(branch => branch !== current && branch !== `origin/${current}`);

-  if (!semver.valid(current)) {
+  if (!valid(current)) {
     // since we don't seem to be on a stabilization branch right now,
     // pick a placeholder name that will yield the newest branch
     // as a comparison point.
@@ -102,20 +102,20 @@ const getPreviousStabilizationBranch = async (current) => {

   let newestMatch = null;
   for (const branch of stabilizationBranches) {
-    if (semver.gte(semverify(branch), semverify(current))) {
+    if (gte(semverify(branch), semverify(current))) {
       continue;
     }
-    if (newestMatch && semver.lte(semverify(branch), semverify(newestMatch))) {
+    if (newestMatch && lte(semverify(branch), semverify(newestMatch))) {
       continue;
     }
     newestMatch = branch;
   }
-  return newestMatch;
+  return newestMatch!;
 };

-const getPreviousPoint = async (point) => {
+const getPreviousPoint = async (point: string) => {
   const currentBranch = await getBranchOf(point);
-  const currentTag = (await getTagsOf(point)).filter(tag => tagIsSupported(tag)).pop();
+  const currentTag = (await getTagsOf(point)).filter(tag => tagIsSupported(tag)).pop()!;
   const currentIsStable = tagIsStable(currentTag);

   try {
@@ -146,18 +146,18 @@ const getPreviousPoint = async (point) => {
   }
 };

-async function getReleaseNotes (range, newVersion, unique) {
+async function getReleaseNotes (range: string, newVersion?: string, unique?: boolean) {
   const rangeList = range.split('..') || ['HEAD'];
-  const to = rangeList.pop();
-  const from = rangeList.pop() || (await getPreviousPoint(to));
+  const to = rangeList.pop()!;
+  const from = rangeList.pop() || (await getPreviousPoint(to))!;

   if (!newVersion) {
     newVersion = to;
   }

-  const notes = await notesGenerator.get(from, to, newVersion);
-  const ret = {
-    text: notesGenerator.render(notes, unique)
+  const notes = await get(from, to, newVersion);
+  const ret: { text: string; warning?: string; } = {
+    text: render(notes, unique)
   };

   if (notes.unknown.length) {
@@ -168,13 +168,24 @@ async function getReleaseNotes (range, newVersion, unique) {
 }

 async function main () {
-  const opts = minimist(process.argv.slice(2), {
-    boolean: ['help', 'unique'],
-    string: ['version']
+  const { values: { help, unique, version }, positionals } = parseArgs({
+    options: {
+      help: {
+        type: 'boolean'
+      },
+      unique: {
+        type: 'boolean'
+      },
+      version: {
+        type: 'string'
+      }
+    },
+    allowPositionals: true
   });
-  opts.range = opts._.shift();
-  if (opts.help || !opts.range) {
-    const name = path.basename(process.argv[1]);
+
+  const range = positionals.shift();
+  if (help || !range) {
+    const name = basename(process.argv[1]);
     console.log(`
 easy usage: ${name} version

@@ -194,7 +205,7 @@ For example, these invocations are equivalent:
     return 0;
   }

-  const notes = await getReleaseNotes(opts.range, opts.version, opts.unique);
+  const notes = await getReleaseNotes(range, version, unique);
   console.log(notes.text);
   if (notes.warning) {
     throw new Error(notes.warning);
@@ -208,4 +219,4 @@ if (require.main === module) {
   });
 }

-module.exports = getReleaseNotes;
+export default getReleaseNotes;
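The last hunk above swaps module.exports for a default export, so consumers of this script move from require to an import. A small sketch of the caller-side change, assuming an illustrative relative path and version range (not taken from this commit):

import getReleaseNotes from './get-release-notes'; // was: const getReleaseNotes = require('./get-release-notes');

async function printNotes () {
  const { text, warning } = await getReleaseNotes('v32.0.0..v32.1.0', undefined, true);
  console.log(text);
  if (warning) {
    throw new Error(warning);
  }
}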
@@ -1,16 +1,13 @@
 #!/usr/bin/env node

-'use strict';
+import { existsSync, readFileSync, writeFileSync, mkdirSync } from 'node:fs';
+import { resolve as _resolve } from 'node:path';

-const fs = require('node:fs');
-const path = require('node:path');
+import { Octokit } from '@octokit/rest';
+import { GitProcess } from 'dugite';

-const { GitProcess } = require('dugite');
-
-const { Octokit } = require('@octokit/rest');
-
-const { ELECTRON_DIR } = require('../../lib/utils');
-const { createGitHubTokenStrategy } = require('../github-token');
+import { ELECTRON_DIR } from '../../lib/utils';
+import { createGitHubTokenStrategy } from '../github-token';

 const octokit = new Octokit({
   authStrategy: createGitHubTokenStrategy('electron')
@@ -26,24 +23,52 @@ const NO_NOTES = 'No notes';
 const docTypes = new Set(['doc', 'docs']);
 const featTypes = new Set(['feat', 'feature']);
 const fixTypes = new Set(['fix']);
-const otherTypes = new Set(['spec', 'build', 'test', 'chore', 'deps', 'refactor', 'tools', 'perf', 'style', 'ci']);
-const knownTypes = new Set([...docTypes.keys(), ...featTypes.keys(), ...fixTypes.keys(), ...otherTypes.keys()]);
+const otherTypes = new Set([
+  'spec',
+  'build',
+  'test',
+  'chore',
+  'deps',
+  'refactor',
+  'tools',
+  'perf',
+  'style',
+  'ci'
+]);
+const knownTypes = new Set([
+  ...docTypes.keys(),
+  ...featTypes.keys(),
+  ...fixTypes.keys(),
+  ...otherTypes.keys()
+]);

-const getCacheDir = () => process.env.NOTES_CACHE_PATH || path.resolve(__dirname, '.cache');
+const getCacheDir = () =>
+  process.env.NOTES_CACHE_PATH || _resolve(__dirname, '.cache');

 /**
- ***
- **/
+ ***
+ **/

+type MinimalPR = {
+  title: string;
+  body: string | null;
+  number: number;
+  labels: {
+    name: string;
+  }[];
+  base: { repo: { name: string; owner: { login: string } } };
+};
+
 // link to a GitHub item, e.g. an issue or pull request
 class GHKey {
-  constructor (owner, repo, number) {
-    this.owner = owner;
-    this.repo = repo;
-    this.number = number;
-  }
+  // eslint-disable-next-line no-useless-constructor
+  constructor (
+    public readonly owner: string,
+    public readonly repo: string,
+    public readonly number: number
+  ) {}

-  static NewFromPull (pull) {
+  static NewFromPull (pull: MinimalPR) {
     const owner = pull.base.repo.owner.login;
     const repo = pull.base.repo.name;
     const number = pull.number;
@@ -52,38 +77,33 @@ class GHKey {
 }

 class Commit {
-  constructor (hash, owner, repo) {
-    this.hash = hash; // string
-    this.owner = owner; // string
-    this.repo = repo; // string
+  public isBreakingChange = false;
+  public note: string | null = null;
+  public trops = new Map<string, GHKey>();
+  public readonly prKeys = new Set<GHKey>();
+  public revertHash: string | null = null;
+  public semanticType: string | null = null;
+  public subject: string | null = null;

-    this.isBreakingChange = false;
-    this.note = null; // string
-
-    // A set of branches to which this change has been merged.
-    // '8-x-y' => GHKey { owner: 'electron', repo: 'electron', number: 23714 }
-    this.trops = new Map(); // Map<string,GHKey>
-
-    this.prKeys = new Set(); // GHKey
-    this.revertHash = null; // string
-    this.semanticType = null; // string
-    this.subject = null; // string
-  }
+  // eslint-disable-next-line no-useless-constructor
+  constructor (
+    public readonly hash: string,
+    public readonly owner: string,
+    public readonly repo: string
+  ) {}
 }

 class Pool {
-  constructor () {
-    this.commits = []; // Array<Commit>
-    this.processedHashes = new Set();
-    this.pulls = {}; // GHKey.number => octokit pull object
-  }
+  public commits: Commit[] = [];
+  public processedHashes = new Set<string>();
+  public pulls: Record<number, MinimalPR> = Object.create(null);
 }

 /**
- ***
- **/
+ ***
+ **/

-const runGit = async (dir, args) => {
+const runGit = async (dir: string, args: string[]) => {
   const response = await GitProcess.exec(args, dir);
   if (response.exitCode !== 0) {
     throw new Error(response.stderr.trim());
@@ -91,11 +111,15 @@ const runGit = async (dir, args) => {
   return response.stdout.trim();
 };

-const getCommonAncestor = async (dir, point1, point2) => {
+const getCommonAncestor = async (
+  dir: string,
+  point1: string,
+  point2: string
+) => {
   return runGit(dir, ['merge-base', point1, point2]);
 };

-const getNoteFromClerk = async (ghKey) => {
+const getNoteFromClerk = async (ghKey: GHKey) => {
   const comments = await getComments(ghKey);
   if (!comments || !comments.data) return;

@@ -105,28 +129,29 @@ const getNoteFromClerk = async (ghKey) => {
   const QUOTE_LEAD = '> ';

   for (const comment of comments.data.reverse()) {
-    if (comment.user.login !== CLERK_LOGIN) {
+    if (comment.user?.login !== CLERK_LOGIN) {
       continue;
     }
     if (comment.body === CLERK_NO_NOTES) {
       return NO_NOTES;
     }
-    if (comment.body.startsWith(PERSIST_LEAD)) {
+    if (comment.body?.startsWith(PERSIST_LEAD)) {
       let lines = comment.body
-        .slice(PERSIST_LEAD.length).trim() // remove PERSIST_LEAD
+        .slice(PERSIST_LEAD.length)
+        .trim() // remove PERSIST_LEAD
         .split(/\r?\n/) // split into lines
-        .map(line => line.trim())
-        .map(line => line.replace('&lt;', '<'))
-        .map(line => line.replace('&gt;', '>'))
-        .filter(line => line.startsWith(QUOTE_LEAD)) // notes are quoted
-        .map(line => line.slice(QUOTE_LEAD.length)); // unquote the lines
+        .map((line) => line.trim())
+        .map((line) => line.replace('&lt;', '<'))
+        .map((line) => line.replace('&gt;', '>'))
+        .filter((line) => line.startsWith(QUOTE_LEAD)) // notes are quoted
+        .map((line) => line.slice(QUOTE_LEAD.length)); // unquote the lines

       const firstLine = lines.shift();
       // indent anything after the first line to ensure that
       // multiline notes with their own sub-lists don't get
       // parsed in the markdown as part of the top-level list
       // (example: https://github.com/electron/electron/pull/25216)
-      lines = lines.map(line => ' ' + line);
+      lines = lines.map((line) => ' ' + line);
       return [firstLine, ...lines]
         .join('\n') // join the lines
         .trim();
@@ -146,7 +171,7 @@ const getNoteFromClerk = async (ghKey) => {
  * line starting with 'BREAKING CHANGE' in body -- sets isBreakingChange
  * 'Backport of #99999' -- sets pr
  */
-const parseCommitMessage = (commitMessage, commit) => {
+const parseCommitMessage = (commitMessage: string, commit: Commit) => {
   const { owner, repo } = commit;

   // split commitMessage into subject & body
@@ -180,23 +205,32 @@ const parseCommitMessage = (commitMessage, commit) => {
   }

   // Check for a comment that indicates a PR
-  const backportPattern = /(?:^|\n)(?:manual |manually )?backport.*(?:#(\d+)|\/pull\/(\d+))/im;
+  const backportPattern =
+    /(?:^|\n)(?:manual |manually )?backport.*(?:#(\d+)|\/pull\/(\d+))/im;
   if ((match = commitMessage.match(backportPattern))) {
     // This might be the first or second capture group depending on if it's a link or not.
-    const backportNumber = match[1] ? parseInt(match[1], 10) : parseInt(match[2], 10);
+    const backportNumber = match[1]
+      ? parseInt(match[1], 10)
+      : parseInt(match[2], 10);
     commit.prKeys.add(new GHKey(owner, repo, backportNumber));
   }

   // https://help.github.com/articles/closing-issues-using-keywords/
-  if (body.match(/\b(?:close|closes|closed|fix|fixes|fixed|resolve|resolves|resolved|for)\s#(\d+)\b/i)) {
+  if (
+    body.match(
+      /\b(?:close|closes|closed|fix|fixes|fixed|resolve|resolves|resolved|for)\s#(\d+)\b/i
+    )
+  ) {
     commit.semanticType = commit.semanticType || 'fix';
   }

   // https://www.conventionalcommits.org/en
-  if (commitMessage
-    .split(/\r?\n/) // split into lines
-    .map(line => line.trim())
-    .some(line => line.startsWith('BREAKING CHANGE'))) {
+  if (
+    commitMessage
+      .split(/\r?\n/) // split into lines
+      .map((line) => line.trim())
+      .some((line) => line.startsWith('BREAKING CHANGE'))
+  ) {
     commit.isBreakingChange = true;
   }

@@ -209,76 +243,109 @@ const parseCommitMessage = (commitMessage, commit) => {
   return commit;
 };

-const parsePullText = (pull, commit) => parseCommitMessage(`${pull.data.title}\n\n${pull.data.body}`, commit);
+const parsePullText = (pull: MinimalPR, commit: Commit) =>
+  parseCommitMessage(`${pull.title}\n\n${pull.body}`, commit);

-const getLocalCommitHashes = async (dir, ref) => {
+const getLocalCommitHashes = async (dir: string, ref: string) => {
   const args = ['log', '--format=%H', ref];
   return (await runGit(dir, args))
     .split(/\r?\n/) // split into lines
-    .map(hash => hash.trim());
+    .map((hash) => hash.trim());
 };

 // return an array of Commits
-const getLocalCommits = async (module, point1, point2) => {
+const getLocalCommits = async (
+  module: LocalRepo,
+  point1: string,
+  point2: string
+) => {
   const { owner, repo, dir } = module;

   const fieldSep = ',';
   const format = ['%H', '%s'].join(fieldSep);
-  const args = ['log', '--cherry-pick', '--right-only', '--first-parent', `--format=${format}`, `${point1}..${point2}`];
+  const args = [
+    'log',
+    '--cherry-pick',
+    '--right-only',
+    '--first-parent',
+    `--format=${format}`,
+    `${point1}..${point2}`
+  ];
   const logs = (await runGit(dir, args))
     .split(/\r?\n/) // split into lines
-    .map(field => field.trim());
+    .map((field) => field.trim());

   const commits = [];
   for (const log of logs) {
     if (!log) {
       continue;
     }
-    const [hash, subject] = log.split(fieldSep, 2).map(field => field.trim());
+    const [hash, subject] = log.split(fieldSep, 2).map((field) => field.trim());
     commits.push(parseCommitMessage(subject, new Commit(hash, owner, repo)));
   }
   return commits;
 };

-const checkCache = async (name, operation) => {
-  const filename = path.resolve(getCacheDir(), name);
-  if (fs.existsSync(filename)) {
-    return JSON.parse(fs.readFileSync(filename, 'utf8'));
+const checkCache = async <T>(
+  name: string,
+  operation: () => Promise<T>
+): Promise<T> => {
+  const filename = _resolve(getCacheDir(), name);
+  if (existsSync(filename)) {
+    return JSON.parse(readFileSync(filename, 'utf8'));
   }
   process.stdout.write('.');
   const response = await operation();
   if (response) {
-    fs.writeFileSync(filename, JSON.stringify(response));
+    writeFileSync(filename, JSON.stringify(response));
   }
   return response;
 };

 // helper function to add some resiliency to volatile GH api endpoints
-async function runRetryable (fn, maxRetries) {
-  let lastError;
+async function runRetryable<T> (
+  fn: () => Promise<T>,
+  maxRetries: number
+): Promise<T | null> {
+  let lastError: Error & { status?: number };
   for (let i = 0; i < maxRetries; i++) {
     try {
       return await fn();
     } catch (error) {
-      await new Promise(resolve => setTimeout(resolve, CHECK_INTERVAL));
-      lastError = error;
+      await new Promise((resolve) => setTimeout(resolve, CHECK_INTERVAL));
+      lastError = error as any;
     }
   }
   // Silently eat 404s.
   // Silently eat 422s, which come from "No commit found for SHA"
-  if (lastError.status !== 404 && lastError.status !== 422) throw lastError;
+  // eslint-disable-next-line no-throw-literal
+  if (lastError!.status !== 404 && lastError!.status !== 422) throw lastError!;

   return null;
 }

-const getPullCacheFilename = ghKey => `${ghKey.owner}-${ghKey.repo}-pull-${ghKey.number}`;
+const getPullCacheFilename = (ghKey: GHKey) =>
+  `${ghKey.owner}-${ghKey.repo}-pull-${ghKey.number}`;

-const getCommitPulls = async (owner, repo, hash) => {
+const getCommitPulls = async (owner: string, repo: string, hash: string) => {
   const name = `${owner}-${repo}-commit-${hash}`;
-  const retryableFunc = () => octokit.repos.listPullRequestsAssociatedWithCommit({ owner, repo, commit_sha: hash });
-  let ret = await checkCache(name, () => runRetryable(retryableFunc, MAX_FAIL_COUNT));
+  const retryableFunc = async () => {
+    const { data } = await octokit.repos.listPullRequestsAssociatedWithCommit({
+      owner,
+      repo,
+      commit_sha: hash
+    });
+    return {
+      data
+    };
+  };
+  let ret = await checkCache(name, () =>
+    runRetryable(retryableFunc, MAX_FAIL_COUNT)
+  );

   // only merged pulls belong in release notes
   if (ret && ret.data) {
-    ret.data = ret.data.filter(pull => pull.merged_at);
+    ret.data = ret.data.filter((pull) => pull.merged_at);
   }

   // cache the pulls
@@ -286,7 +353,7 @@ const getCommitPulls = async (owner, repo, hash) => {
     for (const pull of ret.data) {
       const cachefile = getPullCacheFilename(GHKey.NewFromPull(pull));
       const payload = { ...ret, data: pull };
-      await checkCache(cachefile, () => payload);
+      await checkCache(cachefile, async () => payload);
     }
   }

@@ -298,21 +365,39 @@ const getCommitPulls = async (owner, repo, hash) => {
   return ret;
 };

-const getPullRequest = async (ghKey) => {
+const getPullRequest = async (ghKey: GHKey) => {
   const { number, owner, repo } = ghKey;
   const name = getPullCacheFilename(ghKey);
-  const retryableFunc = () => octokit.pulls.get({ pull_number: number, owner, repo });
+  const retryableFunc = () =>
+    octokit.pulls.get({ pull_number: number, owner, repo });
   return checkCache(name, () => runRetryable(retryableFunc, MAX_FAIL_COUNT));
 };

-const getComments = async (ghKey) => {
+const getComments = async (ghKey: GHKey) => {
   const { number, owner, repo } = ghKey;
   const name = `${owner}-${repo}-issue-${number}-comments`;
-  const retryableFunc = () => octokit.issues.listComments({ issue_number: number, owner, repo, per_page: 100 });
+  const retryableFunc = () =>
+    octokit.issues.listComments({
+      issue_number: number,
+      owner,
+      repo,
+      per_page: 100
+    });
   return checkCache(name, () => runRetryable(retryableFunc, MAX_FAIL_COUNT));
 };

-const addRepoToPool = async (pool, repo, from, to) => {
+type LocalRepo = {
+  owner: string;
+  repo: string;
+  dir: string;
+};
+
+const addRepoToPool = async (
+  pool: Pool,
+  repo: LocalRepo,
+  from: string,
+  to: string
+) => {
   const commonAncestor = await getCommonAncestor(repo.dir, from, to);

   // mark the old branch's commits as old news
@@ -337,42 +422,59 @@ const addRepoToPool = async (pool, repo, from, to) => {
     for (prKey of commit.prKeys.values()) {
       const pull = await getPullRequest(prKey);
       if (!pull || !pull.data) continue; // couldn't get it
-      pool.pulls[prKey.number] = pull;
-      parsePullText(pull, commit);
+      pool.pulls[prKey.number] = pull.data;
+      parsePullText(pull.data, commit);
     }
   }
 };

+type MinimalComment = {
+  user: {
+    login: string;
+  } | null;
+  body?: string;
+};
+
 // @return Map<string,GHKey>
 // where the key is a branch name (e.g. '7-1-x' or '8-x-y')
 // and the value is a GHKey to the PR
-async function getMergedTrops (commit, pool) {
+async function getMergedTrops (commit: Commit, pool: Pool) {
   const branches = new Map();

   for (const prKey of commit.prKeys.values()) {
     const pull = pool.pulls[prKey.number];
     const mergedBranches = new Set(
-      ((pull && pull.data && pull.data.labels) ? pull.data.labels : [])
-        .map(label => ((label && label.name) ? label.name : '').match(/merged\/([0-9]+-[x0-9]-[xy0-9])/))
-        .filter(match => match)
-        .map(match => match[1])
+      (pull && pull && pull.labels ? pull.labels : [])
+        .map((label) =>
+          (label && label.name ? label.name : '').match(
+            /merged\/([0-9]+-[x0-9]-[xy0-9])/
+          )
+        )
+        .filter((match) => !!match)
+        .map((match) => match[1])
     );

     if (mergedBranches.size > 0) {
-      const isTropComment = (comment) => comment && comment.user && comment.user.login === TROP_LOGIN;
+      const isTropComment = (comment: MinimalComment | null) =>
+        comment && comment.user && comment.user.login === TROP_LOGIN;

-      const ghKey = GHKey.NewFromPull(pull.data);
-      const backportRegex = /backported this PR to "(.*)",\s+please check out #(\d+)/;
-      const getBranchNameAndPullKey = (comment) => {
-        const match = ((comment && comment.body) ? comment.body : '').match(backportRegex);
-        return match ? [match[1], new GHKey(ghKey.owner, ghKey.repo, parseInt(match[2]))] : null;
+      const ghKey = GHKey.NewFromPull(pull);
+      const backportRegex =
+        /backported this PR to "(.*)",\s+please check out #(\d+)/;
+      const getBranchNameAndPullKey = (comment: MinimalComment) => {
+        const match = (comment && comment.body ? comment.body : '').match(
+          backportRegex
+        );
+        return match
+          ? <const>[match[1], new GHKey(ghKey.owner, ghKey.repo, parseInt(match[2]))]
+          : null;
       };

       const comments = await getComments(ghKey);
-      ((comments && comments.data) ? comments.data : [])
+      (comments && comments.data ? comments.data : [])
        .filter(isTropComment)
        .map(getBranchNameAndPullKey)
-        .filter(pair => pair)
+        .filter((pair) => !!pair)
        .filter(([branch]) => mergedBranches.has(branch))
        .forEach(([branch, key]) => branches.set(branch, key));
     }
@@ -383,36 +485,48 @@ async function getMergedTrops (commit, pool) {

 // @return the shorthand name of the branch that `ref` is on,
 // e.g. a ref of '10.0.0-beta.1' will return '10-x-y'
-async function getBranchNameOfRef (ref, dir) {
-  return (await runGit(dir, ['branch', '--all', '--contains', ref, '--sort', 'version:refname']))
+async function getBranchNameOfRef (ref: string, dir: string) {
+  const result = await runGit(dir, [
+    'branch',
+    '--all',
+    '--contains',
+    ref,
+    '--sort',
+    'version:refname'
+  ]);
+  return result
     .split(/\r?\n/) // split into lines
-    .shift() // we sorted by refname and want the first result
-    .match(/(?:\s?\*\s){0,1}(.*)/)[1] // if present, remove leading '* ' in case we're currently in that branch
-    .match(/(?:.*\/)?(.*)/)[1] // 'remote/origins/10-x-y' -> '10-x-y'
+    .shift()! // we sorted by refname and want the first result
+    .match(/(?:\s?\*\s){0,1}(.*)/)![1] // if present, remove leading '* ' in case we're currently in that branch
+    .match(/(?:.*\/)?(.*)/)![1] // 'remote/origins/10-x-y' -> '10-x-y'
     .trim();
 }

 /***
-**** Main
-***/
+**** Main
+***/

-const getNotes = async (fromRef, toRef, newVersion) => {
+const getNotes = async (fromRef: string, toRef: string, newVersion: string) => {
   const cacheDir = getCacheDir();
-  if (!fs.existsSync(cacheDir)) {
-    fs.mkdirSync(cacheDir);
+  if (!existsSync(cacheDir)) {
+    mkdirSync(cacheDir);
   }

   const pool = new Pool();
   const toBranch = await getBranchNameOfRef(toRef, ELECTRON_DIR);

-  console.log(`Generating release notes between '${fromRef}' and '${toRef}' for version '${newVersion}' in branch '${toBranch}'`);
+  console.log(
+    `Generating release notes between '${fromRef}' and '${toRef}' for version '${newVersion}' in branch '${toBranch}'`
+  );

   // get the electron/electron commits
   const electron = { owner: 'electron', repo: 'electron', dir: ELECTRON_DIR };
   await addRepoToPool(pool, electron, fromRef, toRef);

   // remove any old commits
-  pool.commits = pool.commits.filter(commit => !pool.processedHashes.has(commit.hash));
+  pool.commits = pool.commits.filter(
+    (commit) => !pool.processedHashes.has(commit.hash)
+  );

   // if a commit _and_ revert occurred in the unprocessed set, skip them both
   for (const commit of pool.commits) {
@@ -421,7 +535,7 @@ const getNotes = async (fromRef, toRef, newVersion) => {
       continue;
     }

-    const revert = pool.commits.find(commit => commit.hash === revertHash);
+    const revert = pool.commits.find((commit) => commit.hash === revertHash);
     if (!revert) {
       continue;
     }
@@ -438,15 +552,15 @@ const getNotes = async (fromRef, toRef, newVersion) => {
       if (commit.note) {
         break;
       }
-      commit.note = await getNoteFromClerk(prKey);
+      commit.note = await getNoteFromClerk(prKey) || null;
     }
   }

   // remove non-user-facing commits
   pool.commits = pool.commits
-    .filter(commit => commit && commit.note)
-    .filter(commit => commit.note !== NO_NOTES)
-    .filter(commit => commit.note.match(/^[Bb]ump v\d+\.\d+\.\d+/) === null);
+    .filter((commit) => commit && commit.note)
+    .filter((commit) => commit.note !== NO_NOTES)
+    .filter((commit) => commit.note!.match(/^[Bb]ump v\d+\.\d+\.\d+/) === null);

   for (const commit of pool.commits) {
     commit.trops = await getMergedTrops(commit, pool);
@@ -455,12 +569,12 @@ const getNotes = async (fromRef, toRef, newVersion) => {
   pool.commits = removeSupercededStackUpdates(pool.commits);

   const notes = {
-    breaking: [],
-    docs: [],
-    feat: [],
-    fix: [],
-    other: [],
-    unknown: [],
+    breaking: [] as Commit[],
+    docs: [] as Commit[],
+    feat: [] as Commit[],
+    fix: [] as Commit[],
+    other: [] as Commit[],
+    unknown: [] as Commit[],
     name: newVersion,
     toBranch
   };
@@ -487,11 +601,13 @@ const getNotes = async (fromRef, toRef, newVersion) => {
   return notes;
 };

-const compareVersions = (v1, v2) => {
+const compareVersions = (v1: string, v2: string) => {
   const [split1, split2] = [v1.split('.'), v2.split('.')];

   if (split1.length !== split2.length) {
-    throw new Error(`Expected version strings to have same number of sections: ${split1} and ${split2}`);
+    throw new Error(
+      `Expected version strings to have same number of sections: ${split1} and ${split2}`
+    );
   }
   for (let i = 0; i < split1.length; i++) {
     const p1 = parseInt(split1[i], 10);
@@ -505,13 +621,13 @@ const compareVersions = (v1, v2) => {
   return 0;
 };

-const removeSupercededStackUpdates = (commits) => {
+const removeSupercededStackUpdates = (commits: Commit[]) => {
   const updateRegex = /^Updated ([a-zA-Z.]+) to v?([\d.]+)/;
   const notupdates = [];

-  const newest = {};
+  const newest: Record<string, { commit: Commit; version: string }> = Object.create(null);
   for (const commit of commits) {
-    const match = (commit.note || commit.subject).match(updateRegex);
+    const match = (commit.note || commit.subject)?.match(updateRegex);
     if (!match) {
       notupdates.push(commit);
       continue;
@@ -523,48 +639,56 @@ const removeSupercededStackUpdates = (commits) => {
     }
   }

-  return [...notupdates, ...Object.values(newest).map(o => o.commit)];
+  return [...notupdates, ...Object.values(newest).map((o) => o.commit)];
 };

 /***
-**** Render
-***/
+**** Render
+***/

 // @return the pull request's GitHub URL
-const buildPullURL = ghKey => `https://github.com/${ghKey.owner}/${ghKey.repo}/pull/${ghKey.number}`;
+const buildPullURL = (ghKey: GHKey) =>
+  `https://github.com/${ghKey.owner}/${ghKey.repo}/pull/${ghKey.number}`;

-const renderPull = ghKey => `[#${ghKey.number}](${buildPullURL(ghKey)})`;
+const renderPull = (ghKey: GHKey) =>
+  `[#${ghKey.number}](${buildPullURL(ghKey)})`;

 // @return the commit's GitHub URL
-const buildCommitURL = commit => `https://github.com/${commit.owner}/${commit.repo}/commit/${commit.hash}`;
+const buildCommitURL = (commit: Commit) =>
+  `https://github.com/${commit.owner}/${commit.repo}/commit/${commit.hash}`;

-const renderCommit = commit => `[${commit.hash.slice(0, 8)}](${buildCommitURL(commit)})`;
+const renderCommit = (commit: Commit) =>
+  `[${commit.hash.slice(0, 8)}](${buildCommitURL(commit)})`;

 // @return a markdown link to the PR if available; otherwise, the git commit
-function renderLink (commit) {
+function renderLink (commit: Commit) {
   const maybePull = commit.prKeys.values().next();
   return maybePull.value ? renderPull(maybePull.value) : renderCommit(commit);
 }

 // @return a terser branch name,
 // e.g. '7-2-x' -> '7.2' and '8-x-y' -> '8'
-const renderBranchName = name => name.replace(/-[a-zA-Z]/g, '').replace('-', '.');
+const renderBranchName = (name: string) =>
+  name.replace(/-[a-zA-Z]/g, '').replace('-', '.');

-const renderTrop = (branch, ghKey) => `[${renderBranchName(branch)}](${buildPullURL(ghKey)})`;
+const renderTrop = (branch: string, ghKey: GHKey) =>
+  `[${renderBranchName(branch)}](${buildPullURL(ghKey)})`;

 // @return markdown-formatted links to other branches' trops,
 // e.g. "(Also in 7.2, 8, 9)"
-function renderTrops (commit, excludeBranch) {
+function renderTrops (commit: Commit, excludeBranch: string) {
   const body = [...commit.trops.entries()]
     .filter(([branch]) => branch !== excludeBranch)
     .sort(([branchA], [branchB]) => parseInt(branchA) - parseInt(branchB)) // sort by semver major
     .map(([branch, key]) => renderTrop(branch, key))
     .join(', ');
-  return body ? `<span style="font-size:small;">(Also in ${body})</span>` : body;
+  return body
+    ? `<span style="font-size:small;">(Also in ${body})</span>`
+    : body;
 }

 // @return a slightly cleaned-up human-readable change description
-function renderDescription (commit) {
+function renderDescription (commit: Commit) {
   let note = commit.note || commit.subject || '';
   note = note.trim();

@@ -616,21 +740,26 @@ function renderDescription (commit) {

 // @return markdown-formatted release note line item,
 // e.g. '* Fixed a foo. #12345 (Also in 7.2, 8, 9)'
-const renderNote = (commit, excludeBranch) =>
-  `* ${renderDescription(commit)} ${renderLink(commit)} ${renderTrops(commit, excludeBranch)}\n`;
+const renderNote = (commit: Commit, excludeBranch: string) =>
+  `* ${renderDescription(commit)} ${renderLink(commit)} ${renderTrops(
+    commit,
+    excludeBranch
+  )}\n`;

-const renderNotes = (notes, unique = false) => {
+const renderNotes = (notes: Awaited<ReturnType<typeof getNotes>>, unique = false) => {
   const rendered = [`# Release Notes for ${notes.name}\n\n`];

-  const renderSection = (title, commits, unique) => {
+  const renderSection = (title: string, commits: Commit[], unique: boolean) => {
     if (unique) {
       // omit changes that also landed in other branches
-      commits = commits.filter((commit) => renderTrops(commit, notes.toBranch).length === 0);
+      commits = commits.filter(
+        (commit) => renderTrops(commit, notes.toBranch).length === 0
+      );
     }
     if (commits.length > 0) {
       rendered.push(
         `## ${title}\n\n`,
-        ...(commits.map(commit => renderNote(commit, notes.toBranch)).sort())
+        ...commits.map((commit) => renderNote(commit, notes.toBranch)).sort()
       );
     }
   };
@@ -641,8 +770,12 @@ const renderNotes = (notes, unique = false) => {
   renderSection('Other Changes', notes.other, unique);

   if (notes.docs.length) {
-    const docs = notes.docs.map(commit => renderLink(commit)).sort();
-    rendered.push('## Documentation\n\n', ` * Documentation changes: ${docs.join(', ')}\n`, '\n');
+    const docs = notes.docs.map((commit) => renderLink(commit)).sort();
+    rendered.push(
+      '## Documentation\n\n',
+      ` * Documentation changes: ${docs.join(', ')}\n`,
+      '\n'
+    );
   }

   renderSection('Unknown', notes.unknown, unique);
@@ -651,10 +784,8 @@ const renderNotes = (notes, unique = false) => {
 };

 /***
-**** Module
-***/
+**** Module
+***/

-module.exports = {
-  get: getNotes,
-  render: renderNotes
-};
+export const get = getNotes;
+export const render = renderNotes;
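One pattern worth noting from the hunks above: renderNotes types its argument as Awaited<ReturnType<typeof getNotes>> rather than a hand-written interface, so the notes shape stays in sync with whatever getNotes returns. A minimal standalone sketch of the idea, where the two fields are an illustrative subset and not the full notes object from this commit:

// Derive the result type from the async function itself instead of maintaining a parallel interface.
async function getNotes (from: string, to: string) {
  return { name: `${from}..${to}`, toBranch: 'main' }; // illustrative fields only
}

type ReleaseNotes = Awaited<ReturnType<typeof getNotes>>; // { name: string; toBranch: string }

const render = (notes: ReleaseNotes) => `# Release Notes for ${notes.name} (branch ${notes.toBranch})\n`;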