build: migrate to GitHub Actions (#42671)
* build: migrate to GitHub Actions
* build: strip binaries w/ target arch
* chore: fix lint on GHA
* build: remove CircleCI from release builds
parent bdf8c75d33
commit ec64e675c3
26 changed files with 2190 additions and 201 deletions
@@ -1,34 +1,82 @@
-#!/bin/sh
+#!/bin/bash
+
 set -eo pipefail

-if [ -z "$MAS_BUILD" ]; then
-  BUILD_TYPE="darwin"
+if [ "`uname`" == "Darwin" ]; then
+  if [ -z "$MAS_BUILD" ]; then
+    BUILD_TYPE="darwin"
+  else
+    BUILD_TYPE="mas"
+  fi
+elif [ "`uname`" == "Linux" ]; then
+  BUILD_TYPE="linux"
 else
-  BUILD_TYPE="mas"
+  echo "Unsupported platform"
+  exit 1
 fi

-echo Creating generated_artifacts_${BUILD_TYPE}_${TARGET_ARCH}...
-rm -rf generated_artifacts_${BUILD_TYPE}_${TARGET_ARCH}
-mkdir generated_artifacts_${BUILD_TYPE}_${TARGET_ARCH}
+GENERATED_ARTIFACTS="generated_artifacts_${BUILD_TYPE}_${TARGET_ARCH}"
+
+echo Creating $GENERATED_ARTIFACTS...
+rm -rf $GENERATED_ARTIFACTS
+mkdir $GENERATED_ARTIFACTS
+
+SRC_ARTIFACTS="src_artifacts_${BUILD_TYPE}_${TARGET_ARCH}"
+
+echo Creating $SRC_ARTIFACTS...
+rm -rf $SRC_ARTIFACTS
+mkdir $SRC_ARTIFACTS

 mv_if_exist() {
   if [ -f "$1" ] || [ -d "$1" ]; then
     echo Storing $1
-    mv $1 generated_artifacts_${BUILD_TYPE}_${TARGET_ARCH}
-  else
-    echo Skipping $1 - It is not present on disk
-  fi
-}
-cp_if_exist() {
-  if [ -f "$1" ] || [ -d "$1" ]; then
-    echo Storing $1
-    cp $1 generated_artifacts_${BUILD_TYPE}_${TARGET_ARCH}
+    mv $1 $GENERATED_ARTIFACTS
+  else
+    echo Skipping $1 - It is not present on disk
+  fi
+}
+
+cp_if_exist() {
+  if [ -f "$1" ] || [ -d "$1" ]; then
+    echo Storing $1
+    cp $1 $GENERATED_ARTIFACTS
   else
     echo Skipping $1 - It is not present on disk
   fi
 }

+move_src_dirs_if_exist() {
+  mkdir src_artifacts
+
+  for dir in \
+    src/out/Default/gen/node_headers \
+    src/out/Default/overlapped-checker \
+    src/out/Default/ffmpeg \
+    src/out/Default/hunspell_dictionaries \
+    src/third_party/electron_node \
+    src/third_party/nan \
+    src/cross-arch-snapshots \
+    src/third_party/llvm-build \
+    src/build/linux \
+    src/buildtools/mac \
+    src/buildtools/third_party/libc++ \
+    src/buildtools/third_party/libc++abi \
+    src/third_party/libc++ \
+    src/third_party/libc++abi \
+    src/out/Default/obj/buildtools/third_party \
+    src/v8/tools/builtins-pgo
+  do
+    if [ -d "$dir" ]; then
+      mkdir -p src_artifacts/$(dirname $dir)
+      cp -r $dir/ src_artifacts/$dir
+    fi
+  done
+
+  tar -C src_artifacts -cf src_artifacts.tar ./
+
+  echo Storing src_artifacts.tar
+  mv src_artifacts.tar $SRC_ARTIFACTS
+}
+
 # Generated Artifacts
 mv_if_exist src/out/Default/dist.zip
 mv_if_exist src/out/Default/gen/node_headers.tar.gz
 mv_if_exist src/out/Default/symbols.zip

@@ -39,3 +87,5 @@ mv_if_exist src/out/Default/hunspell_dictionaries.zip
 mv_if_exist src/cross-arch-snapshots
 cp_if_exist src/out/electron_ninja_log
 cp_if_exist src/out/Default/.ninja_log
+
+move_src_dirs_if_exist
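For orientation: the staging script above derives BUILD_TYPE from uname (and MAS_BUILD) but reads TARGET_ARCH from the environment, so the calling workflow has to export it. A minimal sketch of an invocation on a Linux x64 builder (the script path is a placeholder, not taken from this diff):

    export TARGET_ARCH=x64
    bash ./<staging-script>.sh   # stages outputs into generated_artifacts_linux_x64/ and src_artifacts_linux_x64/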
@@ -1,17 +1,30 @@
 #!/bin/bash

 set -eo pipefail
+GENERATED_ARTIFACTS="generated_artifacts_${BUILD_TYPE}_${TARGET_ARCH}"
+SRC_ARTIFACTS="src_artifacts_${BUILD_TYPE}_${TARGET_ARCH}"

 mv_if_exist() {
-  if [ -f "generated_artifacts_${BUILD_TYPE}_${TARGET_ARCH}/$1" ] || [ -d "generated_artifacts_${BUILD_TYPE}_${TARGET_ARCH}/$1" ]; then
+  if [ -f "${GENERATED_ARTIFACTS}/$1" ] || [ -d "${GENERATED_ARTIFACTS}/$1" ]; then
     echo Restoring $1 to $2
     mkdir -p $2
-    mv generated_artifacts_${BUILD_TYPE}_${TARGET_ARCH}/$1 $2
+    mv $GENERATED_ARTIFACTS/$1 $2
   else
     echo Skipping $1 - It is not present on disk
   fi
 }
+
+untar_if_exist() {
+  if [ -f "${SRC_ARTIFACTS}/$1" ] || [ -d "${SRC_ARTIFACTS}/$1" ]; then
+    echo Restoring $1 to current directory
+    tar -xf ${SRC_ARTIFACTS}/$1
+  else
+    echo Skipping $1 - It is not present on disk
+  fi
+}
+
+echo Restoring artifacts from $GENERATED_ARTIFACTS

 # Restore generated artifacts
 mv_if_exist dist.zip src/out/Default
 mv_if_exist node_headers.tar.gz src/out/Default/gen
 mv_if_exist symbols.zip src/out/Default

@@ -19,4 +32,9 @@ mv_if_exist mksnapshot.zip src/out/Default
 mv_if_exist chromedriver.zip src/out/Default
 mv_if_exist ffmpeg.zip src/out/ffmpeg
 mv_if_exist hunspell_dictionaries.zip src/out/Default
-mv_if_exist cross-arch-snapshots src
+mv_if_exist cross-arch-snapshots src
+
+echo Restoring artifacts from $SRC_ARTIFACTS
+
+# Restore src artifacts
+untar_if_exist src_artifacts.tar
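Unlike the staging script, the restore script above does not derive BUILD_TYPE itself; it only recomposes the same directory names from BUILD_TYPE and TARGET_ARCH, so both must already be set in the job's environment. A hypothetical restore step (values and script path are placeholders, not taken from this diff):

    export BUILD_TYPE=linux TARGET_ARCH=x64
    bash ./<restore-script>.sh   # moves dist.zip, symbols.zip, etc. back under src/out/Default and unpacks src_artifacts.tar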
script/actions/run-tests.sh (new executable file, 7 additions)

@@ -0,0 +1,7 @@
+#!/bin/bash
+set -euo pipefail
+
+export DISPLAY=:99
+Xvfb :99 -screen 0 1024x768x16 -ac &
+XVFB_PID=$!
+node "$@"
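The wrapper above starts an Xvfb display and then runs node with whatever arguments it is given, so a headless test step could be driven through it roughly like this (the spec-runner path and flag are assumptions for illustration, not taken from this diff):

    script/actions/run-tests.sh script/spec-runner.js --ci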
@@ -9,11 +9,9 @@ const octokit = new Octokit({
 });

 const BUILD_APPVEYOR_URL = 'https://ci.appveyor.com/api/builds';
-const CIRCLECI_PIPELINE_URL = 'https://circleci.com/api/v2/project/gh/electron/electron/pipeline';
 const GH_ACTIONS_PIPELINE_URL = 'https://github.com/electron/electron/actions';
 const GH_ACTIONS_API_URL = '/repos/electron/electron/actions';

-const CIRCLECI_WAIT_TIME = process.env.CIRCLECI_WAIT_TIME || 30000;
 const GH_ACTIONS_WAIT_TIME = process.env.GH_ACTIONS_WAIT_TIME || 30000;

 const appVeyorJobs = {

@@ -22,18 +20,9 @@ const appVeyorJobs = {
   'electron-woa': 'electron-woa-release'
 };

-const circleCIPublishWorkflows = [
-  'linux-publish',
-  'macos-publish'
-];
-
-const circleCIPublishIndividualArches = {
-  'macos-publish': ['osx-x64', 'mas-x64', 'osx-arm64', 'mas-arm64'],
-  'linux-publish': ['arm', 'arm64', 'x64']
-};
-
 const ghActionsPublishWorkflows = [
-  'macos-publish'
+  'macos-publish',
+  'linux-publish'
 ];

 let jobRequestedCount = 0;

@@ -120,120 +109,6 @@ async function githubActionsCall (targetBranch, workflowName, options) {
   }
 }

-async function circleCIcall (targetBranch, workflowName, options) {
-  console.log(`Triggering CircleCI to run build job: ${workflowName} on branch: ${targetBranch} with release flag.`);
-  const buildRequest = {
-    branch: targetBranch,
-    parameters: {}
-  };
-  if (options.ghRelease) {
-    buildRequest.parameters['upload-to-storage'] = '0';
-  } else {
-    buildRequest.parameters['upload-to-storage'] = '1';
-  }
-  buildRequest.parameters[`run-${workflowName}`] = true;
-  if (options.arch) {
-    const validArches = circleCIPublishIndividualArches[workflowName];
-    assert(validArches.includes(options.arch), `Unknown CircleCI architecture "${options.arch}". Valid values are ${JSON.stringify(validArches)}`);
-    buildRequest.parameters['macos-publish-arch-limit'] = options.arch;
-  }
-
-  jobRequestedCount++;
-  // The logic below expects that the CircleCI workflows for releases each
-  // contain only one job in order to maintain compatibility with sudowoodo.
-  // If the workflows are changed in the CircleCI config.yml, this logic will
-  // also need to be changed as well as possibly changing sudowoodo.
-  try {
-    const circleResponse = await circleCIRequest(CIRCLECI_PIPELINE_URL, 'POST', buildRequest);
-    console.log(`CircleCI release build pipeline ${circleResponse.id} for ${workflowName} triggered.`);
-    const workflowId = await getCircleCIWorkflowId(circleResponse.id);
-    if (workflowId === -1) {
-      return;
-    }
-    const workFlowUrl = `https://circleci.com/workflow-run/${workflowId}`;
-    if (options.runningPublishWorkflows) {
-      console.log(`CircleCI release workflow request for ${workflowName} successful. Check ${workFlowUrl} for status.`);
-    } else {
-      console.log(`CircleCI release build workflow running at https://circleci.com/workflow-run/${workflowId} for ${workflowName}.`);
-      const jobNumber = await getCircleCIJobNumber(workflowId);
-      if (jobNumber === -1) {
-        return;
-      }
-      const jobUrl = `https://circleci.com/gh/electron/electron/${jobNumber}`;
-      console.log(`CircleCI release build request for ${workflowName} successful. Check ${jobUrl} for status.`);
-    }
-  } catch (err) {
-    console.log('Error calling CircleCI: ', err);
-  }
-}
-
-async function getCircleCIWorkflowId (pipelineId) {
-  const pipelineInfoUrl = `https://circleci.com/api/v2/pipeline/${pipelineId}`;
-  let workflowId = 0;
-  while (workflowId === 0) {
-    const pipelineInfo = await circleCIRequest(pipelineInfoUrl, 'GET');
-    switch (pipelineInfo.state) {
-      case 'created': {
-        const workflows = await circleCIRequest(`${pipelineInfoUrl}/workflow`, 'GET');
-        // The logic below expects three workflow.items: publish, lint, & setup
-        if (workflows.items.length === 3) {
-          workflowId = workflows.items.find(item => item.name.includes('publish')).id;
-          break;
-        }
-        console.log('Unexpected number of workflows, response was:', workflows);
-        workflowId = -1;
-        break;
-      }
-      case 'error': {
-        console.log('Error retrieving workflows, response was:', pipelineInfo);
-        workflowId = -1;
-        break;
-      }
-    }
-    await new Promise(resolve => setTimeout(resolve, CIRCLECI_WAIT_TIME));
-  }
-  return workflowId;
-}
-
-async function getCircleCIJobNumber (workflowId) {
-  const jobInfoUrl = `https://circleci.com/api/v2/workflow/${workflowId}/job`;
-  let jobNumber = 0;
-  while (jobNumber === 0) {
-    const jobInfo = await circleCIRequest(jobInfoUrl, 'GET');
-    if (!jobInfo.items) {
-      continue;
-    }
-    if (jobInfo.items.length !== 1) {
-      console.log('Unexpected number of jobs, response was:', jobInfo);
-      jobNumber = -1;
-      break;
-    }
-
-    switch (jobInfo.items[0].status) {
-      case 'not_running':
-      case 'queued':
-      case 'running': {
-        if (jobInfo.items[0].job_number && !isNaN(jobInfo.items[0].job_number)) {
-          jobNumber = jobInfo.items[0].job_number;
-        }
-        break;
-      }
-      case 'canceled':
-      case 'error':
-      case 'infrastructure_fail':
-      case 'timedout':
-      case 'not_run':
-      case 'failed': {
-        console.log(`Error job returned a status of ${jobInfo.items[0].status}, response was:`, jobInfo);
-        jobNumber = -1;
-        break;
-      }
-    }
-    await new Promise(resolve => setTimeout(resolve, CIRCLECI_WAIT_TIME));
-  }
-  return jobNumber;
-}
-
 async function getGitHubActionsRun (workflowId, headCommit) {
   let runNumber = 0;
   let actionRun;

@@ -287,33 +162,6 @@ async function getGitHubActionsRun (workflowId, headCommit) {
   return runNumber;
 }

-async function circleCIRequest (url, method, requestBody) {
-  const requestOpts = {
-    username: process.env.CIRCLE_TOKEN,
-    password: '',
-    method,
-    url,
-    headers: {
-      'Content-Type': 'application/json',
-      Accept: 'application/json'
-    }
-  };
-  if (requestBody) {
-    requestOpts.body = JSON.stringify(requestBody);
-  }
-
-  return makeRequest(requestOpts, true).catch(err => {
-    if (err.response?.body) {
-      console.error('Could not call CircleCI: ', {
-        statusCode: err.response.statusCode,
-        body: JSON.parse(err.response.body)
-      });
-    } else {
-      console.error('Error calling CircleCI:', err);
-    }
-  });
-}
-
 async function callAppVeyor (targetBranch, job, options) {
   console.log(`Triggering AppVeyor to run build job: ${job} on branch: ${targetBranch} with release flag.`);
   const environmentVariables = {

@@ -372,19 +220,6 @@ function buildAppVeyor (targetBranch, options) {
   }
 }

-function buildCircleCI (targetBranch, options) {
-  if (options.job) {
-    assert(circleCIPublishWorkflows.includes(options.job), `Unknown CircleCI workflow name: ${options.job}. Valid values are: ${circleCIPublishWorkflows}.`);
-    circleCIcall(targetBranch, options.job, options);
-  } else {
-    assert(!options.arch, 'Cannot provide a single architecture while building all workflows, please specify a single workflow via --workflow');
-    options.runningPublishWorkflows = true;
-    for (const job of circleCIPublishWorkflows) {
-      circleCIcall(targetBranch, job, options);
-    }
-  }
-}
-
-// eslint-disable-next-line @typescript-eslint/no-unused-vars
 function buildGHActions (targetBranch, options) {
   if (options.job) {

@@ -402,14 +237,10 @@ function buildGHActions (targetBranch, options) {
 function runRelease (targetBranch, options) {
   if (options.ci) {
     switch (options.ci) {
-      case 'CircleCI': {
-        buildCircleCI(targetBranch, options);
+      case 'GitHubActions': {
+        buildGHActions(targetBranch, options);
         break;
       }
-      // case 'GitHubActions': {
-      //   buildGHActions(targetBranch, options);
-      //   break;
-      // }
       case 'AppVeyor': {
         buildAppVeyor(targetBranch, options);
         break;

@@ -420,9 +251,8 @@ function runRelease (targetBranch, options) {
       }
     }
   } else {
-    buildCircleCI(targetBranch, options);
     buildAppVeyor(targetBranch, options);
-    // buildGHActions(targetBranch, options);
+    buildGHActions(targetBranch, options);
   }
   console.log(`${jobRequestedCount} jobs were requested.`);
 }

@@ -436,8 +266,8 @@ if (require.main === module) {
   const targetBranch = args._[0];
   if (args._.length < 1) {
     console.log(`Trigger CI to build release builds of electron.
-    Usage: ci-release-build.js [--job=CI_JOB_NAME] [--arch=INDIVIDUAL_ARCH] [--ci=CircleCI|AppVeyor|GitHubActions]
-    [--ghRelease] [--circleBuildNum=xxx] [--appveyorJobId=xxx] [--commit=sha] TARGET_BRANCH
+    Usage: ci-release-build.js [--job=CI_JOB_NAME] [--arch=INDIVIDUAL_ARCH] [--ci=AppVeyor|GitHubActions]
+    [--ghRelease] [--appveyorJobId=xxx] [--commit=sha] TARGET_BRANCH
     `);
     process.exit(0);
   }
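With the CircleCI code paths removed, a single GitHub Actions publish workflow would be requested through ci-release-build.js via --ci=GitHubActions, along the lines of the following (the script path and branch name are illustrative assumptions; the flags and job name come from the code above):

    node script/release/ci-release-build.js --ci=GitHubActions --job=linux-publish --ghRelease main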
script/split-tests.js (new executable file, 32 additions)

@@ -0,0 +1,32 @@
+const fs = require('node:fs');
+const glob = require('glob');
+
+const currentShard = parseInt(process.argv[2], 10);
+const shardCount = parseInt(process.argv[3], 10);
+
+const specFiles = glob.sync('spec/*-spec.ts');
+
+const buckets = [];
+
+for (let i = 0; i < shardCount; i++) {
+  buckets.push([]);
+}
+
+const testsInSpecFile = Object.create(null);
+for (const specFile of specFiles) {
+  const testContent = fs.readFileSync(specFile, 'utf8');
+  testsInSpecFile[specFile] = testContent.split('it(').length;
+}
+
+specFiles.sort((a, b) => {
+  return testsInSpecFile[b] - testsInSpecFile[a];
+});
+
+let shard = 0;
+for (const specFile of specFiles) {
+  buckets[shard].push(specFile);
+  shard++;
+  if (shard === shardCount) shard = 0;
+}
+
+console.log(buckets[currentShard - 1].join(' '));
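script/split-tests.js sorts the spec files by a rough count of it( occurrences, deals them round-robin into the requested number of shards, and prints the space-separated file list for the 1-indexed shard given as the first argument. A sample invocation for shard 2 of 4 (shard numbers are illustrative):

    node script/split-tests.js 2 4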