build: offload hash checking logic to lambda worker during release (#29096)

Author: Samuel Attard · 2021-05-11 09:30:35 -07:00 (committed by GitHub)
parent 2b84d79b18
commit 35f2ed8978
5 changed files with 105 additions and 156 deletions

--- a/package.json
+++ b/package.json

@@ -64,7 +64,6 @@
     "shx": "^0.3.2",
     "standard-markdown": "^6.0.0",
     "stream-json": "^1.7.1",
-    "sumchecker": "^2.0.2",
     "tap-xunit": "^2.4.1",
     "temp": "^0.8.3",
     "timers-browserify": "1.4.2",

--- /dev/null
+++ b/script/release/get-url-hash.js

@@ -0,0 +1,31 @@
+const AWS = require('aws-sdk');
+
+const lambda = new AWS.Lambda({
+  credentials: {
+    accessKeyId: process.env.AWS_LAMBDA_EXECUTE_KEY,
+    secretAccessKey: process.env.AWS_LAMBDA_EXECUTE_SECRET
+  },
+  region: 'us-east-1'
+});
+
+module.exports = function getUrlHash (targetUrl, algorithm = 'sha256') {
+  return new Promise((resolve, reject) => {
+    lambda.invoke({
+      FunctionName: 'hasher',
+      Payload: JSON.stringify({
+        targetUrl,
+        algorithm
+      })
+    }, (err, data) => {
+      if (err) return reject(err);
+      try {
+        const response = JSON.parse(data.Payload);
+        if (response.statusCode !== 200) return reject(new Error('non-200 status code received from hasher function'));
+        if (!response.hash) return reject(new Error('Successful lambda call but failed to get valid hash'));
+        resolve(response.hash);
+      } catch (err) {
+        return reject(err);
+      }
+    });
+  });
+};
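Note: the 'hasher' Lambda function invoked above is deployed separately, and its source is not part of this commit. A minimal sketch of what such a handler could look like, assuming it only has to satisfy the contract get-url-hash.js relies on (a { targetUrl, algorithm } event in, a JSON payload with { statusCode, hash } out), is:

// Hypothetical Lambda handler; illustration only, not the deployed function.
const https = require('https');
const crypto = require('crypto');

exports.handler = (event) => new Promise((resolve, reject) => {
  const { targetUrl, algorithm = 'sha256' } = event;
  const hash = crypto.createHash(algorithm);
  // Simplification: does not follow redirects; the release script resolves
  // authenticated redirect locations before calling getUrlHash.
  https.get(targetUrl, (res) => {
    if (res.statusCode !== 200) {
      // Propagate a non-200 status; the client rejects on this.
      return resolve({ statusCode: res.statusCode });
    }
    res.on('data', (chunk) => hash.update(chunk));
    res.on('end', () => resolve({ statusCode: 200, hash: hash.digest('hex') }));
    res.on('error', reject);
  }).on('error', reject);
});

Callers use the exported module as in the release script below: `const hash = await getUrlHash(url, 'sha256');`.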

--- a/script/release/prepare-release.js
+++ b/script/release/prepare-release.js

@@ -135,8 +135,7 @@ async function pushRelease (branch) {
 async function runReleaseBuilds (branch) {
   await ciReleaseBuild(branch, {
-    ghRelease: true,
-    automaticRelease: args.automaticRelease
+    ghRelease: true
   });
 }

--- a/script/release/release.js
+++ b/script/release/release.js

@@ -5,20 +5,16 @@ if (!process.env.CI) require('dotenv-safe').load();
 const args = require('minimist')(process.argv.slice(2), {
   boolean: [
     'validateRelease',
-    'skipVersionCheck',
-    'automaticRelease',
     'verboseNugget'
   ],
   default: { verboseNugget: false }
 });
 const fs = require('fs');
 const { execSync } = require('child_process');
-const nugget = require('nugget');
 const got = require('got');
 const pkg = require('../../package.json');
 const pkgVersion = `v${pkg.version}`;
 const path = require('path');
-const sumchecker = require('sumchecker');
 const temp = require('temp').track();
 const { URL } = require('url');
 const { Octokit } = require('@octokit/rest');
@@ -29,6 +25,7 @@ const pass = '✓'.green;
 const fail = '✗'.red;
 const { ELECTRON_DIR } = require('../lib/utils');
+const getUrlHash = require('./get-url-hash');
 
 const octokit = new Octokit({
   auth: process.env.ELECTRON_GITHUB_TOKEN
 });
@@ -64,7 +61,7 @@ async function getDraftRelease (version, skipValidation) {
 async function validateReleaseAssets (release, validatingRelease) {
   const requiredAssets = assetsForVersion(release.tag_name, validatingRelease).sort();
   const extantAssets = release.assets.map(asset => asset.name).sort();
-  const downloadUrls = release.assets.map(asset => asset.browser_download_url).sort();
+  const downloadUrls = release.assets.map(asset => ({ url: asset.browser_download_url, file: asset.name })).sort((a, b) => a.file.localeCompare(b.file));
 
   failureCount = 0;
   requiredAssets.forEach(asset => {
@@ -74,15 +71,15 @@ async function validateReleaseAssets (release, validatingRelease) {
   if (!validatingRelease || !release.draft) {
     if (release.draft) {
-      await verifyAssets(release);
+      await verifyDraftGitHubReleaseAssets(release);
     } else {
-      await verifyShasums(downloadUrls)
+      await verifyShasumsForRemoteFiles(downloadUrls)
         .catch(err => {
           console.log(`${fail} error verifyingShasums`, err);
         });
     }
 
-    const s3Urls = s3UrlsForVersion(release.tag_name);
-    await verifyShasums(s3Urls, true);
+    const s3RemoteFiles = s3RemoteFilesForVersion(release.tag_name);
+    await verifyShasumsForRemoteFiles(s3RemoteFiles, true);
   }
 }
@@ -174,21 +171,29 @@ function assetsForVersion (version, validatingRelease) {
   return patterns;
 }
 
-function s3UrlsForVersion (version) {
+function s3RemoteFilesForVersion (version) {
   const bucket = 'https://gh-contractor-zcbenz.s3.amazonaws.com/';
-  const patterns = [
-    `${bucket}atom-shell/dist/${version}/iojs-${version}-headers.tar.gz`,
-    `${bucket}atom-shell/dist/${version}/iojs-${version}.tar.gz`,
-    `${bucket}atom-shell/dist/${version}/node-${version}.tar.gz`,
-    `${bucket}atom-shell/dist/${version}/node.lib`,
-    `${bucket}atom-shell/dist/${version}/win-x64/iojs.lib`,
-    `${bucket}atom-shell/dist/${version}/win-x86/iojs.lib`,
-    `${bucket}atom-shell/dist/${version}/x64/node.lib`,
-    `${bucket}atom-shell/dist/${version}/SHASUMS.txt`,
-    `${bucket}atom-shell/dist/${version}/SHASUMS256.txt`,
-    `${bucket}atom-shell/dist/index.json`
+  const versionPrefix = `${bucket}atom-shell/dist/${version}/`;
+  const filePaths = [
+    `iojs-${version}-headers.tar.gz`,
+    `iojs-${version}.tar.gz`,
+    `node-${version}.tar.gz`,
+    'node.lib',
+    'x64/node.lib',
+    'win-x64/iojs.lib',
+    'win-x86/iojs.lib',
+    'win-arm64/iojs.lib',
+    'win-x64/node.lib',
+    'win-x86/node.lib',
+    'win-arm64/node.lib',
+    'arm64/node.lib',
+    'SHASUMS.txt',
+    'SHASUMS256.txt'
   ];
-  return patterns;
+  return filePaths.map((filePath) => ({
+    file: filePath,
+    url: `${versionPrefix}${filePath}`
+  }));
 }
 
 function runScript (scriptName, scriptArgs, cwd) {
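For illustration, the { file, url } pairs this function now produces look like the following standalone sketch (the version tag is hypothetical):

// Illustration only: the return shape of s3RemoteFilesForVersion,
// rebuilt here as a self-contained snippet with a made-up version.
const bucket = 'https://gh-contractor-zcbenz.s3.amazonaws.com/';
const version = 'v13.0.0'; // hypothetical
const versionPrefix = `${bucket}atom-shell/dist/${version}/`;
const remoteFiles = ['node.lib', 'win-x64/iojs.lib'].map((filePath) => ({
  file: filePath,
  url: `${versionPrefix}${filePath}`
}));
console.log(remoteFiles[1]);
// => { file: 'win-x64/iojs.lib',
//      url: 'https://gh-contractor-zcbenz.s3.amazonaws.com/atom-shell/dist/v13.0.0/win-x64/iojs.lib' }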
@@ -366,13 +371,13 @@ async function makeTempDir () {
   });
 }
 
-async function verifyAssets (release) {
-  const downloadDir = await makeTempDir();
-  console.log('Downloading files from GitHub to verify shasums');
-  const shaSumFile = 'SHASUMS256.txt';
+const SHASUM_256_FILENAME = 'SHASUMS256.txt';
+const SHASUM_1_FILENAME = 'SHASUMS.txt';
 
-  let filesToCheck = await Promise.all(release.assets.map(async asset => {
+async function verifyDraftGitHubReleaseAssets (release) {
+  console.log('Fetching authenticated GitHub artifact URLs to verify shasums');
+
+  const remoteFilesToHash = await Promise.all(release.assets.map(async asset => {
     const requestOptions = await octokit.repos.getReleaseAsset.endpoint({
       owner: 'electron',
       repo: targetRepo,
@@ -391,137 +396,59 @@
       headers
     });
-    await downloadFiles(response.headers.location, downloadDir, asset.name);
-    return asset.name;
+    return { url: response.headers.location, file: asset.name };
   })).catch(err => {
     console.log(`${fail} Error downloading files from GitHub`, err);
     process.exit(1);
   });
 
-  filesToCheck = filesToCheck.filter(fileName => fileName !== shaSumFile);
-  let checkerOpts;
-  await validateChecksums({
-    algorithm: 'sha256',
-    filesToCheck,
-    fileDirectory: downloadDir,
-    shaSumFile,
-    checkerOpts,
-    fileSource: 'GitHub'
-  });
+  await verifyShasumsForRemoteFiles(remoteFilesToHash);
 }
 
-function downloadFiles (urls, directory, targetName) {
-  return new Promise((resolve, reject) => {
-    const nuggetOpts = { dir: directory };
-    nuggetOpts.quiet = !args.verboseNugget;
-    if (targetName) nuggetOpts.target = targetName;
-    nugget(urls, nuggetOpts, (err) => {
-      if (err) {
-        reject(err);
-      } else {
-        console.log(`${pass} all files downloaded successfully!`);
-        resolve();
-      }
-    });
-  });
+async function getShaSumMappingFromUrl (shaSumFileUrl, fileNamePrefix) {
+  const response = await got(shaSumFileUrl);
+  const raw = response.body;
+  return raw.split('\n').map(line => line.trim()).filter(Boolean).reduce((map, line) => {
+    const [sha, file] = line.split(' ');
+    map[file.slice(fileNamePrefix.length)] = sha;
+    return map;
+  }, {});
 }
 
-async function verifyShasums (urls, isS3) {
-  const fileSource = isS3 ? 'S3' : 'GitHub';
-  console.log(`Downloading files from ${fileSource} to verify shasums`);
-  const downloadDir = await makeTempDir();
-  let filesToCheck = [];
-  try {
-    if (!isS3) {
-      await downloadFiles(urls, downloadDir);
-      filesToCheck = urls.map(url => {
-        const currentUrl = new URL(url);
-        return path.basename(currentUrl.pathname);
-      }).filter(file => file.indexOf('SHASUMS') === -1);
-    } else {
-      const s3VersionPath = `/atom-shell/dist/${pkgVersion}/`;
-      await Promise.all(urls.map(async (url) => {
-        const currentUrl = new URL(url);
-        const dirname = path.dirname(currentUrl.pathname);
-        const filename = path.basename(currentUrl.pathname);
-        const s3VersionPathIdx = dirname.indexOf(s3VersionPath);
-        if (s3VersionPathIdx === -1 || dirname === s3VersionPath) {
-          if (s3VersionPathIdx !== -1 && filename.indexof('SHASUMS') === -1) {
-            filesToCheck.push(filename);
-          }
-          await downloadFiles(url, downloadDir);
-        } else {
-          const subDirectory = dirname.substr(s3VersionPathIdx + s3VersionPath.length);
-          const fileDirectory = path.join(downloadDir, subDirectory);
-          try {
-            fs.statSync(fileDirectory);
-          } catch (err) {
-            fs.mkdirSync(fileDirectory);
-          }
-          filesToCheck.push(path.join(subDirectory, filename));
-          await downloadFiles(url, fileDirectory);
-        }
-      }));
-    }
-  } catch (err) {
-    console.log(`${fail} Error downloading files from ${fileSource}`, err);
-    process.exit(1);
-  }
-  console.log(`${pass} Successfully downloaded the files from ${fileSource}.`);
-  let checkerOpts;
-  if (isS3) {
-    checkerOpts = { defaultTextEncoding: 'binary' };
-  }
-  await validateChecksums({
-    algorithm: 'sha256',
-    filesToCheck,
-    fileDirectory: downloadDir,
-    shaSumFile: 'SHASUMS256.txt',
-    checkerOpts,
-    fileSource
-  });
-  if (isS3) {
-    await validateChecksums({
-      algorithm: 'sha1',
-      filesToCheck,
-      fileDirectory: downloadDir,
-      shaSumFile: 'SHASUMS.txt',
-      checkerOpts,
-      fileSource
-    });
+async function validateFileHashesAgainstShaSumMapping (remoteFilesWithHashes, mapping) {
+  for (const remoteFileWithHash of remoteFilesWithHashes) {
+    check(remoteFileWithHash.hash === mapping[remoteFileWithHash.file], `Release asset ${remoteFileWithHash.file} should have hash of ${mapping[remoteFileWithHash.file]} but found ${remoteFileWithHash.hash}`, true);
   }
 }
 
-async function validateChecksums (validationArgs) {
-  console.log(`Validating checksums for files from ${validationArgs.fileSource} ` +
-    `against ${validationArgs.shaSumFile}.`);
-  const shaSumFilePath = path.join(validationArgs.fileDirectory, validationArgs.shaSumFile);
-  const checker = new sumchecker.ChecksumValidator(validationArgs.algorithm,
-    shaSumFilePath, validationArgs.checkerOpts);
-  await checker.validate(validationArgs.fileDirectory, validationArgs.filesToCheck)
-    .catch(err => {
-      if (err instanceof sumchecker.ChecksumMismatchError) {
-        console.error(`${fail} The checksum of ${err.filename} from ` +
-          `${validationArgs.fileSource} did not match the shasum in ` +
-          `${validationArgs.shaSumFile}`);
-      } else if (err instanceof sumchecker.ChecksumParseError) {
-        console.error(`${fail} The checksum file ${validationArgs.shaSumFile} ` +
-          `from ${validationArgs.fileSource} could not be parsed.`, err);
-      } else if (err instanceof sumchecker.NoChecksumFoundError) {
-        console.error(`${fail} The file ${err.filename} from ` +
-          `${validationArgs.fileSource} was not in the shasum file ` +
-          `${validationArgs.shaSumFile}.`);
-      } else {
-        console.error(`${fail} Error matching files from ` +
-          `${validationArgs.fileSource} shasums in ${validationArgs.shaSumFile}.`, err);
-      }
-      process.exit(1);
-    });
-  console.log(`${pass} All files from ${validationArgs.fileSource} match ` +
-    `shasums defined in ${validationArgs.shaSumFile}.`);
+async function verifyShasumsForRemoteFiles (remoteFilesToHash, filesAreNodeJSArtifacts = false) {
+  console.log(`Generating SHAs for ${remoteFilesToHash.length} files to verify shasums`);
+
+  // Only used for node.js artifact uploads
+  const shaSum1File = remoteFilesToHash.find(({ file }) => file === SHASUM_1_FILENAME);
+  // Used for both node.js artifact uploads and normal electron artifacts
+  const shaSum256File = remoteFilesToHash.find(({ file }) => file === SHASUM_256_FILENAME);
+  remoteFilesToHash = remoteFilesToHash.filter(({ file }) => file !== SHASUM_1_FILENAME && file !== SHASUM_256_FILENAME);
+
+  const remoteFilesWithHashes = await Promise.all(remoteFilesToHash.map(async (file) => {
+    return {
+      hash: await getUrlHash(file.url, 'sha256'),
+      ...file
+    };
+  }));
+
+  await validateFileHashesAgainstShaSumMapping(remoteFilesWithHashes, await getShaSumMappingFromUrl(shaSum256File.url, filesAreNodeJSArtifacts ? '' : '*'));
+
+  if (filesAreNodeJSArtifacts) {
+    const remoteFilesWithSha1Hashes = await Promise.all(remoteFilesToHash.map(async (file) => {
+      return {
+        hash: await getUrlHash(file.url, 'sha1'),
+        ...file
+      };
+    }));
+
+    await validateFileHashesAgainstShaSumMapping(remoteFilesWithSha1Hashes, await getShaSumMappingFromUrl(shaSum1File.url, filesAreNodeJSArtifacts ? '' : '*'));
+  }
 }
 
 makeRelease(args.validateRelease);
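One detail worth noting in the new code: the fileNamePrefix argument passed to getShaSumMappingFromUrl is '*' for Electron artifacts and '' for Node.js artifacts, because checksum files written in binary mode prefix each filename with an asterisk. A worked example of the mapping that parsing produces, with placeholder hashes and filenames rather than real release data:

// Standalone illustration of getShaSumMappingFromUrl's parsing logic,
// applied to a fake SHASUMS256.txt body (hashes and filenames made up).
const raw = [
  'aaaa1111 *electron-v13.0.0-darwin-x64.zip',
  'bbbb2222 *electron-v13.0.0-win32-x64.zip'
].join('\n');

const fileNamePrefix = '*'; // would be '' for Node.js artifacts
const mapping = raw.split('\n').map(line => line.trim()).filter(Boolean)
  .reduce((map, line) => {
    const [sha, file] = line.split(' ');
    map[file.slice(fileNamePrefix.length)] = sha; // strip the binary-mode '*'
    return map;
  }, {});

console.log(mapping['electron-v13.0.0-darwin-x64.zip']); // => 'aaaa1111'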

--- a/yarn.lock
+++ b/yarn.lock

@@ -7474,13 +7474,6 @@ strip-json-comments@~2.0.1:
   resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a"
   integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo=
 
-sumchecker@^2.0.2:
-  version "2.0.2"
-  resolved "https://registry.yarnpkg.com/sumchecker/-/sumchecker-2.0.2.tgz#0f42c10e5d05da5d42eea3e56c3399a37d6c5b3e"
-  integrity sha1-D0LBDl0F2l1C7qPlbDOZo31sWz4=
-  dependencies:
-    debug "^2.2.0"
-
 supports-color@^4.1.0:
   version "4.5.0"
   resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-4.5.0.tgz#be7a0de484dec5c5cddf8b3d59125044912f635b"