build: use aws-sdk-js instead of boto (#25693)

This commit is contained in:
Jeremy Rose 2020-09-30 13:30:10 -07:00 committed by GitHub
parent e9876aecf9
commit 7027217dbb
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
10 changed files with 161 additions and 138 deletions

40
script/lib/s3put.js Normal file
View file

@ -0,0 +1,40 @@
/* eslint-disable camelcase */
// Minimal S3 uploader driven by command-line flags:
//   --bucket, --prefix, --key_prefix, --grant, followed by the files to upload.
// Invoked by the Python release tooling's s3put() helper.
const AWS = require('aws-sdk');
const fs = require('fs');
const path = require('path');
AWS.config.update({ region: 'us-west-2' });
const s3 = new AWS.S3({ apiVersion: '2006-03-01' });
// NOTE(review): no credentials are passed explicitly — presumably the SDK picks up
// AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY from the environment set by the caller.
const args = require('minimist')(process.argv.slice(2));
let { bucket, prefix = '/', key_prefix = '', grant, _: files } = args;
// Normalize prefix to an absolute path that ends with the platform separator so it
// can be stripped from each file's absolute path when computing its S3 key.
if (prefix && !prefix.endsWith(path.sep)) prefix = path.resolve(prefix) + path.sep;
/**
 * Maps a local file path to its S3 object key: strips the configured `prefix`
 * directory (keeping a single leading separator), prepends `key_prefix`, and
 * converts platform path separators to '/'.
 *
 * Fix: String#replace with a string pattern only replaces the FIRST occurrence,
 * so on Windows nested paths kept '\' in the key; split/join replaces them all.
 * Also uses slice() instead of the deprecated substr().
 *
 * @param {string} file - path to a local file (made absolute via path.resolve)
 * @returns {string} the S3 key for the file
 */
function filenameToKey (file) {
  file = path.resolve(file);
  if (file.startsWith(prefix)) file = file.slice(prefix.length - 1);
  return key_prefix + file.split(path.sep).join('/');
}
// Set when any upload fails; used to pick the process exit code at the end.
let anErrorOccurred = false;

/**
 * Uploads the queued files to S3 one at a time, then invokes `done`.
 * A failed upload is logged and recorded, but the remaining files are
 * still attempted.
 *
 * @param {Function} done - invoked once the file queue is empty
 */
function next (done) {
  const file = files.shift();
  if (!file) {
    done();
    return;
  }
  const key = filenameToKey(file);
  console.log(`Uploading '${file}' to bucket '${bucket}' with key '${key}'...`);
  const params = {
    Bucket: bucket,
    Key: key,
    Body: fs.createReadStream(file),
    ACL: grant
  };
  s3.upload(params, function (err, data) {
    if (err) {
      console.error(err);
      anErrorOccurred = true;
    }
    next(done);
  });
}

// Kick off the upload chain; exit non-zero if anything failed.
next(function () {
  process.exit(anErrorOccurred ? 1 : 0);
});

View file

@ -26,8 +26,6 @@ ELECTRON_DIR = os.path.abspath(
os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
)
SRC_DIR = os.path.abspath(os.path.join(__file__, '..', '..', '..', '..'))
BOTO_DIR = os.path.abspath(os.path.join(__file__, '..', '..', '..', 'vendor',
'boto'))
NPM = 'npm'
if sys.platform in ['win32', 'cygwin']:
@ -195,33 +193,19 @@ def get_electron_version():
with open(version_file) as f:
return 'v' + f.read().strip()
def s3put(bucket, access_key, secret_key, prefix, key_prefix, files):
  """Upload |files| to the given S3 |bucket| via script/lib/s3put.js.

  The AWS credentials are handed to the node helper through the environment
  (AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY) rather than on the command line,
  so they do not leak into process listings or logs.

  Note: this span of the diff interleaved the removed boto helpers
  (boto_path_dirs, run_boto_script and the old s3put body, which referenced
  the deleted BOTO_DIR constant) with the new implementation; only the new
  aws-sdk-js-backed implementation is kept here.
  """
  env = os.environ.copy()
  env['AWS_ACCESS_KEY_ID'] = access_key
  env['AWS_SECRET_ACCESS_KEY'] = secret_key
  output = execute([
    'node',
    os.path.join(os.path.dirname(__file__), 's3put.js'),
    '--bucket', bucket,
    '--prefix', prefix,
    '--key_prefix', key_prefix,
    '--grant', 'public-read',
  ] + files, env)
  print(output)
def add_exec_bit(filename):

View file

@ -1,47 +0,0 @@
#!/usr/bin/env python
# Download individual checksum files for Electron zip files from S3,
# concatenate them, and upload to GitHub.
from __future__ import print_function
import argparse
import os
import sys
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + "/..")
from lib.config import s3_config
from lib.util import boto_path_dirs
sys.path.extend(boto_path_dirs())
from boto.s3.connection import S3Connection
def main():
  """Download the per-file .sha256sum objects stored under the release's
  temporary S3 prefix, and print their contents one per line.

  Returns 0 on success, 1 if the configured bucket does not exist.
  """
  args = parse_args()
  bucket_name, access_key, secret_key = s3_config()
  s3 = S3Connection(access_key, secret_key)
  bucket = s3.get_bucket(bucket_name)
  if bucket is None:
    print('S3 bucket "{}" does not exist!'.format(bucket_name), file=sys.stderr)
    return 1
  # Checksum files for this version live under a flat temporary prefix.
  prefix = 'atom-shell/tmp/{0}/'.format(args.version)
  shasums = []
  for s3_object in bucket.list(prefix, delimiter='/'):
    if s3_object.key.endswith('.sha256sum'):
      shasums.append(s3_object.get_contents_as_string().strip())
  print('\n'.join(shasums))
  return 0
def parse_args():
  """Parse command-line options; --version (-v) is mandatory."""
  parser = argparse.ArgumentParser(description='Upload SHASUMS files to GitHub')
  parser.add_argument(
      '-v', '--version', required=True, help='Specify the version')
  return parser.parse_args()
# Allow running this script directly; propagate main()'s status as the exit code.
if __name__ == '__main__':
  sys.exit(main())

View file

@ -22,6 +22,7 @@ const sumchecker = require('sumchecker');
const temp = require('temp').track();
const { URL } = require('url');
const { Octokit } = require('@octokit/rest');
const AWS = require('aws-sdk');
require('colors');
const pass = '✓'.green;
@ -218,6 +219,41 @@ function uploadIndexJson () {
console.log(`${pass} Done uploading index.json to S3.`);
}
/**
 * Downloads the individual .sha256sum checksum files for the given package
 * version from S3 and concatenates their contents.
 *
 * @param {string} pkgVersion - release version; objects are listed under
 *   `atom-shell/tmp/${pkgVersion}/`
 * @returns {Promise<string>} newline-joined checksum lines
 * @throws {Error} if the ELECTRON_S3_* environment variables are not set
 */
async function mergeShasums (pkgVersion) {
  // Download individual checksum files for Electron zip files from S3,
  // concatenate them, and upload to GitHub.
  const bucket = process.env.ELECTRON_S3_BUCKET;
  const accessKeyId = process.env.ELECTRON_S3_ACCESS_KEY;
  const secretAccessKey = process.env.ELECTRON_S3_SECRET_KEY;
  if (!bucket || !accessKeyId || !secretAccessKey) {
    throw new Error('Please set the $ELECTRON_S3_BUCKET, $ELECTRON_S3_ACCESS_KEY, and $ELECTRON_S3_SECRET_KEY environment variables');
  }
  const s3 = new AWS.S3({
    apiVersion: '2006-03-01',
    accessKeyId,
    secretAccessKey,
    region: 'us-west-2'
  });
  // NOTE(review): listObjectsV2 returns at most 1000 keys per call; assumes a
  // release never has more checksum files than that — confirm before relying on it.
  const objects = await s3.listObjectsV2({
    Bucket: bucket,
    Prefix: `atom-shell/tmp/${pkgVersion}/`,
    Delimiter: '/'
  }).promise();
  const shasums = [];
  for (const obj of objects.Contents) {
    if (obj.Key.endsWith('.sha256sum')) {
      const data = await s3.getObject({
        Bucket: bucket,
        Key: obj.Key
      }).promise();
      // Fix: getObject().promise() resolves to a response object, not a Buffer;
      // calling toString() on it yielded "[object Object]". The file contents
      // live in the `Body` property.
      shasums.push(data.Body.toString('ascii').trim());
    }
  }
  return shasums.join('\n');
}
async function createReleaseShasums (release) {
const fileName = 'SHASUMS256.txt';
const existingAssets = release.assets.filter(asset => asset.name === fileName);
@ -232,8 +268,7 @@ async function createReleaseShasums (release) {
});
}
console.log(`Creating and uploading the release ${fileName}.`);
const scriptPath = path.join(ELECTRON_DIR, 'script', 'release', 'merge-electron-checksums.py');
const checksums = runScript(scriptPath, ['-v', pkgVersion]);
const checksums = await mergeShasums(pkgVersion);
console.log(`${pass} Generated release SHASUMS.`);
const filePath = await saveShaSumFile(checksums, fileName);