build: remove S3 uploads (#34104)

Keeley Hammond 2022-05-05 21:40:34 -07:00 committed by GitHub
parent a401360057
commit 0696320d28
11 changed files with 16 additions and 98 deletions


@@ -20,12 +20,7 @@ let anErrorOccurred = false;
 function next (done) {
   const file = files.shift();
   if (!file) return done();
-  let key = filenameToKey(file);
-  // TODO: When we drop s3put, migrate the key to not include atom-shell in the callsites
-  key = key.replace('atom-shell/dist/', 'headers/dist/');
-  key = key.replace('atom-shell/symbols/', 'symbols/');
-  key = key.replace('atom-shell/tmp/', 'checksums-scratchpad/');
-  key = key.replace('electron-artifacts/', 'release-builds/');
+  const key = filenameToKey(file);
   const [containerName, ...keyPath] = key.split('/');
   const blobKey = keyPath.join('/');


@@ -53,17 +53,6 @@ def get_env_var(name):
   return value
-def s3_config():
-  config = (get_env_var('S3_BUCKET'),
-            get_env_var('S3_ACCESS_KEY'),
-            get_env_var('S3_SECRET_KEY'))
-  message = ('Error: Please set the $ELECTRON_S3_BUCKET, '
-             '$ELECTRON_S3_ACCESS_KEY, and '
-             '$ELECTRON_S3_SECRET_KEY environment variables')
-  assert all(len(c) for c in config), message
-  return config
 def enable_verbose_mode():
   print('Running in verbose mode')
   global verbose_mode
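
The deleted s3_config() read its settings through get_env_var(), which resolves names against ELECTRON_-prefixed environment variables (hence the $ELECTRON_S3_BUCKET wording in its assertion message). A minimal sketch of that lookup, assuming a plain os.environ read with no fallbacks:

    import os

    # Sketch only: resolves 'S3_BUCKET' to the ELECTRON_S3_BUCKET environment
    # variable, which is what the removed s3_config() relied on.
    def get_env_var(name):
        return os.environ.get('ELECTRON_' + name, '')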


@@ -1,40 +0,0 @@
-/* eslint-disable camelcase */
-const AWS = require('aws-sdk');
-const fs = require('fs');
-const path = require('path');
-AWS.config.update({ region: 'us-west-2' });
-const s3 = new AWS.S3({ apiVersion: '2006-03-01' });
-const args = require('minimist')(process.argv.slice(2));
-let { bucket, prefix = '/', key_prefix = '', grant, _: files } = args;
-if (prefix && !prefix.endsWith(path.sep)) prefix = path.resolve(prefix) + path.sep;
-function filenameToKey (file) {
-  file = path.resolve(file);
-  if (file.startsWith(prefix)) file = file.substr(prefix.length - 1);
-  return key_prefix + (path.sep === '\\' ? file.replace(/\\/g, '/') : file);
-}
-let anErrorOccurred = false;
-function next (done) {
-  const file = files.shift();
-  if (!file) return done();
-  const key = filenameToKey(file);
-  console.log(`Uploading '${file}' to bucket '${bucket}' with key '${key}'...`);
-  s3.upload({
-    Bucket: bucket,
-    Key: key,
-    Body: fs.createReadStream(file),
-    ACL: grant
-  }, (err, data) => {
-    if (err) {
-      console.error(err);
-      anErrorOccurred = true;
-    }
-    next(done);
-  });
-}
-next(() => {
-  process.exit(anErrorOccurred ? 1 : 0);
-});
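
Before its removal, s3put.js derived each object key by stripping the resolved --prefix directory from the file path, normalising Windows separators, and prepending --key_prefix. A rough Python rendering of that mapping; the prefix and key_prefix values here are illustrative only:

    import os

    # Rough equivalent of filenameToKey() from the deleted script.
    prefix = os.path.abspath('out') + os.sep       # resolved --prefix (illustrative)
    key_prefix = 'atom-shell/dist/v19.0.0'         # --key_prefix (illustrative)

    def filename_to_key(file):
        file = os.path.abspath(file)
        if file.startswith(prefix):
            # Keep the leading separator, as the original substr(prefix.length - 1) did.
            file = file[len(prefix) - 1:]
        return key_prefix + file.replace(os.sep, '/')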


@@ -15,7 +15,7 @@ except ImportError:
   from urllib2 import urlopen
 import zipfile
-from lib.config import is_verbose_mode, s3_config
+from lib.config import is_verbose_mode
 ELECTRON_DIR = os.path.abspath(
   os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
@@ -156,26 +156,9 @@ def get_electron_version():
     return 'v' + f.read().strip()
 def store_artifact(prefix, key_prefix, files):
-  # Legacy S3 Bucket
-  s3put(prefix, key_prefix, files)
-  # New AZ Storage
+  # Azure Storage
   azput(prefix, key_prefix, files)
-def s3put(prefix, key_prefix, files):
-  bucket, access_key, secret_key = s3_config()
-  env = os.environ.copy()
-  env['AWS_ACCESS_KEY_ID'] = access_key
-  env['AWS_SECRET_ACCESS_KEY'] = secret_key
-  output = execute([
-    'node',
-    os.path.join(os.path.dirname(__file__), 's3put.js'),
-    '--bucket', bucket,
-    '--prefix', prefix,
-    '--key_prefix', key_prefix,
-    '--grant', 'public-read',
-  ] + files, env)
-  print(output)
 def azput(prefix, key_prefix, files):
   env = os.environ.copy()
   output = execute([
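
With s3put() removed, store_artifact() is now a straight pass-through to azput(), so existing release callers keep the same signature and only the upload destination changes. A hypothetical call site, with illustrative arguments:

    # Hypothetical caller; the directory and key prefix are illustrative only.
    store_artifact('/tmp/electron-checksums', 'checksums-scratchpad', ['SHASUMS256.txt'])
    # Previously this uploaded to both the legacy S3 bucket and Azure Storage;
    # after this change it only runs azput().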