build: upload to AZ as well as S3 (#33573)

* build: upload to AZ as well as S3

* fix: provide env to azput
This commit is contained in:
Samuel Attard 2022-04-04 02:32:57 -07:00 committed by GitHub
parent 0ac6d74536
commit 204b53e7b8
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
10 changed files with 345 additions and 93 deletions

View file

@ -17,8 +17,8 @@ const pkgVersion = `v${pkg.version}`;
const path = require('path');
const temp = require('temp').track();
const { URL } = require('url');
const { BlobServiceClient } = require('@azure/storage-blob');
const { Octokit } = require('@octokit/rest');
const AWS = require('aws-sdk');
require('colors');
const pass = '✓'.green;
@ -80,6 +80,8 @@ async function validateReleaseAssets (release, validatingRelease) {
}
const s3RemoteFiles = s3RemoteFilesForVersion(release.tag_name);
await verifyShasumsForRemoteFiles(s3RemoteFiles, true);
const azRemoteFiles = azRemoteFilesForVersion(release.tag_name);
await verifyShasumsForRemoteFiles(azRemoteFiles, true);
}
}
@ -181,26 +183,36 @@ function assetsForVersion (version, validatingRelease) {
return patterns;
}
// Relative paths of the release artifacts that are mirrored to cloud storage
// (the S3 bucket and artifacts.electronjs.org) for a given release version.
// Order matters only insofar as callers zip these with remote URL prefixes.
const cloudStoreFilePaths = (version) => {
  const tarballs = [
    `iojs-${version}-headers.tar.gz`,
    `iojs-${version}.tar.gz`,
    `node-${version}.tar.gz`
  ];
  const libs = ['node.lib', 'x64/node.lib'];
  // Windows import libraries, published under both historical names.
  for (const libName of ['iojs.lib', 'node.lib']) {
    for (const arch of ['win-x64', 'win-x86', 'win-arm64']) {
      libs.push(`${arch}/${libName}`);
    }
  }
  libs.push('arm64/node.lib');
  return [...tarballs, ...libs, 'SHASUMS.txt', 'SHASUMS256.txt'];
};
// Describe the S3-mirrored artifacts for `version` as { file, url } pairs
// (used by verifyShasumsForRemoteFiles). The rendered diff had left both the
// deleted inline `filePaths` array and two `return` statements in place,
// which is a syntax error; this is the post-commit body, delegating the
// path list to the shared cloudStoreFilePaths helper.
function s3RemoteFilesForVersion (version) {
  const bucket = 'https://gh-contractor-zcbenz.s3.amazonaws.com/';
  const versionPrefix = `${bucket}atom-shell/dist/${version}/`;
  return cloudStoreFilePaths(version).map((filePath) => ({
    file: filePath,
    url: `${versionPrefix}${filePath}`
  }));
}
// Describe the artifacts.electronjs.org (Azure CDN) mirrored artifacts for
// `version` as { file, url } pairs, parallel to s3RemoteFilesForVersion.
// The rendered hunk was truncated before the function's closing brace;
// restored here so the displayed code is well-formed.
function azRemoteFilesForVersion (version) {
  const azCDN = 'https://artifacts.electronjs.org/headers/';
  const versionPrefix = `${azCDN}dist/${version}/`;
  return cloudStoreFilePaths(version).map((filePath) => ({
    file: filePath,
    url: `${versionPrefix}${filePath}`
  }));
}
@ -221,49 +233,39 @@ function runScript (scriptName, scriptArgs, cwd) {
}
// Upload the Node SHASUMS file for this release to artifacts.electronjs.org
// by shelling out to the upload-node-checksums.py helper.
// The rendered diff had kept both the old "to S3." and the new
// "to artifacts.electronjs.org." log lines; only the new ones belong here.
function uploadNodeShasums () {
  console.log('Uploading Node SHASUMS file to artifacts.electronjs.org.');
  const scriptPath = path.join(ELECTRON_DIR, 'script', 'release', 'uploaders', 'upload-node-checksums.py');
  runScript(scriptPath, ['-v', pkgVersion]);
  console.log(`${pass} Done uploading Node SHASUMS file to artifacts.electronjs.org.`);
}
// Upload the releases index.json to artifacts.electronjs.org via the
// upload-index-json.py helper.
// The rendered diff had kept both the old "to S3." and the new
// "to artifacts.electronjs.org." log lines; only the new ones belong here.
function uploadIndexJson () {
  console.log('Uploading index.json to artifacts.electronjs.org.');
  const scriptPath = path.join(ELECTRON_DIR, 'script', 'release', 'uploaders', 'upload-index-json.py');
  runScript(scriptPath, [pkgVersion]);
  console.log(`${pass} Done uploading index.json to artifacts.electronjs.org.`);
}
async function mergeShasums (pkgVersion) {
// Download individual checksum files for Electron zip files from S3,
// Download individual checksum files for Electron zip files from artifact storage,
// concatenate them, and upload to GitHub.
const bucket = process.env.ELECTRON_S3_BUCKET;
const accessKeyId = process.env.ELECTRON_S3_ACCESS_KEY;
const secretAccessKey = process.env.ELECTRON_S3_SECRET_KEY;
if (!bucket || !accessKeyId || !secretAccessKey) {
throw new Error('Please set the $ELECTRON_S3_BUCKET, $ELECTRON_S3_ACCESS_KEY, and $ELECTRON_S3_SECRET_KEY environment variables');
const connectionString = process.env.ELECTRON_ARTIFACTS_BLOB_STORAGE;
if (!connectionString) {
throw new Error('Please set the $ELECTRON_ARTIFACTS_BLOB_STORAGE environment variable');
}
const s3 = new AWS.S3({
apiVersion: '2006-03-01',
accessKeyId,
secretAccessKey,
region: 'us-west-2'
const blobServiceClient = BlobServiceClient.fromConnectionString(connectionString);
const containerClient = blobServiceClient.getContainerClient('checksums-scratchpad');
const blobsIter = containerClient.listBlobsFlat({
prefix: `${pkgVersion}/`
});
const objects = await s3.listObjectsV2({
Bucket: bucket,
Prefix: `atom-shell/tmp/${pkgVersion}/`,
Delimiter: '/'
}).promise();
const shasums = [];
for (const obj of objects.Contents) {
if (obj.Key.endsWith('.sha256sum')) {
const data = await s3.getObject({
Bucket: bucket,
Key: obj.Key
}).promise();
shasums.push(data.Body.toString('ascii').trim());
for await (const blob of blobsIter) {
if (blob.name.endsWith('.sha256sum')) {
const blobClient = containerClient.getBlockBlobClient(blob.name);
const response = await blobClient.downloadToBuffer();
shasums.push(response.toString('ascii').trim());
}
}
return shasums.join('\n');

View file

@ -9,8 +9,8 @@ import urllib2
sys.path.append(
os.path.abspath(os.path.dirname(os.path.abspath(__file__)) + "/../.."))
from lib.config import s3_config
from lib.util import s3put, scoped_cwd, safe_mkdir, get_out_dir, ELECTRON_DIR
from lib.util import store_artifact, scoped_cwd, safe_mkdir, get_out_dir, \
ELECTRON_DIR
OUT_DIR = get_out_dir()
@ -59,9 +59,7 @@ def main():
with open(index_json, "w") as f:
f.write(new_content)
bucket, access_key, secret_key = s3_config()
s3put(bucket, access_key, secret_key, OUT_DIR, 'atom-shell/dist',
[index_json])
store_artifact(OUT_DIR, 'atom-shell/dist', [index_json])
if __name__ == '__main__':

View file

@ -10,8 +10,7 @@ import tempfile
sys.path.append(
os.path.abspath(os.path.dirname(os.path.abspath(__file__)) + "/../.."))
from lib.config import s3_config
from lib.util import download, rm_rf, s3put, safe_mkdir
from lib.util import download, rm_rf, store_artifact, safe_mkdir
DIST_URL = 'https://electronjs.org/headers/'
@ -30,9 +29,8 @@ def main():
]
if args.target_dir is None:
bucket, access_key, secret_key = s3_config()
s3put(bucket, access_key, secret_key, directory,
'atom-shell/dist/{0}'.format(args.version), checksums)
store_artifact(directory, 'atom-shell/dist/{0}'.format(args.version),
checksums)
else:
copy_files(checksums, args.target_dir)

View file

@ -9,8 +9,9 @@ import sys
sys.path.append(
os.path.abspath(os.path.dirname(os.path.abspath(__file__)) + "/../.."))
from lib.config import PLATFORM, get_target_arch, s3_config
from lib.util import safe_mkdir, scoped_cwd, s3put, get_out_dir, get_dist_dir
from lib.config import PLATFORM, get_target_arch
from lib.util import safe_mkdir, scoped_cwd, store_artifact, get_out_dir, \
get_dist_dir
DIST_DIR = get_dist_dir()
OUT_DIR = get_out_dir()
@ -26,9 +27,8 @@ HEADER_TAR_NAMES = [
def main():
args = parse_args()
# Upload node's headers to S3.
bucket, access_key, secret_key = s3_config()
upload_node(bucket, access_key, secret_key, args.version)
# Upload node's headers to artifact storage.
upload_node(args.version)
def parse_args():
@ -38,17 +38,17 @@ def parse_args():
return parser.parse_args()
def upload_node(bucket, access_key, secret_key, version):
def upload_node(version):
with scoped_cwd(GEN_DIR):
generated_tar = os.path.join(GEN_DIR, 'node_headers.tar.gz')
for header_tar in HEADER_TAR_NAMES:
versioned_header_tar = header_tar.format(version)
shutil.copy2(generated_tar, os.path.join(GEN_DIR, versioned_header_tar))
s3put(bucket, access_key, secret_key, GEN_DIR,
'atom-shell/dist/{0}'.format(version), glob.glob('node-*.tar.gz'))
s3put(bucket, access_key, secret_key, GEN_DIR,
'atom-shell/dist/{0}'.format(version), glob.glob('iojs-*.tar.gz'))
store_artifact(GEN_DIR, 'atom-shell/dist/{0}'.format(version),
glob.glob('node-*.tar.gz'))
store_artifact(GEN_DIR, 'atom-shell/dist/{0}'.format(version),
glob.glob('iojs-*.tar.gz'))
if PLATFORM == 'win32':
if get_target_arch() == 'ia32':
@ -73,16 +73,14 @@ def upload_node(bucket, access_key, secret_key, version):
shutil.copy2(electron_lib, v4_node_lib)
# Upload the node.lib.
s3put(bucket, access_key, secret_key, DIST_DIR,
'atom-shell/dist/{0}'.format(version), [node_lib])
store_artifact(DIST_DIR, 'atom-shell/dist/{0}'.format(version), [node_lib])
# Upload the iojs.lib.
s3put(bucket, access_key, secret_key, DIST_DIR,
'atom-shell/dist/{0}'.format(version), [iojs_lib])
store_artifact(DIST_DIR, 'atom-shell/dist/{0}'.format(version), [iojs_lib])
# Upload the v4 node.lib.
s3put(bucket, access_key, secret_key, DIST_DIR,
'atom-shell/dist/{0}'.format(version), [v4_node_lib])
store_artifact(DIST_DIR, 'atom-shell/dist/{0}'.format(version),
[v4_node_lib])
if __name__ == '__main__':

View file

@ -14,8 +14,8 @@ def is_fs_case_sensitive():
sys.path.append(
os.path.abspath(os.path.dirname(os.path.abspath(__file__)) + "/../.."))
from lib.config import PLATFORM, s3_config
from lib.util import get_electron_branding, execute, s3put, \
from lib.config import PLATFORM
from lib.util import get_electron_branding, execute, store_artifact, \
get_out_dir, ELECTRON_DIR
RELEASE_DIR = get_out_dir()
@ -76,16 +76,15 @@ def main():
for f in files:
assert os.path.exists(f)
bucket, access_key, secret_key = s3_config()
upload_symbols(bucket, access_key, secret_key, files)
upload_symbols(files)
def run_symstore(pdb, dest, product):
  # Register one PDB file with the symbol store at `dest` via the Windows
  # `symstore` tool, recursively (/r), tagged with the given product name.
  symstore_cmd = ['symstore', 'add', '/r', '/f', pdb, '/s', dest, '/t', product]
  execute(symstore_cmd)
def upload_symbols(files):
  # Push the collected symbol files to artifact storage under
  # atom-shell/symbols. The rendered diff had left the old
  # (bucket, access_key, secret_key, files) signature and a dangling
  # s3put(...) fragment above this definition; only the new
  # store_artifact-based version belongs here.
  store_artifact(SYMBOLS_DIR, 'atom-shell/symbols', files)

View file

@ -16,10 +16,10 @@ sys.path.append(
os.path.abspath(os.path.dirname(os.path.abspath(__file__)) + "/../.."))
from zipfile import ZipFile
from lib.config import PLATFORM, get_target_arch,s3_config, \
from lib.config import PLATFORM, get_target_arch, \
get_zip_name, enable_verbose_mode, get_platform_key
from lib.util import get_electron_branding, execute, get_electron_version, \
s3put, get_electron_exec, get_out_dir, \
store_artifact, get_electron_exec, get_out_dir, \
SRC_DIR, ELECTRON_DIR, TS_NODE
@ -342,14 +342,10 @@ def upload_electron(release, file_path, args):
# if upload_to_s3 is set, skip github upload.
if args.upload_to_s3:
bucket, access_key, secret_key = s3_config()
key_prefix = 'electron-artifacts/{0}_{1}'.format(args.version,
args.upload_timestamp)
s3put(bucket, access_key, secret_key, os.path.dirname(file_path),
key_prefix, [file_path])
store_artifact(os.path.dirname(file_path), key_prefix, [file_path])
upload_sha256_checksum(args.version, file_path, key_prefix)
s3url = 'https://gh-contractor-zcbenz.s3.amazonaws.com'
print('{0} uploaded to {1}/{2}/{0}'.format(filename, s3url, key_prefix))
return
# Upload the file.
@ -369,7 +365,6 @@ def upload_io_to_github(release, filename, filepath, version):
def upload_sha256_checksum(version, file_path, key_prefix=None):
bucket, access_key, secret_key = s3_config()
checksum_path = '{}.sha256sum'.format(file_path)
if key_prefix is None:
key_prefix = 'atom-shell/tmp/{0}'.format(version)
@ -380,8 +375,7 @@ def upload_sha256_checksum(version, file_path, key_prefix=None):
filename = os.path.basename(file_path)
with open(checksum_path, 'w') as checksum:
checksum.write('{} *{}'.format(sha256.hexdigest(), filename))
s3put(bucket, access_key, secret_key, os.path.dirname(checksum_path),
key_prefix, [checksum_path])
store_artifact(os.path.dirname(checksum_path), key_prefix, [checksum_path])
def get_release(version):