build: upload to AZ as well as S3 (#33573)
* build: upload to AZ as well as S3
* fix: provide env to azput
This commit is contained in:
parent
0ac6d74536
commit
204b53e7b8
10 changed files with 345 additions and 93 deletions
48
script/lib/azput.js
Normal file
@@ -0,0 +1,48 @@
+/* eslint-disable camelcase */
+const { BlobServiceClient } = require('@azure/storage-blob');
+const fs = require('fs');
+const path = require('path');
+
+const blobServiceClient = BlobServiceClient.fromConnectionString(process.env.ELECTRON_ARTIFACTS_BLOB_STORAGE);
+
+const args = require('minimist')(process.argv.slice(2));
+
+let { prefix = '/', key_prefix = '', _: files } = args;
+if (prefix && !prefix.endsWith(path.sep)) prefix = path.resolve(prefix) + path.sep;
+
+function filenameToKey (file) {
+  file = path.resolve(file);
+  if (file.startsWith(prefix)) file = file.substr(prefix.length - 1);
+  return key_prefix + (path.sep === '\\' ? file.replace(/\\/g, '/') : file);
+}
+
+let anErrorOccurred = false;
+function next (done) {
+  const file = files.shift();
+  if (!file) return done();
+  let key = filenameToKey(file);
+  // TODO: When we drop s3put, migrate the key to not include atom-shell in the callsites
+  key = key.replace('atom-shell/dist/', 'headers/dist/');
+  key = key.replace('atom-shell/symbols/', 'symbols/');
+  key = key.replace('atom-shell/tmp/', 'checksums-scratchpad/');
+  key = key.replace('electron-artifacts/', 'release-builds/');
+
+  const [containerName, ...keyPath] = key.split('/');
+  const blobKey = keyPath.join('/');
+  console.log(`Uploading '${file}' to container '${containerName}' with key '${blobKey}'...`);
+
+  const containerClient = blobServiceClient.getContainerClient(containerName);
+  const blockBlobClient = containerClient.getBlockBlobClient(blobKey);
+  blockBlobClient.uploadFile(file)
+    .then((uploadBlobResponse) => {
+      console.log(`Upload block blob ${blobKey} successfully: https://artifacts.electronjs.org/${key}`, uploadBlobResponse.requestId);
+    })
+    .catch((err) => {
+      console.error(err);
+      anErrorOccurred = true;
+    })
+    .then(() => next(done));
+}
next(() => {
+  process.exit(anErrorOccurred ? 1 : 0);
+});
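The new script reads its connection string from the ELECTRON_ARTIFACTS_BLOB_STORAGE environment variable, which is why the second half of this commit ("fix: provide env to azput") passes the caller's environment through. A minimal sketch of how a Python caller can invoke it, mirroring the azput() helper added to script/lib/util.py below but using subprocess directly instead of Electron's execute() helper; the paths and version string are illustrative only:

import os
import subprocess

def azput(prefix, key_prefix, files):
  # Copy the environment so azput.js can see ELECTRON_ARTIFACTS_BLOB_STORAGE.
  env = os.environ.copy()
  output = subprocess.check_output(
      ['node', os.path.join('script', 'lib', 'azput.js'),
       '--prefix', prefix,
       '--key_prefix', key_prefix] + files,
      env=env)
  print(output)

# Example (illustrative paths):
# azput('/tmp/out', 'atom-shell/dist/v20.0.0', ['/tmp/out/node.lib'])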
script/lib/util.py
@@ -15,7 +15,7 @@ except ImportError:
     from urllib2 import urlopen
 import zipfile
 
-from lib.config import is_verbose_mode
+from lib.config import is_verbose_mode, s3_config
 
 ELECTRON_DIR = os.path.abspath(
   os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
@@ -155,7 +155,14 @@ def get_electron_version():
   with open(version_file) as f:
     return 'v' + f.read().strip()
 
-def s3put(bucket, access_key, secret_key, prefix, key_prefix, files):
+def store_artifact(prefix, key_prefix, files):
+  # Legacy S3 Bucket
+  s3put(prefix, key_prefix, files)
+  # New AZ Storage
+  azput(prefix, key_prefix, files)
+
+def s3put(prefix, key_prefix, files):
+  bucket, access_key, secret_key = s3_config()
   env = os.environ.copy()
   env['AWS_ACCESS_KEY_ID'] = access_key
   env['AWS_SECRET_ACCESS_KEY'] = secret_key
@@ -169,6 +176,16 @@ def s3put(bucket, access_key, secret_key, prefix, key_prefix, files):
   ] + files, env)
   print(output)
 
+def azput(prefix, key_prefix, files):
+  env = os.environ.copy()
+  output = execute([
+    'node',
+    os.path.join(os.path.dirname(__file__), 'azput.js'),
+    '--prefix', prefix,
+    '--key_prefix', key_prefix,
+  ] + files, env)
+  print(output)
+
 def get_out_dir():
   out_dir = 'Debug'
   override = os.environ.get('ELECTRON_OUT_DIR')
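store_artifact() is now the single entry point for call sites: every artifact is written twice, to the legacy S3 bucket under its old key and to Azure under a rewritten key. The rewrite and the container/blob split happen in azput.js; a minimal Python sketch of that mapping, with the table copied from the key.replace() calls above and map_key a hypothetical name used only for illustration:

# Mirrors the key rewriting in script/lib/azput.js.
KEY_REWRITES = [
  ('atom-shell/dist/', 'headers/dist/'),
  ('atom-shell/symbols/', 'symbols/'),
  ('atom-shell/tmp/', 'checksums-scratchpad/'),
  ('electron-artifacts/', 'release-builds/'),
]

def map_key(s3_key):
  for old, new in KEY_REWRITES:
    s3_key = s3_key.replace(old, new, 1)
  # The first path segment is the Azure container, the rest the blob name.
  container, _, blob = s3_key.partition('/')
  return container, blob

# map_key('atom-shell/dist/v20.0.0/node.lib')
# -> ('headers', 'dist/v20.0.0/node.lib'), served as
# https://artifacts.electronjs.org/headers/dist/v20.0.0/node.lib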
script/release/release.js
@@ -17,8 +17,8 @@ const pkgVersion = `v${pkg.version}`;
 const path = require('path');
 const temp = require('temp').track();
 const { URL } = require('url');
+const { BlobServiceClient } = require('@azure/storage-blob');
 const { Octokit } = require('@octokit/rest');
-const AWS = require('aws-sdk');
 
 require('colors');
 const pass = '✓'.green;
@@ -80,6 +80,8 @@ async function validateReleaseAssets (release, validatingRelease) {
     }
     const s3RemoteFiles = s3RemoteFilesForVersion(release.tag_name);
     await verifyShasumsForRemoteFiles(s3RemoteFiles, true);
+    const azRemoteFiles = azRemoteFilesForVersion(release.tag_name);
+    await verifyShasumsForRemoteFiles(azRemoteFiles, true);
   }
 }
@@ -181,26 +183,36 @@ function assetsForVersion (version, validatingRelease) {
   return patterns;
 }
 
+const cloudStoreFilePaths = (version) => [
+  `iojs-${version}-headers.tar.gz`,
+  `iojs-${version}.tar.gz`,
+  `node-${version}.tar.gz`,
+  'node.lib',
+  'x64/node.lib',
+  'win-x64/iojs.lib',
+  'win-x86/iojs.lib',
+  'win-arm64/iojs.lib',
+  'win-x64/node.lib',
+  'win-x86/node.lib',
+  'win-arm64/node.lib',
+  'arm64/node.lib',
+  'SHASUMS.txt',
+  'SHASUMS256.txt'
+];
+
 function s3RemoteFilesForVersion (version) {
   const bucket = 'https://gh-contractor-zcbenz.s3.amazonaws.com/';
   const versionPrefix = `${bucket}atom-shell/dist/${version}/`;
-  const filePaths = [
-    `iojs-${version}-headers.tar.gz`,
-    `iojs-${version}.tar.gz`,
-    `node-${version}.tar.gz`,
-    'node.lib',
-    'x64/node.lib',
-    'win-x64/iojs.lib',
-    'win-x86/iojs.lib',
-    'win-arm64/iojs.lib',
-    'win-x64/node.lib',
-    'win-x86/node.lib',
-    'win-arm64/node.lib',
-    'arm64/node.lib',
-    'SHASUMS.txt',
-    'SHASUMS256.txt'
-  ];
-  return filePaths.map((filePath) => ({
+  return cloudStoreFilePaths(version).map((filePath) => ({
     file: filePath,
     url: `${versionPrefix}${filePath}`
   }));
 }
 
+function azRemoteFilesForVersion (version) {
+  const azCDN = 'https://artifacts.electronjs.org/headers/';
+  const versionPrefix = `${azCDN}dist/${version}/`;
+  return cloudStoreFilePaths(version).map((filePath) => ({
+    file: filePath,
+    url: `${versionPrefix}${filePath}`
+  }));
@@ -221,49 +233,39 @@ function runScript (scriptName, scriptArgs, cwd) {
 }
 
 function uploadNodeShasums () {
-  console.log('Uploading Node SHASUMS file to S3.');
+  console.log('Uploading Node SHASUMS file to artifacts.electronjs.org.');
   const scriptPath = path.join(ELECTRON_DIR, 'script', 'release', 'uploaders', 'upload-node-checksums.py');
   runScript(scriptPath, ['-v', pkgVersion]);
-  console.log(`${pass} Done uploading Node SHASUMS file to S3.`);
+  console.log(`${pass} Done uploading Node SHASUMS file to artifacts.electronjs.org.`);
 }
 
 function uploadIndexJson () {
-  console.log('Uploading index.json to S3.');
+  console.log('Uploading index.json to artifacts.electronjs.org.');
   const scriptPath = path.join(ELECTRON_DIR, 'script', 'release', 'uploaders', 'upload-index-json.py');
   runScript(scriptPath, [pkgVersion]);
-  console.log(`${pass} Done uploading index.json to S3.`);
+  console.log(`${pass} Done uploading index.json to artifacts.electronjs.org.`);
 }
 
 async function mergeShasums (pkgVersion) {
-  // Download individual checksum files for Electron zip files from S3,
+  // Download individual checksum files for Electron zip files from artifact storage,
   // concatenate them, and upload to GitHub.
 
-  const bucket = process.env.ELECTRON_S3_BUCKET;
-  const accessKeyId = process.env.ELECTRON_S3_ACCESS_KEY;
-  const secretAccessKey = process.env.ELECTRON_S3_SECRET_KEY;
-  if (!bucket || !accessKeyId || !secretAccessKey) {
-    throw new Error('Please set the $ELECTRON_S3_BUCKET, $ELECTRON_S3_ACCESS_KEY, and $ELECTRON_S3_SECRET_KEY environment variables');
+  const connectionString = process.env.ELECTRON_ARTIFACTS_BLOB_STORAGE;
+  if (!connectionString) {
+    throw new Error('Please set the $ELECTRON_ARTIFACTS_BLOB_STORAGE environment variable');
   }
 
-  const s3 = new AWS.S3({
-    apiVersion: '2006-03-01',
-    accessKeyId,
-    secretAccessKey,
-    region: 'us-west-2'
+  const blobServiceClient = BlobServiceClient.fromConnectionString(connectionString);
+  const containerClient = blobServiceClient.getContainerClient('checksums-scratchpad');
+  const blobsIter = containerClient.listBlobsFlat({
+    prefix: `${pkgVersion}/`
   });
-  const objects = await s3.listObjectsV2({
-    Bucket: bucket,
-    Prefix: `atom-shell/tmp/${pkgVersion}/`,
-    Delimiter: '/'
-  }).promise();
   const shasums = [];
-  for (const obj of objects.Contents) {
-    if (obj.Key.endsWith('.sha256sum')) {
-      const data = await s3.getObject({
-        Bucket: bucket,
-        Key: obj.Key
-      }).promise();
-      shasums.push(data.Body.toString('ascii').trim());
+  for await (const blob of blobsIter) {
+    if (blob.name.endsWith('.sha256sum')) {
+      const blobClient = containerClient.getBlockBlobClient(blob.name);
+      const response = await blobClient.downloadToBuffer();
+      shasums.push(response.toString('ascii').trim());
     }
   }
   return shasums.join('\n');
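After this change, mergeShasums() lists the checksums-scratchpad container under <version>/ (where atom-shell/tmp/<version>/ keys land after azput's rewrite) and concatenates every *.sha256sum blob it finds. Each blob holds a single '<sha256 hex> *<filename>' line, written by upload_sha256_checksum() in the uploader further down, so the merge itself is plain line concatenation. A sketch of that step over already-downloaded files; the file names are illustrative:

def merge_shasums(checksum_files):
  shasums = []
  for path in checksum_files:
    with open(path) as f:
      # One line per file: '<sha256 hex> *<filename>'
      shasums.append(f.read().strip())
  return '\n'.join(shasums)

# merge_shasums(['electron-v20.0.0-linux-x64.zip.sha256sum', ...])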
script/release/uploaders/upload-index-json.py
@@ -9,8 +9,8 @@ import urllib2
 sys.path.append(
   os.path.abspath(os.path.dirname(os.path.abspath(__file__)) + "/../.."))
 
-from lib.config import s3_config
-from lib.util import s3put, scoped_cwd, safe_mkdir, get_out_dir, ELECTRON_DIR
+from lib.util import store_artifact, scoped_cwd, safe_mkdir, get_out_dir, \
+  ELECTRON_DIR
 
 OUT_DIR = get_out_dir()
 
@@ -59,9 +59,7 @@ def main():
   with open(index_json, "w") as f:
     f.write(new_content)
 
-  bucket, access_key, secret_key = s3_config()
-  s3put(bucket, access_key, secret_key, OUT_DIR, 'atom-shell/dist',
-        [index_json])
+  store_artifact(OUT_DIR, 'atom-shell/dist', [index_json])
 
 
 if __name__ == '__main__':
script/release/uploaders/upload-node-checksums.py
@@ -10,8 +10,7 @@ import tempfile
 sys.path.append(
   os.path.abspath(os.path.dirname(os.path.abspath(__file__)) + "/../.."))
 
-from lib.config import s3_config
-from lib.util import download, rm_rf, s3put, safe_mkdir
+from lib.util import download, rm_rf, store_artifact, safe_mkdir
 
 DIST_URL = 'https://electronjs.org/headers/'
 
@@ -30,9 +29,8 @@ def main():
   ]
 
   if args.target_dir is None:
-    bucket, access_key, secret_key = s3_config()
-    s3put(bucket, access_key, secret_key, directory,
-          'atom-shell/dist/{0}'.format(args.version), checksums)
+    store_artifact(directory, 'atom-shell/dist/{0}'.format(args.version),
+                   checksums)
   else:
     copy_files(checksums, args.target_dir)
script/release/uploaders/upload-node-headers.py
@@ -9,8 +9,9 @@ import sys
 sys.path.append(
   os.path.abspath(os.path.dirname(os.path.abspath(__file__)) + "/../.."))
 
-from lib.config import PLATFORM, get_target_arch, s3_config
-from lib.util import safe_mkdir, scoped_cwd, s3put, get_out_dir, get_dist_dir
+from lib.config import PLATFORM, get_target_arch
+from lib.util import safe_mkdir, scoped_cwd, store_artifact, get_out_dir, \
+  get_dist_dir
 
 DIST_DIR = get_dist_dir()
 OUT_DIR = get_out_dir()
@@ -26,9 +27,8 @@ HEADER_TAR_NAMES = [
 def main():
   args = parse_args()
 
-  # Upload node's headers to S3.
-  bucket, access_key, secret_key = s3_config()
-  upload_node(bucket, access_key, secret_key, args.version)
+  # Upload node's headers to artifact storage.
+  upload_node(args.version)
 
 
 def parse_args():
@@ -38,17 +38,17 @@ def parse_args():
   return parser.parse_args()
 
 
-def upload_node(bucket, access_key, secret_key, version):
+def upload_node(version):
   with scoped_cwd(GEN_DIR):
     generated_tar = os.path.join(GEN_DIR, 'node_headers.tar.gz')
     for header_tar in HEADER_TAR_NAMES:
       versioned_header_tar = header_tar.format(version)
       shutil.copy2(generated_tar, os.path.join(GEN_DIR, versioned_header_tar))
 
-    s3put(bucket, access_key, secret_key, GEN_DIR,
-          'atom-shell/dist/{0}'.format(version), glob.glob('node-*.tar.gz'))
-    s3put(bucket, access_key, secret_key, GEN_DIR,
-          'atom-shell/dist/{0}'.format(version), glob.glob('iojs-*.tar.gz'))
+    store_artifact(GEN_DIR, 'atom-shell/dist/{0}'.format(version),
+                   glob.glob('node-*.tar.gz'))
+    store_artifact(GEN_DIR, 'atom-shell/dist/{0}'.format(version),
+                   glob.glob('iojs-*.tar.gz'))
 
   if PLATFORM == 'win32':
     if get_target_arch() == 'ia32':
@@ -73,16 +73,14 @@ def upload_node(bucket, access_key, secret_key, version):
       shutil.copy2(electron_lib, v4_node_lib)
 
     # Upload the node.lib.
-    s3put(bucket, access_key, secret_key, DIST_DIR,
-          'atom-shell/dist/{0}'.format(version), [node_lib])
+    store_artifact(DIST_DIR, 'atom-shell/dist/{0}'.format(version), [node_lib])
 
     # Upload the iojs.lib.
-    s3put(bucket, access_key, secret_key, DIST_DIR,
-          'atom-shell/dist/{0}'.format(version), [iojs_lib])
+    store_artifact(DIST_DIR, 'atom-shell/dist/{0}'.format(version), [iojs_lib])
 
     # Upload the v4 node.lib.
-    s3put(bucket, access_key, secret_key, DIST_DIR,
-          'atom-shell/dist/{0}'.format(version), [v4_node_lib])
+    store_artifact(DIST_DIR, 'atom-shell/dist/{0}'.format(version),
+                   [v4_node_lib])
 
 
 if __name__ == '__main__':
script/release/uploaders/upload-symbols.py
@@ -14,8 +14,8 @@ def is_fs_case_sensitive():
 sys.path.append(
   os.path.abspath(os.path.dirname(os.path.abspath(__file__)) + "/../.."))
 
-from lib.config import PLATFORM, s3_config
-from lib.util import get_electron_branding, execute, s3put, \
+from lib.config import PLATFORM
+from lib.util import get_electron_branding, execute, store_artifact, \
   get_out_dir, ELECTRON_DIR
 
 RELEASE_DIR = get_out_dir()
@@ -76,16 +76,15 @@ def main():
   for f in files:
     assert os.path.exists(f)
 
-  bucket, access_key, secret_key = s3_config()
-  upload_symbols(bucket, access_key, secret_key, files)
+  upload_symbols(files)
 
 
 def run_symstore(pdb, dest, product):
   execute(['symstore', 'add', '/r', '/f', pdb, '/s', dest, '/t', product])
 
 
-def upload_symbols(bucket, access_key, secret_key, files):
-  s3put(bucket, access_key, secret_key, SYMBOLS_DIR, 'atom-shell/symbols',
+def upload_symbols(files):
+  store_artifact(SYMBOLS_DIR, 'atom-shell/symbols',
                  files)
script/release/uploaders/upload.py
@@ -16,10 +16,10 @@ sys.path.append(
   os.path.abspath(os.path.dirname(os.path.abspath(__file__)) + "/../.."))
 
 from zipfile import ZipFile
-from lib.config import PLATFORM, get_target_arch,s3_config, \
+from lib.config import PLATFORM, get_target_arch, \
   get_zip_name, enable_verbose_mode, get_platform_key
 from lib.util import get_electron_branding, execute, get_electron_version, \
-  s3put, get_electron_exec, get_out_dir, \
+  store_artifact, get_electron_exec, get_out_dir, \
   SRC_DIR, ELECTRON_DIR, TS_NODE
@@ -342,14 +342,10 @@ def upload_electron(release, file_path, args):
 
   # if upload_to_s3 is set, skip github upload.
   if args.upload_to_s3:
-    bucket, access_key, secret_key = s3_config()
     key_prefix = 'electron-artifacts/{0}_{1}'.format(args.version,
                                                      args.upload_timestamp)
-    s3put(bucket, access_key, secret_key, os.path.dirname(file_path),
-          key_prefix, [file_path])
+    store_artifact(os.path.dirname(file_path), key_prefix, [file_path])
     upload_sha256_checksum(args.version, file_path, key_prefix)
     s3url = 'https://gh-contractor-zcbenz.s3.amazonaws.com'
     print('{0} uploaded to {1}/{2}/{0}'.format(filename, s3url, key_prefix))
     return
 
   # Upload the file.
@@ -369,7 +365,6 @@ def upload_io_to_github(release, filename, filepath, version):
 
 
 def upload_sha256_checksum(version, file_path, key_prefix=None):
-  bucket, access_key, secret_key = s3_config()
   checksum_path = '{}.sha256sum'.format(file_path)
   if key_prefix is None:
     key_prefix = 'atom-shell/tmp/{0}'.format(version)
@@ -380,8 +375,7 @@ def upload_sha256_checksum(version, file_path, key_prefix=None):
   filename = os.path.basename(file_path)
   with open(checksum_path, 'w') as checksum:
     checksum.write('{} *{}'.format(sha256.hexdigest(), filename))
-  s3put(bucket, access_key, secret_key, os.path.dirname(checksum_path),
-        key_prefix, [checksum_path])
+  store_artifact(os.path.dirname(checksum_path), key_prefix, [checksum_path])
 
 
 def get_release(version):
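Taken together, the last two hunks close the loop: upload_sha256_checksum() hashes each release file, writes a sibling <file>.sha256sum, and stores it under atom-shell/tmp/<version> (or the release's key_prefix), which azput.js rewrites into the checksums-scratchpad container that mergeShasums() later reads back. A self-contained sketch of that hash-and-store step; the chunked read stands in for hashing lines that fall outside the displayed hunks, and store_artifact is stubbed out since the real helper lives in lib.util:

import hashlib
import os

def store_artifact(prefix, key_prefix, files):
  # Stub: the real helper in lib.util uploads to both S3 and Azure.
  print('would upload', files, 'under', key_prefix)

def upload_sha256_checksum(version, file_path, key_prefix=None):
  checksum_path = '{}.sha256sum'.format(file_path)
  if key_prefix is None:
    key_prefix = 'atom-shell/tmp/{0}'.format(version)
  sha256 = hashlib.sha256()
  with open(file_path, 'rb') as f:
    for chunk in iter(lambda: f.read(65536), b''):
      sha256.update(chunk)
  filename = os.path.basename(file_path)
  with open(checksum_path, 'w') as checksum:
    checksum.write('{} *{}'.format(sha256.hexdigest(), filename))
  store_artifact(os.path.dirname(checksum_path), key_prefix, [checksum_path])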