Add script to upload checksums of node headers, fixes #457.
This commit is contained in:
parent
fab11950ee
commit
0ca33dc6b8
3 changed files with 104 additions and 26 deletions
|
@ -30,6 +30,7 @@ def scoped_cwd(path):
|
||||||
|
|
||||||
|
|
||||||
def download(text, url, path):
|
def download(text, url, path):
|
||||||
|
safe_mkdir(os.path.dirname(path))
|
||||||
with open(path, 'w') as local_file:
|
with open(path, 'w') as local_file:
|
||||||
web_file = urllib2.urlopen(url)
|
web_file = urllib2.urlopen(url)
|
||||||
file_size = int(web_file.info().getheaders("Content-Length")[0])
|
file_size = int(web_file.info().getheaders("Content-Length")[0])
|
||||||
|
@ -55,6 +56,7 @@ def download(text, url, path):
|
||||||
print "%s done." % (text)
|
print "%s done." % (text)
|
||||||
else:
|
else:
|
||||||
print
|
print
|
||||||
|
return path
|
||||||
|
|
||||||
|
|
||||||
def extract_tarball(tarball_path, member, destination):
|
def extract_tarball(tarball_path, member, destination):
|
||||||
|
@ -120,3 +122,28 @@ def execute(argv):
|
||||||
|
|
||||||
def get_atom_shell_version():
  """Return the atom-shell version as the most recent git tag for HEAD."""
  tag = subprocess.check_output(['git', 'describe', '--tags'])
  return tag.strip()
||||||
|
def s3_config():
  """Read the S3 upload credentials from the environment.

  Returns a (bucket, access_key, secret_key) tuple. Raises RuntimeError if
  any of the three $ATOM_SHELL_S3_* variables is unset or empty.
  """
  config = (os.environ.get('ATOM_SHELL_S3_BUCKET', ''),
            os.environ.get('ATOM_SHELL_S3_ACCESS_KEY', ''),
            os.environ.get('ATOM_SHELL_S3_SECRET_KEY', ''))
  message = ('Error: Please set the $ATOM_SHELL_S3_BUCKET, '
             '$ATOM_SHELL_S3_ACCESS_KEY, and '
             '$ATOM_SHELL_S3_SECRET_KEY environment variables')
  # An `assert` would be silently stripped under `python -O`; raise
  # explicitly so the credential check always runs.
  if not all(config):
    raise RuntimeError(message)
  return config
||||||
|
def s3put(bucket, access_key, secret_key, prefix, key_prefix, files):
  """Upload files to S3 with public-read access using the s3put tool."""
  command = [
    's3put',
    '--bucket', bucket,
    '--access_key', access_key,
    '--secret_key', secret_key,
    '--prefix', prefix,
    '--key_prefix', key_prefix,
    '--grant', 'public-read'
  ]
  command.extend(files)

  execute(command)
|
70
script/upload-checksums.py
Executable file
70
script/upload-checksums.py
Executable file
|
@ -0,0 +1,70 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import hashlib
|
||||||
|
import os
|
||||||
|
import tempfile
|
||||||
|
|
||||||
|
from lib.util import download, rm_rf, s3_config, s3put
|
||||||
|
|
||||||
|
|
||||||
|
# Base URL of the S3 bucket holding atom-shell dist artifacts; the release
# version is appended as a path component (see main()).
DIST_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/atom-shell/dist/'
||||||
|
|
||||||
|
def main():
  """Download a release's dist files, generate SHASUMS/SHASUMS256 files
  and upload them back to the release's S3 directory."""
  options = parse_args()
  version = options.version

  base_url = DIST_URL + version + '/'
  tmp_dir, downloaded = download_files(base_url, get_files_list(version))

  sha1_file = create_checksum('sha1', tmp_dir, 'SHASUMS.txt', downloaded)
  sha256_file = create_checksum('sha256', tmp_dir, 'SHASUMS256.txt',
                                downloaded)

  bucket, access_key, secret_key = s3_config()
  s3put(bucket, access_key, secret_key, tmp_dir,
        'atom-shell/dist/{0}'.format(version), [sha1_file, sha256_file])

  rm_rf(tmp_dir)
|
def parse_args():
  """Parse command-line options; --version is mandatory."""
  parser = argparse.ArgumentParser(description='upload sumsha file')
  parser.add_argument('-v', '--version',
                      required=True,
                      help='Specify the version')
  return parser.parse_args()
|
def get_files_list(version):
  """Return the names of the node artifacts published for a release."""
  tarball = 'node-{0}.tar.gz'.format(version)
  return [tarball, 'node.lib', 'x64/node.lib']
|
def download_files(url, files):
  """Fetch each named file from under `url` into a fresh temp directory.

  Returns (directory, list of downloaded local paths).
  """
  directory = tempfile.mkdtemp(prefix='atom-shell-tmp')
  downloaded = []
  for name in files:
    local_path = os.path.join(directory, name)
    downloaded.append(download(name, url + name, local_path))
  return directory, downloaded
|
def create_checksum(algorithm, directory, filename, files):
  """Write a checksum file for `files` into `directory`.

  Each line is '<hexdigest> <path relative to directory>'. Returns the
  path of the generated checksum file.
  """
  lines = []
  for path in files:
    h = hashlib.new(algorithm)
    # Hash in binary mode -- text mode ('r') corrupts tarball bytes on
    # Windows -- and in chunks so large tarballs are not slurped into
    # memory at once.
    with open(path, 'rb') as f:
      for chunk in iter(lambda: f.read(65536), b''):
        h.update(chunk)
    lines.append(h.hexdigest() + ' ' + os.path.relpath(path, directory))

  checksum_file = os.path.join(directory, filename)
  with open(checksum_file, 'w') as f:
    f.write('\n'.join(lines) + '\n')
  return checksum_file
||||||
|
# Script entry point: propagate main()'s return value as the process exit
# code when run directly.
if __name__ == '__main__':
  import sys
  sys.exit(main())
|
@ -9,7 +9,8 @@ import sys
|
||||||
import tempfile
|
import tempfile
|
||||||
|
|
||||||
from lib.config import DIST_ARCH, NODE_VERSION, TARGET_PLATFORM
|
from lib.config import DIST_ARCH, NODE_VERSION, TARGET_PLATFORM
|
||||||
from lib.util import get_atom_shell_version, scoped_cwd, safe_mkdir, execute
|
from lib.util import get_atom_shell_version, scoped_cwd, safe_mkdir, execute, \
|
||||||
|
s3_config, s3put
|
||||||
from lib.github import GitHub
|
from lib.github import GitHub
|
||||||
|
|
||||||
|
|
||||||
|
@ -48,6 +49,11 @@ def main():
|
||||||
upload_atom_shell(github, release_id, os.path.join(DIST_DIR, DIST_NAME))
|
upload_atom_shell(github, release_id, os.path.join(DIST_DIR, DIST_NAME))
|
||||||
upload_atom_shell(github, release_id, os.path.join(DIST_DIR, SYMBOLS_NAME))
|
upload_atom_shell(github, release_id, os.path.join(DIST_DIR, SYMBOLS_NAME))
|
||||||
if args.publish_release:
|
if args.publish_release:
|
||||||
|
# Upload the SHASUMS.txt.
|
||||||
|
execute([sys.executable,
|
||||||
|
os.path.join(SOURCE_ROOT, 'script', 'upload-checksums.py')])
|
||||||
|
|
||||||
|
# Press the publish button.
|
||||||
publish_release(github, release_id)
|
publish_release(github, release_id)
|
||||||
|
|
||||||
# Upload node's headers to S3.
|
# Upload node's headers to S3.
|
||||||
|
@ -176,31 +182,6 @@ def auth_token():
|
||||||
return token
|
return token
|
||||||
|
|
||||||
|
|
||||||
def s3_config():
|
|
||||||
config = (os.environ.get('ATOM_SHELL_S3_BUCKET', ''),
|
|
||||||
os.environ.get('ATOM_SHELL_S3_ACCESS_KEY', ''),
|
|
||||||
os.environ.get('ATOM_SHELL_S3_SECRET_KEY', ''))
|
|
||||||
message = ('Error: Please set the $ATOM_SHELL_S3_BUCKET, '
|
|
||||||
'$ATOM_SHELL_S3_ACCESS_KEY, and '
|
|
||||||
'$ATOM_SHELL_S3_SECRET_KEY environment variables')
|
|
||||||
assert all(len(c) for c in config), message
|
|
||||||
return config
|
|
||||||
|
|
||||||
|
|
||||||
def s3put(bucket, access_key, secret_key, prefix, key_prefix, files):
|
|
||||||
args = [
|
|
||||||
's3put',
|
|
||||||
'--bucket', bucket,
|
|
||||||
'--access_key', access_key,
|
|
||||||
'--secret_key', secret_key,
|
|
||||||
'--prefix', prefix,
|
|
||||||
'--key_prefix', key_prefix,
|
|
||||||
'--grant', 'public-read'
|
|
||||||
] + files
|
|
||||||
|
|
||||||
execute(args)
|
|
||||||
|
|
||||||
|
|
||||||
def touch_x64_node_lib():
|
def touch_x64_node_lib():
|
||||||
x64_dir = os.path.join(OUT_DIR, 'x64')
|
x64_dir = os.path.join(OUT_DIR, 'x64')
|
||||||
safe_mkdir(x64_dir)
|
safe_mkdir(x64_dir)
|
||||||
|
|
Loading…
Add table
Reference in a new issue