2013-06-24 09:56:51 +00:00
|
|
|
#!/usr/bin/env python
|
|
|
|
|
2019-06-15 17:26:09 +00:00
|
|
|
from __future__ import print_function
|
2013-08-31 02:48:47 +00:00
|
|
|
import argparse
|
2018-05-22 17:50:44 +00:00
|
|
|
import datetime
|
2016-08-01 12:03:55 +00:00
|
|
|
import hashlib
|
2018-07-09 22:46:31 +00:00
|
|
|
import json
|
2020-01-29 12:22:04 +00:00
|
|
|
import mmap
|
2013-06-24 09:56:51 +00:00
|
|
|
import os
|
2018-09-27 20:14:13 +00:00
|
|
|
import shutil
|
2013-06-24 09:56:51 +00:00
|
|
|
import subprocess
|
2020-01-29 12:22:04 +00:00
|
|
|
from struct import Struct
|
2013-06-29 03:36:02 +00:00
|
|
|
import sys
|
2013-06-24 09:56:51 +00:00
|
|
|
|
2019-06-26 18:32:42 +00:00
|
|
|
sys.path.append(
|
|
|
|
os.path.abspath(os.path.dirname(os.path.abspath(__file__)) + "/../.."))
|
2019-06-24 17:18:04 +00:00
|
|
|
|
2020-01-13 20:40:13 +00:00
|
|
|
from zipfile import ZipFile
|
2017-02-14 18:11:17 +00:00
|
|
|
from lib.config import PLATFORM, get_target_arch, get_env_var, s3_config, \
|
2020-11-09 20:30:43 +00:00
|
|
|
get_zip_name, enable_verbose_mode, get_platform_key
|
2018-09-27 18:53:08 +00:00
|
|
|
from lib.util import get_electron_branding, execute, get_electron_version, \
|
2020-09-14 01:25:49 +00:00
|
|
|
s3put, get_electron_exec, get_out_dir, \
|
|
|
|
SRC_DIR, ELECTRON_DIR
|
2013-06-29 03:36:02 +00:00
|
|
|
|
2013-06-24 09:56:51 +00:00
|
|
|
|
2016-05-24 17:27:46 +00:00
|
|
|
# Canonical GitHub repository and the version being released.
ELECTRON_REPO = 'electron/electron'
ELECTRON_VERSION = get_electron_version()

# Branding (e.g. "electron" / "Electron") comes from the build config.
PROJECT_NAME = get_electron_branding()['project_name']
PRODUCT_NAME = get_electron_branding()['product_name']

OUT_DIR = get_out_dir()

# Canonical artifact file names for this version / platform / arch.
DIST_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION)
SYMBOLS_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'symbols')
DSYM_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'dsym')
PDB_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'pdb')
DEBUG_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'debug')
TOOLCHAIN_PROFILE_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION,
                                      'toolchain-profile')
|
2013-06-24 09:56:51 +00:00
|
|
|
|
|
|
|
|
|
|
|
def main():
  """Upload the built release artifacts to GitHub (or, with -s, to S3).

  Copies each build output (dist, symbols, ffmpeg, chromedriver,
  mksnapshot, platform-specific debug data) to its canonical versioned
  name and uploads it via upload_electron().  Returns 1 on a tag/build
  version mismatch, otherwise None.
  """
  args = parse_args()
  if args.verbose:
    enable_verbose_mode()
  if args.upload_to_s3:
    # S3 artifact keys are grouped per day; see upload_electron().
    utcnow = datetime.datetime.utcnow()
    args.upload_timestamp = utcnow.strftime('%Y%m%d')

  # Sanity check: the tag we are releasing must match what was built.
  build_version = get_electron_build_version()
  if not ELECTRON_VERSION.startswith(build_version):
    error = 'Tag name ({0}) should match build version ({1})\n'.format(
        ELECTRON_VERSION, build_version)
    sys.stderr.write(error)
    sys.stderr.flush()
    return 1

  tag_exists = False
  release = get_release(args.version)
  # A non-draft release means the tag has already been published.
  if not release['draft']:
    tag_exists = True

  if not args.upload_to_s3:
    assert release['exists'], \
        'Release does not exist; cannot upload to GitHub!'
    assert tag_exists == args.overwrite, \
        'You have to pass --overwrite to overwrite a published release'

  # Upload Electron files.
  # Rename dist.zip to get_zip_name('electron', version, suffix='')
  electron_zip = os.path.join(OUT_DIR, DIST_NAME)
  shutil.copy2(os.path.join(OUT_DIR, 'dist.zip'), electron_zip)
  upload_electron(release, electron_zip, args)
  # NOTE(review): mips64el builds presumably produce no symbols.zip —
  # confirm against the build config.
  if get_target_arch() != 'mips64el':
    symbols_zip = os.path.join(OUT_DIR, SYMBOLS_NAME)
    shutil.copy2(os.path.join(OUT_DIR, 'symbols.zip'), symbols_zip)
    upload_electron(release, symbols_zip, args)
  if get_platform_key() == 'darwin':
    # The API/TypeScript metadata is only published from one build
    # (darwin x64) to avoid duplicate uploads.
    if get_target_arch() == 'x64':
      api_path = os.path.join(ELECTRON_DIR, 'electron-api.json')
      upload_electron(release, api_path, args)

      ts_defs_path = os.path.join(ELECTRON_DIR, 'electron.d.ts')
      upload_electron(release, ts_defs_path, args)

    dsym_zip = os.path.join(OUT_DIR, DSYM_NAME)
    shutil.copy2(os.path.join(OUT_DIR, 'dsym.zip'), dsym_zip)
    upload_electron(release, dsym_zip, args)
  elif PLATFORM == 'win32':
    pdb_zip = os.path.join(OUT_DIR, PDB_NAME)
    shutil.copy2(os.path.join(OUT_DIR, 'pdb.zip'), pdb_zip)
    upload_electron(release, pdb_zip, args)
  elif PLATFORM == 'linux':
    debug_zip = os.path.join(OUT_DIR, DEBUG_NAME)
    shutil.copy2(os.path.join(OUT_DIR, 'debug.zip'), debug_zip)
    upload_electron(release, debug_zip, args)

  # Upload free version of ffmpeg.
  ffmpeg = get_zip_name('ffmpeg', ELECTRON_VERSION)
  ffmpeg_zip = os.path.join(OUT_DIR, ffmpeg)
  ffmpeg_build_path = os.path.join(SRC_DIR, 'out', 'ffmpeg', 'ffmpeg.zip')
  shutil.copy2(ffmpeg_build_path, ffmpeg_zip)
  upload_electron(release, ffmpeg_zip, args)

  chromedriver = get_zip_name('chromedriver', ELECTRON_VERSION)
  chromedriver_zip = os.path.join(OUT_DIR, chromedriver)
  shutil.copy2(os.path.join(OUT_DIR, 'chromedriver.zip'), chromedriver_zip)
  upload_electron(release, chromedriver_zip, args)

  mksnapshot = get_zip_name('mksnapshot', ELECTRON_VERSION)
  mksnapshot_zip = os.path.join(OUT_DIR, mksnapshot)
  if get_target_arch().startswith('arm') and PLATFORM != 'darwin':
    # Upload the x64 binary for arm/arm64 mksnapshot
    mksnapshot = get_zip_name('mksnapshot', ELECTRON_VERSION, 'x64')
    mksnapshot_zip = os.path.join(OUT_DIR, mksnapshot)

  shutil.copy2(os.path.join(OUT_DIR, 'mksnapshot.zip'), mksnapshot_zip)
  upload_electron(release, mksnapshot_zip, args)

  if PLATFORM == 'linux' and get_target_arch() == 'x64':
    # Upload the hunspell dictionaries only from the linux x64 build
    hunspell_dictionaries_zip = os.path.join(
      OUT_DIR, 'hunspell_dictionaries.zip')
    upload_electron(release, hunspell_dictionaries_zip, args)

  if not tag_exists and not args.upload_to_s3:
    # Upload symbols to symbol server.
    run_python_upload_script('upload-symbols.py')
    if PLATFORM == 'win32':
      run_python_upload_script('upload-node-headers.py', '-v', args.version)

  if PLATFORM == 'win32':
    # Package and upload the Windows toolchain profile used to reproduce
    # this build.
    toolchain_profile_zip = os.path.join(OUT_DIR, TOOLCHAIN_PROFILE_NAME)
    with ZipFile(toolchain_profile_zip, 'w') as myzip:
      myzip.write(
        os.path.join(OUT_DIR, 'windows_toolchain_profile.json'),
        'toolchain_profile.json')
    upload_electron(release, toolchain_profile_zip, args)
|
|
|
|
|
2013-08-31 02:48:47 +00:00
|
|
|
|
|
|
|
def parse_args():
  """Parse and return the command-line arguments for this upload script.

  Flags: -v/--version (defaults to ELECTRON_VERSION), -o/--overwrite,
  -p/--publish-release, -s/--upload_to_s3 and --verbose; all switches
  are booleans defaulting to False.
  """
  parser = argparse.ArgumentParser(description='upload distribution file')
  parser.add_argument('-v', '--version', help='Specify the version',
                      default=ELECTRON_VERSION)
  parser.add_argument('-o', '--overwrite',
                      help='Overwrite a published release',
                      action='store_true')
  parser.add_argument('-p', '--publish-release',
                      help='Publish the release',
                      action='store_true')
  parser.add_argument('-s', '--upload_to_s3',
                      help='Upload assets to s3 bucket',
                      dest='upload_to_s3',
                      action='store_true',
                      default=False,
                      required=False)
  # Bug fix: without action='store_true' argparse treated --verbose as an
  # option requiring a value (and args.verbose defaulted to None), while
  # main() only truth-tests args.verbose.  Make it a plain boolean flag.
  parser.add_argument('--verbose',
                      help='Mooooorreee logs',
                      action='store_true')
  return parser.parse_args()
|
2013-06-24 09:56:51 +00:00
|
|
|
|
|
|
|
|
2019-06-24 17:18:04 +00:00
|
|
|
def run_python_upload_script(script, *args):
  """Run a release uploader helper script with the current interpreter
  and print whatever it writes to stdout."""
  uploader = os.path.join(
      ELECTRON_DIR, 'script', 'release', 'uploaders', script)
  command = [sys.executable, uploader] + list(args)
  print(execute(command))
|
2016-08-01 02:00:19 +00:00
|
|
|
|
|
|
|
|
2016-05-24 17:27:46 +00:00
|
|
|
def get_electron_build_version():
  """Return the version string reported by the built Electron binary.

  For arm builds and on CI the binary cannot (or need not) be executed
  on the host, so the expected ELECTRON_VERSION is returned instead.
  """
  if get_target_arch().startswith('arm') or 'CI' in os.environ:
    # In CI we just build as told.
    return ELECTRON_VERSION
  electron = get_electron_exec()
  # Bug fix: check_output returns bytes on Python 3; decode so the caller
  # in main() can do ELECTRON_VERSION.startswith(build_version) without a
  # str/bytes TypeError.  (On Python 2 decoding to unicode is harmless.)
  output = subprocess.check_output([electron, '--version'])
  return output.decode('utf-8').strip()
|
2013-10-03 03:48:13 +00:00
|
|
|
|
|
|
|
|
2020-01-29 12:22:04 +00:00
|
|
|
class NonZipFileError(ValueError):
  """Signals that the file at the given path does not look like a zip."""
|
|
|
|
|
|
|
|
|
|
|
|
def zero_zip_date_time(fname):
  """ Wrap strip-zip zero_zip_date_time within a file opening operation """
  try:
    with open(fname, 'r+b') as f:
      _zero_zip_date_time(f)
  # Bug fix: the original bare `except:` also caught KeyboardInterrupt /
  # SystemExit and misreported them as "not a zip"; catch only genuine
  # errors (I/O failures, struct/mmap errors, NonZipFileError itself).
  except Exception:
    raise NonZipFileError(fname)
|
|
|
|
|
|
|
|
|
|
|
|
def _zero_zip_date_time(zip_):
  """Zero timestamp metadata inside a zip, in place, for determinism.

  *zip_* is a file object opened 'r+b'.  The archive is mmap'ed and
  patched directly: pass 1 walks the local file headers, pass 2 the
  central directory; both reset last_mod_time/last_mod_date, and
  timestamp / uid-gid "extra" records are blanked out.  Raises
  NonZipFileError if the file does not start with a local file header.
  """
  def purify_extra_data(mm, offset, length, compressed_size=0):
    """Blank timestamp/ownership records in an "extra" field and return
    the (possibly ZIP64-corrected) compressed size of the entry."""
    extra_header_struct = Struct("<HH")
    # 0. id
    # 1. length
    STRIPZIP_OPTION_HEADER = 0xFFFF
    EXTENDED_TIME_DATA = 0x5455
    # Some sort of extended time data, see
    # ftp://ftp.info-zip.org/pub/infozip/src/zip30.zip ./proginfo/extrafld.txt
    # fallthrough
    UNIX_EXTRA_DATA = 0x7875
    # Unix extra data; UID / GID stuff, see
    # ftp://ftp.info-zip.org/pub/infozip/src/zip30.zip ./proginfo/extrafld.txt
    ZIP64_EXTRA_HEADER = 0x0001
    zip64_extra_struct = Struct("<HHQQ")
    # ZIP64.
    # When a ZIP64 extra field is present his 8byte length
    # will override the 4byte length defined in canonical zips.
    # This is in the form:
    # - 0x0001 (header_id)
    # - 0x0010 [16] (header_length)
    # - ... (8byte uncompressed_length)
    # - ... (8byte compressed_length)
    mlen = offset + length

    while offset < mlen:
      values = list(extra_header_struct.unpack_from(mm, offset))
      _, header_length = values
      extra_struct = Struct("<HH" + "B" * header_length)
      values = list(extra_struct.unpack_from(mm, offset))
      header_id, header_length = values[:2]

      if header_id in (EXTENDED_TIME_DATA, UNIX_EXTRA_DATA):
        # Replace the record id and fill its payload with 0xff so the
        # timestamp / uid-gid bytes cannot vary between builds.
        values[0] = STRIPZIP_OPTION_HEADER
        for i in range(2, len(values)):
          values[i] = 0xff
        extra_struct.pack_into(mm, offset, *values)
      if header_id == ZIP64_EXTRA_HEADER:
        assert header_length == 16
        values = list(zip64_extra_struct.unpack_from(mm, offset))
        header_id, header_length, _, compressed_size = values

      offset += extra_header_struct.size + header_length

    return compressed_size

  FILE_HEADER_SIGNATURE = 0x04034b50
  CENDIR_HEADER_SIGNATURE = 0x02014b50

  archive_size = os.fstat(zip_.fileno()).st_size
  signature_struct = Struct("<L")
  local_file_header_struct = Struct("<LHHHHHLLLHH")
  # 0. L signature
  # 1. H version_needed
  # 2. H gp_bits
  # 3. H compression_method
  # 4. H last_mod_time
  # 5. H last_mod_date
  # 6. L crc32
  # 7. L compressed_size
  # 8. L uncompressed_size
  # 9. H name_length
  # 10. H extra_field_length
  central_directory_header_struct = Struct("<LHHHHHHLLLHHHHHLL")
  # 0. L signature
  # 1. H version_made_by
  # 2. H version_needed
  # 3. H gp_bits
  # 4. H compression_method
  # 5. H last_mod_time
  # 6. H last_mod_date
  # 7. L crc32
  # 8. L compressed_size
  # 9. L uncompressed_size
  # 10. H file_name_length
  # 11. H extra_field_length
  # 12. H file_comment_length
  # 13. H disk_number_start
  # 14. H internal_attr
  # 15. L external_attr
  # 16. L rel_offset_local_header
  offset = 0

  mm = mmap.mmap(zip_.fileno(), 0)
  # Pass 1: local file headers.  Each header is followed by the file
  # name, the extra field and the compressed payload, which we skip.
  while offset < archive_size:
    if signature_struct.unpack_from(mm, offset) != (FILE_HEADER_SIGNATURE,):
      break
    values = list(local_file_header_struct.unpack_from(mm, offset))
    compressed_size, _, name_length, extra_field_length = values[7:11]
    # reset last_mod_time
    values[4] = 0
    # reset last_mod_date
    values[5] = 0x21
    local_file_header_struct.pack_into(mm, offset, *values)
    offset += local_file_header_struct.size + name_length
    if extra_field_length != 0:
      # A ZIP64 record in the extra field may carry the real (8-byte)
      # compressed size, so take purify_extra_data's return value.
      compressed_size = purify_extra_data(mm, offset, extra_field_length,
                                          compressed_size)
    offset += compressed_size + extra_field_length

  # Pass 2: central directory records, which repeat the timestamps.
  while offset < archive_size:
    if signature_struct.unpack_from(mm, offset) != (CENDIR_HEADER_SIGNATURE,):
      break
    values = list(central_directory_header_struct.unpack_from(mm, offset))
    file_name_length, extra_field_length, file_comment_length = values[10:13]
    # reset last_mod_time
    values[5] = 0
    # reset last_mod_date
    values[6] = 0x21
    central_directory_header_struct.pack_into(mm, offset, *values)
    offset += central_directory_header_struct.size
    offset += file_name_length + extra_field_length + file_comment_length
    if extra_field_length != 0:
      purify_extra_data(mm, offset - extra_field_length, extra_field_length)

  if offset == 0:
    # Byte 0 did not hold a local file header signature: not a zip.
    raise NonZipFileError(zip_.name)
|
|
|
|
|
|
|
|
|
2018-07-09 22:46:31 +00:00
|
|
|
def upload_electron(release, file_path, args):
  """Upload a single artifact to GitHub, or to S3 when -s was passed.

  Zip artifacts are first made deterministic in place; a .sha256sum
  companion file is uploaded alongside the artifact in both modes.
  """
  filename = os.path.basename(file_path)

  # Strip zip non determinism before upload, in-place operation
  try:
    zero_zip_date_time(file_path)
  except NonZipFileError:
    # Non-zip artifacts (electron-api.json, electron.d.ts) are uploaded
    # unmodified.
    pass

  # if upload_to_s3 is set, skip github upload.
  if args.upload_to_s3:
    bucket, access_key, secret_key = s3_config()
    # Key prefix groups artifacts per version and per upload day
    # (upload_timestamp is set in main()).
    key_prefix = 'electron-artifacts/{0}_{1}'.format(args.version,
                                                     args.upload_timestamp)
    s3put(bucket, access_key, secret_key, os.path.dirname(file_path),
          key_prefix, [file_path])
    upload_sha256_checksum(args.version, file_path, key_prefix)
    s3url = 'https://gh-contractor-zcbenz.s3.amazonaws.com'
    print('{0} uploaded to {1}/{2}/{0}'.format(filename, s3url, key_prefix))
    return

  # Upload the file.
  upload_io_to_github(release, filename, file_path, args.version)

  # Upload the checksum file.
  upload_sha256_checksum(args.version, file_path)
|
2016-08-01 12:03:55 +00:00
|
|
|
|
2016-07-25 02:19:23 +00:00
|
|
|
|
2018-08-16 15:57:12 +00:00
|
|
|
def upload_io_to_github(release, filename, filepath, version):
  """Attach one file to the given GitHub release via the node uploader."""
  print('Uploading %s to Github' % (filename))
  uploader = os.path.join(
      ELECTRON_DIR, 'script', 'release', 'uploaders', 'upload-to-github.js')
  command = ['node', uploader, filepath, filename, str(release['id']),
             version]
  execute(command)
|
2013-09-26 12:32:11 +00:00
|
|
|
|
|
|
|
|
2017-11-02 08:06:28 +00:00
|
|
|
def upload_sha256_checksum(version, file_path, key_prefix=None):
  """Write a `<file>.sha256sum` companion file and upload it to S3.

  The checksum file contains "<hexdigest> *<basename>" (sha256sum binary
  format).  When key_prefix is None the legacy atom-shell/tmp/<version>
  prefix is used.
  """
  bucket, access_key, secret_key = s3_config()
  checksum_path = '{}.sha256sum'.format(file_path)
  if key_prefix is None:
    key_prefix = 'atom-shell/tmp/{0}'.format(version)
  sha256 = hashlib.sha256()
  with open(file_path, 'rb') as f:
    # Improvement: hash in fixed-size chunks instead of a single
    # f.read(); release zips can be hundreds of MB and do not need to
    # be held in memory at once.
    for chunk in iter(lambda: f.read(1024 * 1024), b''):
      sha256.update(chunk)

  filename = os.path.basename(file_path)
  with open(checksum_path, 'w') as checksum:
    checksum.write('{} *{}'.format(sha256.hexdigest(), filename))
  s3put(bucket, access_key, secret_key, os.path.dirname(checksum_path),
        key_prefix, [checksum_path])
|
2016-08-01 12:03:55 +00:00
|
|
|
|
|
|
|
|
2013-09-26 11:49:09 +00:00
|
|
|
def auth_token():
  """Return the GitHub personal access token from the environment."""
  missing_msg = ('Error: Please set the $ELECTRON_GITHUB_TOKEN '
                 'environment variable, which is your personal token')
  token = get_env_var('GITHUB_TOKEN')
  assert token, missing_msg
  return token
|
2013-06-24 09:56:51 +00:00
|
|
|
|
|
|
|
|
2018-07-09 22:46:31 +00:00
|
|
|
def get_release(version):
  """Look up the GitHub release for *version* via the node helper and
  return it as a parsed dict."""
  finder = os.path.join(
      ELECTRON_DIR, 'script', 'release', 'find-github-release.js')
  raw_info = execute(['node', finder, version])
  return json.loads(raw_info)
|
|
|
|
|
2013-06-24 09:56:51 +00:00
|
|
|
# Script entry point: propagate main()'s return code to the shell.
if __name__ == '__main__':
  sys.exit(main())
|