#!/usr/bin/env python

import atexit
import contextlib
import datetime
import errno
import platform
import re
import shutil
import ssl
import subprocess
import sys
import tarfile
import tempfile
import urllib2
import os
import zipfile

from config import is_verbose_mode, PLATFORM
from env_util import get_vs_env

BOTO_DIR = os.path.abspath(os.path.join(__file__, '..', '..', '..', 'vendor',
                                        'boto'))

NPM = 'npm'
if sys.platform in ['win32', 'cygwin']:
  NPM += '.cmd'


def get_host_arch():
  """Returns the host architecture with a predictable string."""
  host_arch = platform.machine()

  # Convert machine type to the format recognized by gyp.
  if re.match(r'i.86', host_arch) or host_arch == 'i86pc':
    host_arch = 'ia32'
  elif host_arch in ['x86_64', 'amd64']:
    host_arch = 'x64'
  elif host_arch.startswith('arm'):
    host_arch = 'arm'

  # platform.machine is based on the running kernel. It's possible to use a
  # 64-bit kernel with a 32-bit userland, e.g. to give the linker slightly
  # more memory. Distinguish between different userland bitnesses by querying
  # the Python binary.
  if host_arch == 'x64' and platform.architecture()[0] == '32bit':
    host_arch = 'ia32'

  return host_arch


def tempdir(prefix=''):
  directory = tempfile.mkdtemp(prefix=prefix)
  atexit.register(shutil.rmtree, directory)
  return directory


@contextlib.contextmanager
def scoped_cwd(path):
  cwd = os.getcwd()
  os.chdir(path)
  try:
    yield
  finally:
    os.chdir(cwd)


@contextlib.contextmanager
def scoped_env(key, value):
  origin = ''
  if key in os.environ:
    origin = os.environ[key]
  os.environ[key] = value
  try:
    yield
  finally:
    os.environ[key] = origin

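# Illustrative usage of the scoped_cwd/scoped_env helpers above (comments
# only, not executed; the path and value are made up):
#
#   with scoped_cwd('/tmp/some-dir'):
#     pass  # the previous working directory is restored on exit
#
#   with scoped_env('KEY', 'value'):
#     pass  # KEY is reset to its old value on exit, or to '' if it was unset

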
def download(text, url, path):
  safe_mkdir(os.path.dirname(path))
  with open(path, 'wb') as local_file:
    if hasattr(ssl, '_create_unverified_context'):
      ssl._create_default_https_context = ssl._create_unverified_context

    web_file = urllib2.urlopen(url)
    file_size = int(web_file.info().getheaders("Content-Length")[0])
    downloaded_size = 0
    block_size = 128

    ci = os.environ.get('CI') is not None

    while True:
      buf = web_file.read(block_size)
      if not buf:
        break

      downloaded_size += len(buf)
      local_file.write(buf)

      if not ci:
        percent = downloaded_size * 100. / file_size
        status = "\r%s %10d [%3.1f%%]" % (text, downloaded_size, percent)
        print status,

    if ci:
      print "%s done." % (text)
    else:
      print
  return path

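# Illustrative call to download() above (comment only; the URL and path are
# made up):
#   download('Node headers', 'https://example.com/headers.tar.gz',
#            '/tmp/headers.tar.gz')
# It streams the response to disk in 128-byte blocks and prints a running
# progress line unless the CI environment variable is set.

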
def extract_tarball(tarball_path, member, destination):
  with tarfile.open(tarball_path) as tarball:
    tarball.extract(member, destination)


def extract_zip(zip_path, destination):
  if sys.platform == 'darwin':
    # Use the unzip command on Mac to keep symbolic links in the zip file
    # working.
    execute(['unzip', zip_path, '-d', destination])
  else:
    with zipfile.ZipFile(zip_path) as z:
      z.extractall(destination)


def make_zip(zip_file_path, files, dirs):
  safe_unlink(zip_file_path)
  if sys.platform == 'darwin':
    files += dirs
    execute(['zip', '-r', '-y', zip_file_path] + files)
  else:
    zip_file = zipfile.ZipFile(zip_file_path, "w", zipfile.ZIP_DEFLATED)
    for filename in files:
      zip_file.write(filename, filename)
    for dirname in dirs:
      for root, _, filenames in os.walk(dirname):
        for f in filenames:
          zip_file.write(os.path.join(root, f))
    zip_file.close()


def rm_rf(path):
  try:
    shutil.rmtree(path)
  except OSError:
    pass


def safe_unlink(path):
  try:
    os.unlink(path)
  except OSError as e:
    if e.errno != errno.ENOENT:
      raise


def safe_mkdir(path):
  try:
    os.makedirs(path)
  except OSError as e:
    if e.errno != errno.EEXIST:
      raise


def execute(argv, env=os.environ, cwd=None):
  if is_verbose_mode():
    print ' '.join(argv)
  try:
    output = subprocess.check_output(argv, stderr=subprocess.STDOUT,
                                     env=env, cwd=cwd)
    if is_verbose_mode():
      print output
    return output
  except subprocess.CalledProcessError as e:
    print e.output
    raise e


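# Note: execute() captures the command's combined stdout/stderr and returns
# it, while execute_stdout() below lets the child write directly to the
# console when verbose mode is on (and simply delegates to execute()
# otherwise).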
def execute_stdout(argv, env=os.environ, cwd=None):
  if is_verbose_mode():
    print ' '.join(argv)
    try:
      subprocess.check_call(argv, env=env, cwd=cwd)
    except subprocess.CalledProcessError as e:
      print e.output
      raise e
  else:
    execute(argv, env, cwd)


def electron_gyp():
  SOURCE_ROOT = os.path.abspath(os.path.join(__file__, '..', '..', '..'))
  gyp = os.path.join(SOURCE_ROOT, 'electron.gyp')
  with open(gyp) as f:
    obj = eval(f.read())
    return obj['variables']


def electron_features():
  SOURCE_ROOT = os.path.abspath(os.path.join(__file__, '..', '..', '..'))
  gyp = os.path.join(SOURCE_ROOT, 'features.gypi')
  with open(gyp) as f:
    obj = eval(f.read())
    return obj['variables']['variables']

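# Note on the two readers above: electron.gyp and features.gypi are GYP
# files, whose contents are Python-style dict literals, so eval() on the raw
# file text yields a dict that can be indexed directly.

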
def get_electron_version():
  return 'v' + electron_gyp()['version%']


def parse_version(version):
  if version[0] == 'v':
    version = version[1:]

  vs = version.split('.')
  if len(vs) > 4:
    return vs[0:4]
  else:
    return vs + ['0'] * (4 - len(vs))

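# Examples for parse_version() above (comments only):
#   parse_version('v1.2.3')        -> ['1', '2', '3', '0']
#   parse_version('2.0.0-beta.8')  -> ['2', '0', '0-beta', '8']
# The second result is why clean_parse_version() below strips the prerelease
# suffix before parsing.

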
def boto_path_dirs():
  return [
    os.path.join(BOTO_DIR, 'build', 'lib'),
    os.path.join(BOTO_DIR, 'build', 'lib.linux-x86_64-2.7')
  ]


def run_boto_script(access_key, secret_key, script_name, *args):
  env = os.environ.copy()
  env['AWS_ACCESS_KEY_ID'] = access_key
  env['AWS_SECRET_ACCESS_KEY'] = secret_key
  env['PYTHONPATH'] = os.path.pathsep.join(
      [env.get('PYTHONPATH', '')] + boto_path_dirs())

  boto = os.path.join(BOTO_DIR, 'bin', script_name)
  execute([sys.executable, boto] + list(args), env)


def s3put(bucket, access_key, secret_key, prefix, key_prefix, files):
  args = [
    '--bucket', bucket,
    '--prefix', prefix,
    '--key_prefix', key_prefix,
    '--grant', 'public-read'
  ] + files

  run_boto_script(access_key, secret_key, 's3put', *args)


def import_vs_env(target_arch):
  if sys.platform != 'win32':
    return

  if target_arch == 'ia32':
    vs_arch = 'amd64_x86'
  else:
    vs_arch = 'x86_amd64'
  env = get_vs_env('[15.0,16.0)', vs_arch)
  os.environ.update(env)

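# Note on import_vs_env() above: the '[15.0,16.0)' range selects a Visual
# Studio 2017 toolchain, and 'amd64_x86'/'x86_amd64' name the cross-compiler
# variants (64-bit host targeting 32-bit, and vice versa) passed through to
# get_vs_env().

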
def set_clang_env(env):
  SOURCE_ROOT = os.path.abspath(os.path.join(__file__, '..', '..', '..'))
  llvm_dir = os.path.join(SOURCE_ROOT, 'vendor', 'llvm-build',
                          'Release+Asserts', 'bin')
  env['CC'] = os.path.join(llvm_dir, 'clang')
  env['CXX'] = os.path.join(llvm_dir, 'clang++')


def update_electron_modules(dirname, target_arch, nodedir):
  env = os.environ.copy()
  version = get_electron_version()
  env['npm_config_arch'] = target_arch
  env['npm_config_target'] = version
  env['npm_config_nodedir'] = nodedir
  update_node_modules(dirname, env)
  execute_stdout([NPM, 'rebuild'], env, dirname)


def update_node_modules(dirname, env=None):
  if env is None:
    env = os.environ.copy()
  if PLATFORM == 'linux':
    # Use prebuilt clang for building native modules.
    set_clang_env(env)
    env['npm_config_clang'] = '1'
  with scoped_cwd(dirname):
    args = [NPM, 'install']
    if is_verbose_mode():
      args += ['--verbose']
    # Ignore npm install errors when running in CI.
    if 'CI' in os.environ:
      try:
        execute_stdout(args, env)
      except subprocess.CalledProcessError:
        pass
    else:
      execute_stdout(args, env)


def clean_parse_version(v):
  return parse_version(v.split("-")[0])


def is_stable(v):
  return len(v.split(".")) == 3


def is_beta(v):
  return 'beta' in v


def is_nightly(v):
  return 'nightly' in v


def get_nightly_date():
  return datetime.datetime.today().strftime('%Y%m%d')


def get_last_major():
  # Cast to int so callers can do arithmetic on the result.
  return int(execute(['node', 'script/get-last-major-for-master.js']))


def get_next_nightly(v):
  pv = clean_parse_version(v)
  major = pv[0]; minor = pv[1]; patch = pv[2]

  if is_stable(v):
    patch = str(int(pv[2]) + 1)

  # Strip the trailing newline from the git output before comparing.
  if execute(['git', 'rev-parse', '--abbrev-ref', 'HEAD']).strip() == "master":
    major = str(get_last_major() + 1)
    minor = '0'
    patch = '0'

  pre = 'nightly.' + get_nightly_date()
  return make_version(major, minor, patch, pre)


def non_empty(thing):
  return thing.strip() != ''


def get_next_beta(v):
  pv = clean_parse_version(v)
  tag_pattern = 'v' + pv[0] + '.' + pv[1] + '.' + pv[2] + '-beta.*'
  tag_list = filter(
    non_empty,
    execute(['git', 'tag', '--list', '-l', tag_pattern]).strip().split('\n')
  )
  if len(tag_list) == 0:
    return make_version(pv[0], pv[1], pv[2], 'beta.1')

  # parse_version() on a 'vX.Y.Z-beta.N' tag yields ['X', 'Y', 'Z-beta', 'N'],
  # so take the beta number from lv[3] and rebuild the prerelease suffix.
  lv = parse_version(tag_list[-1])
  return make_version(pv[0], pv[1], pv[2], 'beta.' + str(int(lv[3]) + 1))


def get_next_stable_from_pre(v):
  pv = clean_parse_version(v)
  major = pv[0]; minor = pv[1]; patch = pv[2]
  return make_version(major, minor, patch)


def get_next_stable_from_stable(v):
  pv = clean_parse_version(v)
  major = pv[0]; minor = pv[1]; patch = pv[2]
  return make_version(major, minor, str(int(patch) + 1))


def make_version(major, minor, patch, pre=None):
  if pre is None:
    return major + '.' + minor + '.' + patch
  return major + '.' + minor + '.' + patch + '-' + pre

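# Examples for the version helpers above (comments only):
#   make_version('4', '0', '0')                      -> '4.0.0'
#   make_version('4', '0', '0', 'nightly.20180817')  -> '4.0.0-nightly.20180817'
#   get_next_stable_from_stable('v3.0.1')            -> '3.0.2'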