2015-01-23 01:11:10 +00:00
|
|
|
#!/usr/bin/env python
|
|
|
|
|
|
|
|
import argparse
|
2015-01-23 01:18:31 +00:00
|
|
|
import glob
|
2015-01-23 01:11:10 +00:00
|
|
|
import os
|
|
|
|
import shutil
|
|
|
|
import sys
|
|
|
|
import tarfile
|
|
|
|
|
2015-04-11 15:40:10 +00:00
|
|
|
from lib.config import PLATFORM, get_target_arch
|
2015-01-23 01:18:31 +00:00
|
|
|
from lib.util import execute, safe_mkdir, scoped_cwd, s3_config, s3put
|
2015-01-23 01:11:10 +00:00
|
|
|
|
|
|
|
|
|
|
|
# Repository root: this script lives one directory below it (e.g. script/).
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
# Staging area for generated tarballs and artifacts before upload.
DIST_DIR = os.path.join(SOURCE_ROOT, 'dist')
# Vendored node checkout that the header files are copied from.
NODE_DIR = os.path.join(SOURCE_ROOT, 'vendor', 'node')
# Release build output directory.
OUT_DIR = os.path.join(SOURCE_ROOT, 'out', 'R')

# File extensions that count as headers when walking the node tree.
HEADERS_SUFFIX = [
  '.h',
  '.gypi',
]
# Directories inside NODE_DIR scanned recursively for header files.
HEADERS_DIRS = [
  'src',
  'deps/http_parser',
  'deps/zlib',
  'deps/uv',
  'deps/npm',
  'deps/mdb_v8',
]
# Individual top-level node files copied in addition to the directory scan.
HEADERS_FILES = [
  'common.gypi',
  'config.gypi',
]
|
|
|
|
|
|
|
|
|
|
|
|
def main():
  """Build the node headers tarball and upload it, plus checksums, to S3."""
  safe_mkdir(DIST_DIR)

  args = parse_args()
  headers_dir = os.path.join(DIST_DIR, 'node-{0}'.format(args.version))

  copy_headers(headers_dir)
  create_header_tarball(headers_dir)

  # Upload node's headers to S3.
  bucket, access_key, secret_key = s3_config()
  upload_node(bucket, access_key, secret_key, args.version)

  # Regenerate and upload the SHASUMS.txt via the sibling script.
  checksum_script = os.path.join(SOURCE_ROOT, 'script', 'upload-checksums.py')
  execute([sys.executable, checksum_script, '-v', args.version])
2015-01-23 01:11:10 +00:00
|
|
|
|
|
|
|
def parse_args():
  """Parse command-line flags; the version flag is mandatory."""
  parser = argparse.ArgumentParser(description='upload sumsha file')
  parser.add_argument('-v', '--version', required=True,
                      help='Specify the version')
  return parser.parse_args()
|
|
|
|
|
|
|
|
|
|
|
|
def copy_headers(dist_headers_dir):
  """Collect node and V8 headers into dist_headers_dir, preserving layout."""
  safe_mkdir(dist_headers_dir)

  # Copy standard node headers from node. repository.
  for include_path in HEADERS_DIRS:
    walk_root = os.path.join(NODE_DIR, include_path)
    for dirpath, _, filenames in os.walk(walk_root):
      wanted = [name for name in filenames
                if os.path.splitext(name)[1] in HEADERS_SUFFIX]
      for name in wanted:
        copy_source_file(os.path.join(dirpath, name), NODE_DIR,
                         dist_headers_dir)

  for extra in HEADERS_FILES:
    copy_source_file(os.path.join(NODE_DIR, extra), NODE_DIR,
                     dist_headers_dir)

  # Copy V8 headers from chromium's repository.
  src = os.path.join(SOURCE_ROOT, 'vendor', 'brightray', 'vendor', 'download',
                     'libchromiumcontent', 'src')
  for dirpath, _, filenames in os.walk(os.path.join(src, 'v8')):
    for name in filenames:
      if os.path.splitext(name)[1] not in HEADERS_SUFFIX:
        continue
      # V8 headers land under deps/ in the dist tree, relative to src.
      copy_source_file(os.path.join(dirpath, name), src,
                       os.path.join(dist_headers_dir, 'deps'))
|
|
|
|
|
|
|
|
|
|
|
|
def create_header_tarball(dist_headers_dir):
  """Pack dist_headers_dir into a sibling .tar.gz archive.

  The archive entry is added relative to DIST_DIR so it unpacks to a
  single 'node-<version>' directory.
  """
  target = dist_headers_dir + '.tar.gz'
  with scoped_cwd(DIST_DIR):
    # Use a context manager so the tarball is finalized and closed even
    # if add() raises; the original leaked the handle on error.
    with tarfile.open(name=target, mode='w:gz') as tarball:
      tarball.add(os.path.relpath(dist_headers_dir))
|
|
|
|
|
|
|
|
|
|
|
|
def copy_source_file(source, start, destination):
  """Copy `source` into `destination`, keeping its path relative to `start`."""
  rel_path = os.path.relpath(source, start=start)
  target = os.path.join(destination, rel_path)
  # Create intermediate directories on demand so nested headers copy cleanly.
  safe_mkdir(os.path.dirname(target))
  shutil.copy2(source, target)
|
|
|
|
|
|
|
|
|
2015-01-23 01:18:31 +00:00
|
|
|
def upload_node(bucket, access_key, secret_key, version):
  """Upload the node headers tarball (and, on Windows, node.lib and
  index.json) to the S3 release bucket.

  bucket/access_key/secret_key come from s3_config(); version is the
  release version string used in the S3 key prefix.
  """
  # cwd must be DIST_DIR so the glob matches the tarball produced by
  # create_header_tarball().
  with scoped_cwd(DIST_DIR):
    s3put(bucket, access_key, secret_key, DIST_DIR,
          'atom-shell/dist/{0}'.format(version), glob.glob('node-*.tar.gz'))

  if PLATFORM == 'win32':
    target_arch = get_target_arch()
    # ia32 keeps node.lib at the top level; x64 goes under an 'x64'
    # subdirectory — presumably mirroring official node dist layout
    # (TODO confirm against consumers).
    if target_arch == 'ia32':
      node_lib = os.path.join(DIST_DIR, 'node.lib')
    else:
      node_lib = os.path.join(DIST_DIR, 'x64', 'node.lib')
    safe_mkdir(os.path.dirname(node_lib))

    # Copy atom.lib to node.lib
    atom_lib = os.path.join(OUT_DIR, 'node.dll.lib')
    shutil.copy2(atom_lib, node_lib)

    # Upload the node.lib.
    s3put(bucket, access_key, secret_key, DIST_DIR,
          'atom-shell/dist/{0}'.format(version), [node_lib])

    # Upload the index.json
    # NOTE(review): this runs the built atom.exe to dump version info, so
    # it can only execute on Windows here — confirm that is intended.
    with scoped_cwd(SOURCE_ROOT):
      atom_shell = os.path.join(OUT_DIR, 'atom.exe')
      index_json = os.path.relpath(os.path.join(OUT_DIR, 'index.json'))
      execute([atom_shell,
               os.path.join('script', 'dump-version-info.js'),
               index_json])
      s3put(bucket, access_key, secret_key, OUT_DIR, 'atom-shell/dist',
            [index_json])
|
2015-01-23 01:18:31 +00:00
|
|
|
|
|
|
|
|
2015-01-23 01:11:10 +00:00
|
|
|
if __name__ == '__main__':
  # main() returns None, so sys.exit reports status 0 on success.
  sys.exit(main())
|