electron/script/upload-node-headers.py

#!/usr/bin/env python
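"""Packages the headers of the bundled Node (and the matching V8 headers)
into dist/node-<version>.tar.gz and uploads the tarball, plus node.lib on
Windows, to atom-shell/dist/<version> on S3.

Usage (the version number is illustrative):
  script/upload-node-headers.py -v 0.24.0
"""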
import argparse
import glob
import os
import shutil
import sys
import tarfile

from lib.config import PLATFORM, get_target_arch
from lib.util import execute, safe_mkdir, scoped_cwd, s3_config, s3put

SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
DIST_DIR = os.path.join(SOURCE_ROOT, 'dist')
NODE_DIR = os.path.join(SOURCE_ROOT, 'vendor', 'node')
OUT_DIR = os.path.join(SOURCE_ROOT, 'out', 'R')
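
# Header extensions to keep, and the directories and standalone files that
# are copied out of the bundled Node source tree.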
HEADERS_SUFFIX = [
  '.h',
  '.gypi',
]
HEADERS_DIRS = [
  'src',
  'deps/http_parser',
  'deps/zlib',
  'deps/uv',
  'deps/npm',
  'deps/mdb_v8',
]
HEADERS_FILES = [
  'common.gypi',
  'config.gypi',
]


def main():
  safe_mkdir(DIST_DIR)
  args = parse_args()
  dist_headers_dir = os.path.join(DIST_DIR, 'node-{0}'.format(args.version))
  copy_headers(dist_headers_dir)
  create_header_tarball(dist_headers_dir)

  # Upload node's headers to S3.
  bucket, access_key, secret_key = s3_config()
  upload_node(bucket, access_key, secret_key, args.version)


def parse_args():
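  # The version string names both the header tarball and the S3 path.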
  parser = argparse.ArgumentParser(description='upload node headers')
  parser.add_argument('-v', '--version', help='Specify the version',
                      required=True)
  return parser.parse_args()


def copy_headers(dist_headers_dir):
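  # Mirror the bundled Node headers and the V8 headers from libchromiumcontent
  # into dist/node-<version>/, preserving their relative paths.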
  safe_mkdir(dist_headers_dir)

  # Copy standard node headers from node's repository.
  for include_path in HEADERS_DIRS:
    abs_path = os.path.join(NODE_DIR, include_path)
    for dirpath, _, filenames in os.walk(abs_path):
      for filename in filenames:
        extension = os.path.splitext(filename)[1]
        if extension not in HEADERS_SUFFIX:
          continue
        copy_source_file(os.path.join(dirpath, filename), NODE_DIR,
                         dist_headers_dir)

  for other_file in HEADERS_FILES:
    copy_source_file(os.path.join(NODE_DIR, other_file), NODE_DIR,
                     dist_headers_dir)

  # Copy V8 headers from chromium's repository.
  src = os.path.join(SOURCE_ROOT, 'vendor', 'brightray', 'vendor', 'download',
                     'libchromiumcontent', 'src')
  for dirpath, _, filenames in os.walk(os.path.join(src, 'v8')):
    for filename in filenames:
      extension = os.path.splitext(filename)[1]
      if extension not in HEADERS_SUFFIX:
        continue
      copy_source_file(os.path.join(dirpath, filename), src,
                       os.path.join(dist_headers_dir, 'deps'))


def create_header_tarball(dist_headers_dir):
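  # Pack the copied headers into dist/node-<version>.tar.gz, with paths stored
  # relative to the dist directory.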
  target = dist_headers_dir + '.tar.gz'
  with scoped_cwd(DIST_DIR):
    tarball = tarfile.open(name=target, mode='w:gz')
    tarball.add(os.path.relpath(dist_headers_dir))
    tarball.close()


def copy_source_file(source, start, destination):
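  # Copy `source` into `destination`, re-creating its path relative to `start`
  # and any missing intermediate directories.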
  relative = os.path.relpath(source, start=start)
  final_destination = os.path.join(destination, relative)
  safe_mkdir(os.path.dirname(final_destination))
  shutil.copy2(source, final_destination)


def upload_node(bucket, access_key, secret_key, version):
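  # Upload the header tarball to atom-shell/dist/<version>/ on S3; on Windows,
  # also publish the node.dll.lib import library from the build output as
  # node.lib.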
  with scoped_cwd(DIST_DIR):
    s3put(bucket, access_key, secret_key, DIST_DIR,
          'atom-shell/dist/{0}'.format(version), glob.glob('node-*.tar.gz'))

    if PLATFORM == 'win32':
      if get_target_arch() == 'ia32':
        node_lib = os.path.join(DIST_DIR, 'node.lib')
      else:
        node_lib = os.path.join(DIST_DIR, 'x64', 'node.lib')
      safe_mkdir(os.path.dirname(node_lib))

      # Copy node.dll.lib to node.lib.
      atom_lib = os.path.join(OUT_DIR, 'node.dll.lib')
      shutil.copy2(atom_lib, node_lib)

      # Upload the node.lib.
      s3put(bucket, access_key, secret_key, DIST_DIR,
            'atom-shell/dist/{0}'.format(version), [node_lib])


if __name__ == '__main__':
  sys.exit(main())