Build spec modules against locally generated node headers tarball
This commit is contained in:
parent
c6c93211be
commit
c19c14d0b3
4 changed files with 117 additions and 80 deletions
|
@ -63,6 +63,7 @@ def main():
|
|||
create_chrome_version_h()
|
||||
touch_config_gypi()
|
||||
run_update(defines, args.msvs)
|
||||
create_node_headers()
|
||||
update_electron_modules('spec', args.target_arch)
|
||||
|
||||
|
||||
|
@ -184,9 +185,11 @@ def update_node_modules(dirname, env=None):
|
|||
|
||||
def update_electron_modules(dirname, target_arch):
  """Install/rebuild npm modules under *dirname* against Electron's headers.

  Configures npm (via environment variables) to build native modules for
  the given target_arch and the current Electron version, using the
  locally generated node headers tarball in dist/ instead of downloading
  headers from the dist URL.
  """
  # Fetch the version once; it is used both as the npm target and in the
  # tarball file name.  (Previously npm_config_target was assigned twice,
  # once from a second get_electron_version() call.)
  version = get_electron_version()
  env = os.environ.copy()
  env['npm_config_arch'] = target_arch
  env['npm_config_target'] = version
  env['npm_config_disturl'] = 'https://atom.io/download/electron'
  env['npm_config_tarball'] = os.path.join(SOURCE_ROOT, 'dist',
                                           'node-{0}.tar.gz'.format(version))
  update_node_modules(dirname, env)
|
||||
|
||||
|
||||
|
@ -260,5 +263,11 @@ def run_update(defines, msvs):
|
|||
execute_stdout(args)
|
||||
|
||||
|
||||
def create_node_headers():
  """Generate the node headers tarball for the current Electron version."""
  script_path = os.path.join(SOURCE_ROOT, 'script', 'create-node-headers.py')
  execute_stdout([sys.executable, script_path,
                  '--version', get_electron_version()])
|
||||
if __name__ == '__main__':
  # Propagate main()'s return value as the process exit status.
  raise SystemExit(main())
|
104
script/create-node-headers.py
Executable file
104
script/create-node-headers.py
Executable file
|
@ -0,0 +1,104 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
import tarfile
|
||||
|
||||
from lib.util import safe_mkdir, scoped_cwd
|
||||
|
||||
|
||||
# Repository layout anchors.
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
DIST_DIR = os.path.join(SOURCE_ROOT, 'dist')
NODE_DIR = os.path.join(SOURCE_ROOT, 'vendor', 'node')
OUT_DIR = os.path.join(SOURCE_ROOT, 'out', 'R')

# File extensions treated as headers when walking the node tree.
HEADERS_SUFFIX = [
  '.h',
  '.gypi',
]
# Directories (relative to NODE_DIR) scanned recursively for headers.
HEADERS_DIRS = [
  'src',
  'deps/http_parser',
  'deps/zlib',
  'deps/uv',
  'deps/npm',
  'deps/mdb_v8',
]
# Individual top-level files shipped alongside the headers.
HEADERS_FILES = [
  'common.gypi',
  'config.gypi',
]
|
||||
|
||||
|
||||
def main():
  """Build the node/iojs header directories in dist/ and tar each one up."""
  safe_mkdir(DIST_DIR)

  args = parse_args()
  # The same header set is published under three naming schemes so that
  # node-gyp can resolve it whichever dist-url flavor is in use.
  header_dirs = [
      os.path.join(DIST_DIR, 'node-{0}'.format(args.version)),
      os.path.join(DIST_DIR, 'iojs-{0}'.format(args.version)),
      os.path.join(DIST_DIR, 'iojs-{0}-headers'.format(args.version)),
  ]
  for headers_dir in header_dirs:
    copy_headers(headers_dir)
    create_header_tarball(headers_dir)
|
||||
|
||||
|
||||
def parse_args():
  """Return parsed command-line options; a --version value is mandatory."""
  arg_parser = argparse.ArgumentParser(
      description='create node header tarballs')
  arg_parser.add_argument('-v', '--version', required=True,
                          help='Specify the version')
  return arg_parser.parse_args()
|
||||
|
||||
|
||||
def copy_headers(dist_headers_dir):
  """Collect node and V8 headers into *dist_headers_dir*.

  Mirrors header-like files (see HEADERS_SUFFIX) from the vendored node
  tree, then the V8 headers from the libchromiumcontent checkout, keeping
  each file's path relative to its source root.
  """
  safe_mkdir(dist_headers_dir)

  # Standard node headers from the vendored node repository.
  for include_path in HEADERS_DIRS:
    walk_root = os.path.join(NODE_DIR, include_path)
    for dirpath, _, filenames in os.walk(walk_root):
      for filename in filenames:
        if os.path.splitext(filename)[1] in HEADERS_SUFFIX:
          copy_source_file(os.path.join(dirpath, filename), NODE_DIR,
                           dist_headers_dir)
  for other_file in HEADERS_FILES:
    copy_source_file(os.path.join(NODE_DIR, other_file), NODE_DIR,
                     dist_headers_dir)

  # V8 headers from chromium's repository.
  src = os.path.join(SOURCE_ROOT, 'vendor', 'brightray', 'vendor', 'download',
                     'libchromiumcontent', 'src')
  for dirpath, _, filenames in os.walk(os.path.join(src, 'v8')):
    for filename in filenames:
      if os.path.splitext(filename)[1] in HEADERS_SUFFIX:
        copy_source_file(os.path.join(dirpath, filename), src,
                         os.path.join(dist_headers_dir, 'deps'))
||||
|
||||
def create_header_tarball(dist_headers_dir):
  """Pack *dist_headers_dir* into a gzipped tarball alongside it.

  The archive is built from inside DIST_DIR so entries carry a path
  relative to the dist directory rather than an absolute path.
  """
  target = dist_headers_dir + '.tar.gz'
  with scoped_cwd(DIST_DIR):
    # Context manager guarantees the archive is closed (and its gzip
    # stream flushed) even if add() raises, instead of leaking the handle.
    with tarfile.open(name=target, mode='w:gz') as tarball:
      tarball.add(os.path.relpath(dist_headers_dir))
||||
|
||||
def copy_source_file(source, start, destination):
  """Copy *source* into *destination*, preserving its path relative to *start*."""
  relative_path = os.path.relpath(source, start=start)
  target = os.path.join(destination, relative_path)
  # Make sure the mirrored directory exists before copying into it.
  safe_mkdir(os.path.dirname(target))
  shutil.copy2(source, target)
|
||||
if __name__ == '__main__':
  # Propagate main()'s return value as the process exit status.
  raise SystemExit(main())
|
@ -5,50 +5,17 @@ import glob
|
|||
import os
|
||||
import shutil
|
||||
import sys
|
||||
import tarfile
|
||||
|
||||
from lib.config import PLATFORM, get_target_arch, s3_config
|
||||
from lib.util import execute, safe_mkdir, scoped_cwd, s3put
|
||||
from lib.util import safe_mkdir, scoped_cwd, s3put
|
||||
|
||||
|
||||
# Repository layout anchors.
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
DIST_DIR = os.path.join(SOURCE_ROOT, 'dist')
NODE_DIR = os.path.join(SOURCE_ROOT, 'vendor', 'node')
OUT_DIR = os.path.join(SOURCE_ROOT, 'out', 'R')

# File extensions treated as headers when walking the node tree.
HEADERS_SUFFIX = [
  '.h',
  '.gypi',
]
# Directories (relative to NODE_DIR) scanned recursively for headers.
HEADERS_DIRS = [
  'src',
  'deps/http_parser',
  'deps/zlib',
  'deps/uv',
  'deps/npm',
  'deps/mdb_v8',
]
# Individual top-level files shipped alongside the headers.
HEADERS_FILES = [
  'common.gypi',
  'config.gypi',
]
|
||||
|
||||
|
||||
def main():
|
||||
safe_mkdir(DIST_DIR)
|
||||
|
||||
args = parse_args()
|
||||
node_headers_dir = os.path.join(DIST_DIR, 'node-{0}'.format(args.version))
|
||||
iojs_headers_dir = os.path.join(DIST_DIR, 'iojs-{0}'.format(args.version))
|
||||
iojs2_headers_dir = os.path.join(DIST_DIR,
|
||||
'iojs-{0}-headers'.format(args.version))
|
||||
|
||||
copy_headers(node_headers_dir)
|
||||
create_header_tarball(node_headers_dir)
|
||||
copy_headers(iojs_headers_dir)
|
||||
create_header_tarball(iojs_headers_dir)
|
||||
copy_headers(iojs2_headers_dir)
|
||||
create_header_tarball(iojs2_headers_dir)
|
||||
|
||||
# Upload node's headers to S3.
|
||||
bucket, access_key, secret_key = s3_config()
|
||||
|
@ -62,50 +29,6 @@ def parse_args():
|
|||
return parser.parse_args()
|
||||
|
||||
|
||||
def copy_headers(dist_headers_dir):
  """Collect node and V8 headers into *dist_headers_dir*.

  Mirrors header-like files (see HEADERS_SUFFIX) from the vendored node
  tree, then the V8 headers from the libchromiumcontent checkout, keeping
  each file's path relative to its source root.
  """
  safe_mkdir(dist_headers_dir)

  # Standard node headers from the vendored node repository.
  for include_path in HEADERS_DIRS:
    walk_root = os.path.join(NODE_DIR, include_path)
    for dirpath, _, filenames in os.walk(walk_root):
      for filename in filenames:
        if os.path.splitext(filename)[1] in HEADERS_SUFFIX:
          copy_source_file(os.path.join(dirpath, filename), NODE_DIR,
                           dist_headers_dir)
  for other_file in HEADERS_FILES:
    copy_source_file(os.path.join(NODE_DIR, other_file), NODE_DIR,
                     dist_headers_dir)

  # V8 headers from chromium's repository.
  src = os.path.join(SOURCE_ROOT, 'vendor', 'brightray', 'vendor', 'download',
                     'libchromiumcontent', 'src')
  for dirpath, _, filenames in os.walk(os.path.join(src, 'v8')):
    for filename in filenames:
      if os.path.splitext(filename)[1] in HEADERS_SUFFIX:
        copy_source_file(os.path.join(dirpath, filename), src,
                         os.path.join(dist_headers_dir, 'deps'))
||||
|
||||
def create_header_tarball(dist_headers_dir):
  """Pack *dist_headers_dir* into a gzipped tarball alongside it.

  The archive is built from inside DIST_DIR so entries carry a path
  relative to the dist directory rather than an absolute path.
  """
  target = dist_headers_dir + '.tar.gz'
  with scoped_cwd(DIST_DIR):
    # Context manager guarantees the archive is closed (and its gzip
    # stream flushed) even if add() raises, instead of leaking the handle.
    with tarfile.open(name=target, mode='w:gz') as tarball:
      tarball.add(os.path.relpath(dist_headers_dir))
||||
|
||||
def copy_source_file(source, start, destination):
  """Copy *source* into *destination*, preserving its path relative to *start*."""
  relative_path = os.path.relpath(source, start=start)
  target = os.path.join(destination, relative_path)
  # Make sure the mirrored directory exists before copying into it.
  safe_mkdir(os.path.dirname(target))
  shutil.copy2(source, target)
||||
|
||||
def upload_node(bucket, access_key, secret_key, version):
|
||||
with scoped_cwd(DIST_DIR):
|
||||
s3put(bucket, access_key, secret_key, DIST_DIR,
|
||||
|
|
|
@ -101,6 +101,7 @@ def main():
|
|||
run_python_script('upload-windows-pdb.py')
|
||||
|
||||
# Upload node headers.
|
||||
run_python_script('create-node-headers.py', '-v', args.version)
|
||||
run_python_script('upload-node-headers.py', '-v', args.version)
|
||||
|
||||
|
||||
|
|
Loading…
Add table
Reference in a new issue