build: mtime cache for patches to speed up build (#25881)

David Sanders 2020-10-20 11:49:57 -07:00 committed by GitHub
parent c4525b4ea6
commit f7945ade07
3 changed files with 221 additions and 1 deletion

.gitignore

@@ -65,4 +65,7 @@ ts-gen
# Used to accelerate CI builds
.depshash
.depshash-target
# Used to accelerate builds after sync
patches/mtime-cache.json
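
Background: build tools like ninja decide what to rebuild from file mtimes, so re-applying identical patches after a sync dirties every patched file even though the bytes are unchanged. A minimal sketch of the effect (hypothetical file, not part of the commit):

import os
import time

with open("demo.txt", "w") as f:
    f.write("same contents\n")
before = os.path.getmtime("demo.txt")
time.sleep(1)
with open("demo.txt", "w") as f:
    f.write("same contents\n")  # identical bytes, rewritten
after = os.path.getmtime("demo.txt")
assert after > before  # mtime-based tools now consider the file changed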

DEPS

@@ -38,6 +38,9 @@ vars = {
  # To be able to build clean Chromium from sources.
  'apply_patches': True,
  # To use an mtime cache for patched files to speed up builds.
  'use_mtime_cache': True,
  # To allow in-house builds to checkout those manually.
  'checkout_chromium': True,
  'checkout_node': True,
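
The new variable can be toggled per checkout via gclient's custom_vars; a sketch of a .gclient solutions entry overriding it (solution name, url, and layout are illustrative, assuming a standard gclient setup):

solutions = [
  {
    'name': 'src/electron',
    'url': 'https://github.com/electron/electron',
    'deps_file': 'DEPS',
    'custom_vars': {
      'use_mtime_cache': False,  # opt out of the mtime cache
    },
  },
]
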
@@ -112,6 +115,23 @@ deps = {
  }
}

pre_deps_hooks = [
  {
    'name': 'generate_mtime_cache',
    'condition': '(checkout_chromium and apply_patches and use_mtime_cache) and process_deps',
    'pattern': 'src/electron',
    'action': [
      'python3',
      'src/electron/script/patches-mtime-cache.py',
      'generate',
      '--cache-file',
      'src/electron/patches/mtime-cache.json',
      '--patches-config',
      'src/electron/patches/config.json',
    ],
  },
]

hooks = [
  {
    'name': 'patch_chromium',
@@ -123,6 +143,18 @@ hooks = [
      'src/electron/patches/config.json',
    ],
  },
  {
    'name': 'apply_mtime_cache',
    'condition': '(checkout_chromium and apply_patches and use_mtime_cache) and process_deps',
    'pattern': 'src/electron',
    'action': [
      'python3',
      'src/electron/script/patches-mtime-cache.py',
      'apply',
      '--cache-file',
      'src/electron/patches/mtime-cache.json',
    ],
  },
  {
    'name': 'electron_external_binaries',
    'pattern': 'src/electron/script/update-external-binaries.py',
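
Taken together: generate_mtime_cache runs as a pre-deps hook, recording hashes and timestamps of the currently patched files before gclient sync touches them, and apply_mtime_cache is listed after patch_chromium, so it runs once patches have been re-applied and restores the saved mtimes for any file whose content is unchanged. The hooks amount to these two commands, run from the root of the gclient checkout (gclient normally runs them for you):

python3 src/electron/script/patches-mtime-cache.py generate \
    --cache-file src/electron/patches/mtime-cache.json \
    --patches-config src/electron/patches/config.json
python3 src/electron/script/patches-mtime-cache.py apply \
    --cache-file src/electron/patches/mtime-cache.json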

script/patches-mtime-cache.py

@@ -0,0 +1,185 @@
#!/usr/bin/env python

from __future__ import print_function

import argparse
import hashlib
import json
import os
import posixpath
import sys
import traceback

from lib.patches import patch_from_dir


def patched_file_paths(patches_config):
    for patch_dir, repo in patches_config.items():
        for line in patch_from_dir(patch_dir).split("\n"):
            if line.startswith("+++"):
                yield posixpath.join(repo, line[6:])
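
# Worked example (hypothetical values): with patches/config.json mapping a
# patch directory to the repo "src", a patch header line
#   +++ b/chrome/browser/ui/foo.cc
# yields "src/chrome/browser/ui/foo.cc" -- line[6:] drops the "+++ b/" prefix.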

def generate_cache(patches_config):
    mtime_cache = {}

    for file_path in patched_file_paths(patches_config):
        if file_path in mtime_cache:
            # File may be patched multiple times, we don't need to
            # rehash it since we are looking at the final result
            continue

        if not os.path.exists(file_path):
            print("Skipping non-existent file:", file_path)
            continue

        with open(file_path, "rb") as f:
            mtime_cache[file_path] = {
                "sha256": hashlib.sha256(f.read()).hexdigest(),
                "atime": os.path.getatime(file_path),
                "mtime": os.path.getmtime(file_path),
            }

    return mtime_cache
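
# Shape of the generated cache, as written to patches/mtime-cache.json
# (hypothetical path, hash, and timestamps):
#   {
#     "src/chrome/browser/ui/foo.cc": {
#       "sha256": "9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08",
#       "atime": 1603219797.0,
#       "mtime": 1603219797.0
#     }
#   }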

def apply_mtimes(mtime_cache):
    updates = []

    for file_path, metadata in mtime_cache.items():
        if not os.path.exists(file_path):
            print("Skipping non-existent file:", file_path)
            continue

        with open(file_path, "rb") as f:
            if hashlib.sha256(f.read()).hexdigest() == metadata["sha256"]:
                updates.append(
                    [file_path, metadata["atime"], metadata["mtime"]]
                )

    # We can't atomically set the times for all files at once, but by waiting
    # to update until we've checked all the files we at least have less chance
    # of only updating some files due to an error on one of the files
    for [file_path, atime, mtime] in updates:
        os.utime(file_path, (atime, mtime))

def set_mtimes(patches_config, mtime):
    mtime_cache = {}

    for file_path in patched_file_paths(patches_config):
        if file_path in mtime_cache:
            continue

        if not os.path.exists(file_path):
            print("Skipping non-existent file:", file_path)
            continue

        mtime_cache[file_path] = mtime

    for file_path in mtime_cache:
        os.utime(file_path, (mtime_cache[file_path], mtime_cache[file_path]))

def main():
    parser = argparse.ArgumentParser(
        description="Make mtime cache for patched files"
    )
    subparsers = parser.add_subparsers(
        dest="operation", help="sub-command help"
    )

    apply_subparser = subparsers.add_parser(
        "apply", help="apply the mtimes from the cache"
    )
    apply_subparser.add_argument(
        "--cache-file", required=True, help="mtime cache file"
    )
    apply_subparser.add_argument(
        "--preserve-cache",
        action="store_true",
        help="don't delete cache after applying",
    )

    generate_subparser = subparsers.add_parser(
        "generate", help="generate the mtime cache"
    )
    generate_subparser.add_argument(
        "--cache-file",
        type=argparse.FileType("w"),
        required=True,
        help="mtime cache file",
    )

    set_subparser = subparsers.add_parser(
        "set", help="set all mtimes to a specific date"
    )
    set_subparser.add_argument(
        "--mtime",
        type=int,
        required=True,
        help="mtime to use for all patched files",
    )

    for subparser in [generate_subparser, set_subparser]:
        subparser.add_argument(
            "--patches-config",
            type=argparse.FileType("r"),
            required=True,
            help="patches' config in the JSON format",
        )

    args = parser.parse_args()

    if args.operation == "generate":
        try:
            mtime_cache = generate_cache(json.load(args.patches_config))
            json.dump(mtime_cache, args.cache_file, indent=2)
        except Exception:
            print(
                "ERROR: failed to generate mtime cache for patches",
                file=sys.stderr,
            )
            traceback.print_exc(file=sys.stderr)
        return 0
    elif args.operation == "apply":
        if not os.path.exists(args.cache_file):
            print("ERROR: --cache-file does not exist", file=sys.stderr)
            return 0  # Cache file may not exist, fail more gracefully

        try:
            with open(args.cache_file, mode="r") as f:
                apply_mtimes(json.load(f))

            if not args.preserve_cache:
                os.remove(args.cache_file)
        except Exception:
            print(
                "ERROR: failed to apply mtime cache for patches",
                file=sys.stderr,
            )
            traceback.print_exc(file=sys.stderr)
        return 0
    elif args.operation == "set":
        # Python 2/3 compatibility
        try:
            user_input = raw_input
        except NameError:
            user_input = input

        answer = user_input(
            "WARNING: Manually setting mtimes could mess up your build. "
            "If you're sure, type yes: "
        )
        if answer.lower() != "yes":
            print("Aborting")
            return 0

        set_mtimes(json.load(args.patches_config), args.mtime)
        return 0


if __name__ == "__main__":
    sys.exit(main())
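
The third subcommand, set, is not wired into any hook and is meant for manual use; it prompts for confirmation first, since hand-set mtimes can hide real changes from the build. A hypothetical invocation, run from the root of the gclient checkout with an arbitrary timestamp:

python3 src/electron/script/patches-mtime-cache.py set --mtime 1603219797 \
    --patches-config src/electron/patches/config.json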