chore: cleanup pylint violations (#26252)

* chore: cleanup pylint violations

* chore: cleanup pylint violations
David Sanders 2020-11-01 22:43:21 -08:00 committed by GitHub
parent 4c40ce09fd
commit 2a392c11f8
5 changed files with 55 additions and 21 deletions


@@ -15,5 +15,12 @@ args = [cmd, "run",
 try:
   subprocess.check_output(args, stderr=subprocess.STDOUT)
 except subprocess.CalledProcessError as e:
-  print("NPM script '" + sys.argv[2] + "' failed with code '" + str(e.returncode) + "':\n" + e.output)
+  print(
+    "NPM script '"
+    + sys.argv[2]
+    + "' failed with code '"
+    + str(e.returncode)
+    + "':\n"
+    + e.output
+  )
   sys.exit(e.returncode)


@@ -19,10 +19,10 @@ from get_toolchain_if_necessary import CalculateHash
 @contextlib.contextmanager
-def cwd(dir):
+def cwd(directory):
   curdir = os.getcwd()
   try:
-    os.chdir(dir)
+    os.chdir(directory)
     yield
   finally:
     os.chdir(curdir)
@@ -70,12 +70,18 @@ def windows_profile():
   win_sdk_dir = SetEnvironmentAndGetSDKDir()
   path = NormalizePath(os.environ['GYP_MSVS_OVERRIDE_PATH'])
+  # since current windows executable are symbols path dependant,
+  # profile the current directory too
   return {
-    'pwd': os.getcwd(), # since current windows executable are symbols path dependant, profile the current directory too
+    'pwd': os.getcwd(),
     'installed_software': windows_installed_software(),
     'sdks': [
       {'name': 'vs', 'path': path, 'hash': calculate_hash(path)},
-      {'name': 'wsdk', 'path': win_sdk_dir, 'hash': calculate_hash(win_sdk_dir)}
+      {
+        'name': 'wsdk',
+        'path': win_sdk_dir,
+        'hash': calculate_hash(win_sdk_dir),
+      },
     ],
     'runtime_lib_dirs': runtime_dll_dirs,
   }
@@ -93,5 +99,5 @@ if __name__ == '__main__':
   parser = optparse.OptionParser()
   parser.add_option('--output-json', metavar='FILE', default='profile.json',
                     help='write information about toolchain to FILE')
-  options, args = parser.parse_args()
-  sys.exit(main(options))
+  opts, args = parser.parse_args()
+  sys.exit(main(opts))
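
For reference, the renamed helper above is a standard context manager; a minimal, self-contained usage sketch (the '/tmp' path is assumed purely for illustration):

import contextlib
import os

@contextlib.contextmanager
def cwd(directory):
  curdir = os.getcwd()
  try:
    os.chdir(directory)
    yield
  finally:
    os.chdir(curdir)

# temporarily work inside another directory, then restore the original one
with cwd('/tmp'):       # assumed example path
  print(os.getcwd())    # now inside the temporary directory
print(os.getcwd())      # back in the directory we started from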


@@ -12,5 +12,7 @@ subprocess.check_output(["rm", "-rf", dest])
 subprocess.check_output(["cp", "-a", source, dest])
 # Strip headers, we do not need to ship them
-subprocess.check_output(["rm", "-r", os.path.join(dest, 'Headers')])
-subprocess.check_output(["rm", "-r", os.path.join(dest, 'Versions', 'Current', 'Headers')])
+subprocess.check_output(["rm", "-r", os.path.join(dest, "Headers")])
+subprocess.check_output(
+  ["rm", "-r", os.path.join(dest, "Versions", "Current", "Headers")]
+)


@@ -20,11 +20,12 @@ PATHS_TO_SKIP = [
   './libVkICD_mock_',
   # Skip because these are outputs that we don't need.
   './VkICD_mock_',
-  # Skip because its an output of create_bundle from //build/config/mac/rules.gni
-  # that we don't need
+  # Skip because its an output of create_bundle from
+  # //build/config/mac/rules.gni that we don't need
   'Electron.dSYM',
   # Refs https://chromium-review.googlesource.com/c/angle/angle/+/2425197.
-  # Remove this when Angle themselves remove the file: https://issuetracker.google.com/issues/168736059
+  # Remove this when Angle themselves remove the file:
+  # https://issuetracker.google.com/issues/168736059
   'gen/angle/angle_commit.h',
   # //chrome/browser:resources depends on this via
   # //chrome/browser/resources/ssl/ssl_error_assistant, but we don't need to
@@ -50,7 +51,12 @@ def skip_path(dep, dist_zip, target_cpu):
   should_skip = (
     any(dep.startswith(path) for path in PATHS_TO_SKIP) or
     any(dep.endswith(ext) for ext in EXTENSIONS_TO_SKIP) or
-    ('arm' in target_cpu and dist_zip == 'mksnapshot.zip' and dep == 'snapshot_blob.bin'))
+    (
+      "arm" in target_cpu
+      and dist_zip == "mksnapshot.zip"
+      and dep == "snapshot_blob.bin"
+    )
+  )
   if should_skip:
     print("Skipping {}".format(dep))
   return should_skip
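
A minimal, self-contained sketch of how the reworked skip check behaves; the skip lists are stubbed with assumed values just to make it runnable:

PATHS_TO_SKIP = ['./VkICD_mock_']   # assumed stub values
EXTENSIONS_TO_SKIP = ['.pdb']

def skip_path(dep, dist_zip, target_cpu):
  should_skip = (
    any(dep.startswith(path) for path in PATHS_TO_SKIP) or
    any(dep.endswith(ext) for ext in EXTENSIONS_TO_SKIP) or
    (
      "arm" in target_cpu
      and dist_zip == "mksnapshot.zip"
      and dep == "snapshot_blob.bin"
    )
  )
  if should_skip:
    print("Skipping {}".format(dep))
  return should_skip

# snapshot_blob.bin is only dropped from arm mksnapshot zips
print(skip_path('snapshot_blob.bin', 'mksnapshot.zip', 'arm64'))  # True
print(skip_path('snapshot_blob.bin', 'dist.zip', 'x64'))          # False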
@@ -64,7 +70,7 @@ def execute(argv):
     raise e

 def main(argv):
-  dist_zip, runtime_deps, target_cpu, target_os, flatten_val = argv
+  dist_zip, runtime_deps, target_cpu, _, flatten_val = argv
   should_flatten = flatten_val == "true"
   dist_files = set()
   with open(runtime_deps) as f:
@@ -75,17 +81,28 @@ def main(argv):
   if sys.platform == 'darwin' and not should_flatten:
     execute(['zip', '-r', '-y', dist_zip] + list(dist_files))
   else:
-    with zipfile.ZipFile(dist_zip, 'w', zipfile.ZIP_DEFLATED, allowZip64=True) as z:
+    with zipfile.ZipFile(
+      dist_zip, 'w', zipfile.ZIP_DEFLATED, allowZip64=True
+    ) as z:
       for dep in dist_files:
         if os.path.isdir(dep):
-          for root, dirs, files in os.walk(dep):
-            for file in files:
-              z.write(os.path.join(root, file))
+          for root, _, files in os.walk(dep):
+            for filename in files:
+              z.write(os.path.join(root, filename))
         else:
           basename = os.path.basename(dep)
           dirname = os.path.dirname(dep)
-          arcname = os.path.join(dirname, 'chrome-sandbox') if basename == 'chrome_sandbox' else dep
-          z.write(dep, os.path.basename(arcname) if should_flatten else arcname)
+          arcname = (
+            os.path.join(dirname, 'chrome-sandbox')
+            if basename == 'chrome_sandbox'
+            else dep
+          )
+          z.write(
+            dep,
+            os.path.basename(arcname)
+            if should_flatten
+            else arcname,
+          )

 if __name__ == '__main__':
   sys.exit(main(sys.argv[1:]))


@@ -49,7 +49,9 @@ def getBrokenLinks(filepath):
   f.close()
   linkRegexLink = re.compile('\[(.*?)\]\((?P<link>(.*?))\)')
-  referenceLinkRegex = re.compile('^\s{0,3}\[.*?\]:\s*(?P<link>[^<\s]+|<[^<>\r\n]+>)')
+  referenceLinkRegex = re.compile(
+    '^\s{0,3}\[.*?\]:\s*(?P<link>[^<\s]+|<[^<>\r\n]+>)'
+  )
   links = []
   for line in lines:
     matchLinks = linkRegexLink.search(line)
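
For reference, a small self-contained check of what the wrapped reference-link pattern captures; the sample Markdown line is assumed for illustration:

import re

referenceLinkRegex = re.compile(
  '^\s{0,3}\[.*?\]:\s*(?P<link>[^<\s]+|<[^<>\r\n]+>)'
)

# matches reference-style link definitions, e.g. "[electron]: https://www.electronjs.org"
match = referenceLinkRegex.search('[electron]: https://www.electronjs.org')
if match:
  print(match.group('link'))  # prints https://www.electronjs.org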