Add zotero-build repo as scripts folder
Minus obsolete 4.0 files
parent a3d7b58b83
commit fb2b874614
20 changed files with 1596 additions and 26 deletions
53
package-lock.json
generated
|
@ -28,6 +28,7 @@
|
|||
"@babel/eslint-plugin": "^7.16.5",
|
||||
"@babel/preset-react": "^7.16.5",
|
||||
"@zotero/eslint-config": "^1.0.7",
|
||||
"acorn": "^8.8.2",
|
||||
"babel-plugin-transform-es2015-modules-commonjs": "^6.26.2",
|
||||
"browserify": "^14.5.0",
|
||||
"chai": "^4.1.2",
|
||||
|
@ -1089,9 +1090,9 @@
|
|||
"integrity": "sha512-G+chJctFPiiLGvs3+/Mly3apXTcfgE45dT5yp12BcWZ1kUs+gm0qd3/fv4gsz6fVag4mM0moHVpjHDIgph6Psg=="
|
||||
},
|
||||
"node_modules/acorn": {
|
||||
"version": "7.4.1",
|
||||
"resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz",
|
||||
"integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==",
|
||||
"version": "8.8.2",
|
||||
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.8.2.tgz",
|
||||
"integrity": "sha512-xjIYgE8HBrkpd/sJqOGNspf8uHG+NOHGOw6a/Urj8taM2EXfdNAH2oFcPeIFfsv3+kz/mJrS5VuMqbNLjCa2vw==",
|
||||
"dev": true,
|
||||
"bin": {
|
||||
"acorn": "bin/acorn"
|
||||
|
@ -1120,6 +1121,18 @@
|
|||
"xtend": "^4.0.2"
|
||||
}
|
||||
},
|
||||
"node_modules/acorn-node/node_modules/acorn": {
|
||||
"version": "7.4.1",
|
||||
"resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz",
|
||||
"integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==",
|
||||
"dev": true,
|
||||
"bin": {
|
||||
"acorn": "bin/acorn"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/acorn-walk": {
|
||||
"version": "7.2.0",
|
||||
"resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-7.2.0.tgz",
|
||||
|
@ -3179,18 +3192,6 @@
|
|||
"node": "^12.22.0 || ^14.17.0 || >=16.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/espree/node_modules/acorn": {
|
||||
"version": "8.6.0",
|
||||
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.6.0.tgz",
|
||||
"integrity": "sha512-U1riIR+lBSNi3IbxtaHOIKdH8sLFv3NYfNv8sg7ZsNhcfl4HF2++BfqqrNAxoCLQW1iiylOj76ecnaUxz+z9yw==",
|
||||
"dev": true,
|
||||
"bin": {
|
||||
"acorn": "bin/acorn"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/esquery": {
|
||||
"version": "1.4.0",
|
||||
"resolved": "https://registry.npmjs.org/esquery/-/esquery-1.4.0.tgz",
|
||||
|
@ -7688,9 +7689,9 @@
|
|||
"integrity": "sha512-G+chJctFPiiLGvs3+/Mly3apXTcfgE45dT5yp12BcWZ1kUs+gm0qd3/fv4gsz6fVag4mM0moHVpjHDIgph6Psg=="
|
||||
},
|
||||
"acorn": {
|
||||
"version": "7.4.1",
|
||||
"resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz",
|
||||
"integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==",
|
||||
"version": "8.8.2",
|
||||
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.8.2.tgz",
|
||||
"integrity": "sha512-xjIYgE8HBrkpd/sJqOGNspf8uHG+NOHGOw6a/Urj8taM2EXfdNAH2oFcPeIFfsv3+kz/mJrS5VuMqbNLjCa2vw==",
|
||||
"dev": true
|
||||
},
|
||||
"acorn-jsx": {
|
||||
|
@ -7709,6 +7710,14 @@
|
|||
"acorn": "^7.0.0",
|
||||
"acorn-walk": "^7.0.0",
|
||||
"xtend": "^4.0.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"acorn": {
|
||||
"version": "7.4.1",
|
||||
"resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz",
|
||||
"integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==",
|
||||
"dev": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"acorn-walk": {
|
||||
|
@ -9454,14 +9463,6 @@
|
|||
"acorn": "^8.6.0",
|
||||
"acorn-jsx": "^5.3.1",
|
||||
"eslint-visitor-keys": "^3.1.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"acorn": {
|
||||
"version": "8.6.0",
|
||||
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.6.0.tgz",
|
||||
"integrity": "sha512-U1riIR+lBSNi3IbxtaHOIKdH8sLFv3NYfNv8sg7ZsNhcfl4HF2++BfqqrNAxoCLQW1iiylOj76ecnaUxz+z9yw==",
|
||||
"dev": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"esquery": {
|
||||
|
|
|
@ -35,6 +35,7 @@
|
|||
"@babel/eslint-plugin": "^7.16.5",
|
||||
"@babel/preset-react": "^7.16.5",
|
||||
"@zotero/eslint-config": "^1.0.7",
|
||||
"acorn": "^8.8.2",
|
||||
"babel-plugin-transform-es2015-modules-commonjs": "^6.26.2",
|
||||
"browserify": "^14.5.0",
|
||||
"chai": "^4.1.2",
|
||||
|
|
2
scripts/.gitignore
vendored
Normal file
|
@ -0,0 +1,2 @@
|
|||
config.sh
|
||||
node_modules
|
83
scripts/2xize
Executable file
|
@ -0,0 +1,83 @@
|
|||
#!/usr/bin/env python
|
||||
from __future__ import print_function
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
|
||||
def append2x(content, dir):
|
||||
header = """/* BEGIN 2X BLOCK -- DO NOT EDIT MANUALLY -- USE 2XIZE */
|
||||
@media (min-resolution: 1.25dppx) {\n"""
|
||||
footer = "\n}\n"
|
||||
output = ""
|
||||
for line in content.split("\n"):
|
||||
matches = re.match('(.+chrome://zotero/skin/)(.+(?<!@2x)\.png)(.+)', line)
|
||||
|
||||
if not matches:
|
||||
# Skip lines that are indented but don't reference an image, which are
|
||||
# hopefully rule lines
|
||||
if re.match('\s', line):
|
||||
continue
|
||||
# Pass other lines through as is, since those are likely selector lines
|
||||
output += line
|
||||
continue
|
||||
|
||||
png = matches.group(2)
|
||||
png2x = png.replace('.png', '@2x.png')
|
||||
|
||||
if os.path.exists(os.path.join(root_dir, png2x)):
|
||||
output += matches.group(1) + png2x + matches.group(3)
|
||||
else:
|
||||
output += line
|
||||
output += "}"
|
||||
# Collapse all spaces
|
||||
output = re.sub("\s+", " ", output)
|
||||
# Add space before and after "{"
|
||||
output = re.sub("(?!: ){\s*", " { ", output)
|
||||
# Add newline after }
|
||||
output = re.sub("}", "}\n", output)
|
||||
# Add space before }
|
||||
output = re.sub("(?!: )}", " }", output)
|
||||
# Strip comments
|
||||
output = re.sub("/\*[^*]+\*/", "", output)
|
||||
# Remove all blocks without 2x rules
|
||||
output = "\n".join(["\t" + line for line in output.split("\n") if "2x.png" in line])
|
||||
return header + output + footer
|
||||
|
||||
if __name__ == "__main__":
|
||||
if len(sys.argv) < 2:
|
||||
print("""Appends 2x image rules to a CSS file for all images with @2x.png versions in the same directory
|
||||
|
||||
Usage: {0} /path/to/css/file [/path/to/image/directory]""".format(sys.argv[0]))
|
||||
sys.exit(1)
|
||||
|
||||
css_file = sys.argv[1]
|
||||
if not os.path.exists(css_file):
|
||||
print("File not found: " + css_file)
|
||||
sys.exit(1)
|
||||
|
||||
try:
|
||||
root_dir = sys.argv[2]
|
||||
if not os.path.exists(root_dir):
|
||||
print("Directory not found: " + root_dir)
|
||||
sys.exit(1)
|
||||
|
||||
if not os.path.isdir(root_dir):
|
||||
print("{0} is not a directory".format(root_dir))
|
||||
sys.exit(1)
|
||||
except IndexError:
|
||||
root_dir = os.path.dirname(os.path.realpath(css_file))
|
||||
|
||||
# Get file contents through 2x block, which has to be at the end
|
||||
css = ""
|
||||
with open(css_file) as f:
|
||||
for line in f:
|
||||
if "BEGIN 2X BLOCK" in line:
|
||||
break
|
||||
css += line
|
||||
css = css.strip()
|
||||
|
||||
# Add 2x block
|
||||
css += "\n\n\n" + append2x(css, root_dir)
|
||||
|
||||
with open(css_file, 'w') as f:
|
||||
f.write(css)
|
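For reference, a typical 2xize invocation might look like the following; the stylesheet and skin paths are illustrative assumptions, not paths the script requires:

# Append a generated 2x block to a skin stylesheet (hypothetical paths)
./2xize ~/zotero-client/chrome/skin/default/zotero/overlay.css \
    ~/zotero-client/chrome/skin/default/zotero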
8
scripts/README.md
Normal file
|
@ -0,0 +1,8 @@
|
|||
Zotero build scripts
|
||||
================================
|
||||
|
||||
2xize: Add 2x image rules to CSS files
|
||||
|
||||
update-citeproc.sh: Update citeproc.js from Frank Bennett's citeproc-js repository on GitHub (Juris-M/citeproc-js)
|
||||
|
||||
xpi: Build distributable Firefox XPIs from the [Zotero core](https://github.com/zotero/zotero).
|
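The xpi script documents its own options; a minimal invocation, using the example paths from the build_xpi help text later in this commit, might be:

./xpi/build_xpi -s ~/zotero-client/build -x 5.0.1 -z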
1
scripts/config.sh-sample
Normal file
|
@ -0,0 +1 @@
|
|||
export ZOTEROSRC="../zotero"
|
148
scripts/dictionaries/build-dictionaries
Executable file
|
@ -0,0 +1,148 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import urllib.request, json, re
|
||||
from functools import cmp_to_key
|
||||
from locale import strcoll
|
||||
|
||||
locales = {
|
||||
# Keep in sync with locale.js in zotero-client
|
||||
'ar': 'عربي',
|
||||
'bg-BG': 'Български',
|
||||
'br': 'brezhoneg',
|
||||
'ca-AD': 'Català',
|
||||
'cs-CZ': 'Čeština',
|
||||
'da-DK': 'Dansk',
|
||||
'de': 'Deutsch (Deutschland)',
|
||||
'en-CA': 'English (Canada)',
|
||||
'en-US': 'English',
|
||||
'en-GB': 'English (UK)',
|
||||
'es-ES': 'Español',
|
||||
'et-EE': 'Eesti keel',
|
||||
'fa': 'فارسی',
|
||||
'fi-FI': 'suomi',
|
||||
'fr-FR': 'Français',
|
||||
'gl-ES': 'Galego',
|
||||
'hu-HU': 'magyar',
|
||||
'id-ID': 'Bahasa Indonesia',
|
||||
'is-IS': 'íslenska',
|
||||
'it-IT': 'Italiano',
|
||||
'ja-JP': '日本語',
|
||||
'km': 'ខ្មែរ',
|
||||
'ko-KR': '한국어',
|
||||
'lt-LT': 'Lietuvių',
|
||||
'nl-NL': 'Nederlands',
|
||||
'nb-NO': 'Norsk bokmål',
|
||||
'pl-PL': 'Polski',
|
||||
'pt-BR': 'Português (do Brasil)',
|
||||
'pt-PT': 'Português (Europeu)',
|
||||
'ro-RO': 'Română',
|
||||
'ru-RU': 'Русский',
|
||||
'sk-SK': 'slovenčina',
|
||||
'sl-SI': 'Slovenščina',
|
||||
'sr-RS': 'Српски',
|
||||
'sv-SE': 'Svenska',
|
||||
'th-TH': 'ไทย',
|
||||
'tr-TR': 'Türkçe',
|
||||
'uk-UA': 'Українська',
|
||||
'vi-VN': 'Tiếng Việt',
|
||||
'zh-CN': '中文 (简体)',
|
||||
'zh-TW': '正體中文 (繁體)',
|
||||
|
||||
# Additional dictionaries not included as client locales
|
||||
# Names from https://addons.mozilla.org/en-US/firefox/language-tools/
|
||||
'de-AT': 'Deutsch (Österreich)',
|
||||
'de-CH': 'Deutsch (Schweiz)',
|
||||
'el-GR': 'Ελληνικά',
|
||||
'es-AR': 'Español (de Argentina)',
|
||||
'es-MX': 'Español (de México)',
|
||||
'he-HE': 'עברית',
|
||||
'hr-HR': 'Hrvatski',
|
||||
'lv-LV': 'Latviešu',
|
||||
}
|
||||
|
||||
# Locales to sort before other variants
|
||||
primary_locales = ['de-DE', 'en-US', 'es-ES']
|
||||
|
||||
# Generate list of available dictionaries, sorted by user count descending
|
||||
dictionaries = []
|
||||
with urllib.request.urlopen("https://services.addons.mozilla.org/api/v4/addons/language-tools/?app=firefox&type=dictionary") as resp:
|
||||
dictionary_info_list = json.loads(resp.read().decode())['results']
|
||||
n = 0
|
||||
for dictionary_info in dictionary_info_list:
|
||||
n += 1
|
||||
locale = dictionary_info['target_locale']
|
||||
guid = dictionary_info['guid']
|
||||
print(str(n) + '/' + str(len(dictionary_info_list)) + ': ' + dictionary_info['target_locale'])
|
||||
with urllib.request.urlopen("https://services.addons.mozilla.org/api/v4/addons/search/?guid=" + guid) as resp:
|
||||
dictionary = json.loads(resp.read().decode())['results'][0]
|
||||
if dictionary['is_disabled'] or dictionary['status'] != 'public':
|
||||
print('skipping ' + locale + ' ' + guid)
|
||||
continue
|
||||
dictionaries.append({
|
||||
'id': guid,
|
||||
'locale': locale,
|
||||
'version': dictionary['current_version']['version'],
|
||||
'updated': dictionary['last_updated'],
|
||||
'url': dictionary['current_version']['files'][0]['url'],
|
||||
'users': dictionary['average_daily_users'],
|
||||
})
|
||||
dictionaries.sort(key=lambda x: x.get('users'), reverse=True)
|
||||
|
||||
# Find dictionaries best matching the specified locales
|
||||
final_dictionaries = []
|
||||
for locale in locales:
|
||||
locale_lang = re.split('[-_]', locale)[0]
|
||||
# A locale code with the language duplicated (e.g., 'de-DE'), which may not
|
||||
# be the actual code
|
||||
locale_lang_full = "{}-{}".format(locale_lang, locale_lang.upper())
|
||||
|
||||
for i, d in enumerate(dictionaries[:]):
|
||||
# Exact match
|
||||
if (d['locale'] == locale
|
||||
# locale 'de' == dict 'de-DE'
|
||||
or (len(locale) == 2 and d['locale'] == locale_lang_full)
|
||||
# locale 'bg-BG' -> dict 'bg'
|
||||
or (locale == locale_lang_full and d['locale'] == locale_lang)):
|
||||
d['name'] = locales[locale]
|
||||
final_dictionaries.append(d)
|
||||
del(dictionaries[i])
|
||||
break
|
||||
else:
|
||||
# If nothing found, allow a missing or differing region ('cs-CZ' -> 'cs')
|
||||
if len(locale) != 2 and locale != locale_lang_full:
|
||||
for i, d in enumerate(dictionaries[:]):
|
||||
if d['locale'] == locale_lang:
|
||||
d['name'] = locales[locale]
|
||||
final_dictionaries.append(d)
|
||||
del(dictionaries[i])
|
||||
break
|
||||
|
||||
# Sort dictionaries by language code, with a few exceptions
|
||||
def cmp(a, b):
|
||||
for locale in primary_locales:
|
||||
if a['locale'] == locale and b['locale'].startswith(locale[0:3]):
|
||||
return -1
|
||||
if b['locale'] == locale and a['locale'].startswith(locale[0:3]):
|
||||
return 1
|
||||
return strcoll(a['locale'], b['locale'])
|
||||
|
||||
final_dictionaries = sorted(final_dictionaries, key=cmp_to_key(cmp))
|
||||
|
||||
print("")
|
||||
|
||||
for d in final_dictionaries:
|
||||
print("Downloading {}".format(d['url']))
|
||||
urllib.request.urlretrieve(
|
||||
d['url'],
|
||||
'dictionaries/' + d['id'] + '-' + d['version'] + '.xpi'
|
||||
)
|
||||
del(d['url'])
|
||||
|
||||
with open('dictionaries/dictionaries.json', 'w', encoding='utf-8') as f:
|
||||
json.dump(final_dictionaries, f, ensure_ascii=False, sort_keys=True, indent='\t')
|
||||
|
||||
# Save a list of unused dictionaries
|
||||
with open('dictionaries/dictionaries-unused.json', 'w', encoding='utf-8') as f:
|
||||
json.dump(dictionaries, f, ensure_ascii=False, indent='\t')
|
||||
|
||||
print('\ndone')
|
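A usage sketch, assuming the script is run from scripts/dictionaries so that the relative dictionaries/ output directory (kept by the .gitignore below) is available:

cd scripts/dictionaries
./build-dictionaries
# Downloads the selected dictionary XPIs into dictionaries/ and writes
# dictionaries/dictionaries.json plus dictionaries/dictionaries-unused.json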
4
scripts/dictionaries/dictionaries/.gitignore
vendored
Normal file
|
@ -0,0 +1,4 @@
|
|||
# Ignore everything in this directory
|
||||
*
|
||||
# Except this file
|
||||
!.gitignore
|
109
scripts/generate-isbn-ranges.py
Executable file
|
@ -0,0 +1,109 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import urllib.request, sys, argparse, os, json, re
|
||||
import xml.etree.ElementTree as ET
|
||||
|
||||
# Command line args
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("out_file", help='File to output to.', nargs="?")
|
||||
args = parser.parse_args();
|
||||
|
||||
# Fetch ranges
|
||||
# See https://www.isbn-international.org/range_file_generation
|
||||
url = 'https://www.isbn-international.org/export_rangemessage.xml'
|
||||
print('Fetching XML file from ' + url, file=sys.stderr)
|
||||
rangesRoot = ET.parse(urllib.request.urlopen(url))
|
||||
print('Done', file=sys.stderr)
|
||||
|
||||
# Make sure we're always dealing with integers, so that nothing breaks in unexpected ways
|
||||
intRE = re.compile(r'^\d+$');
|
||||
|
||||
# Parse them into JSON
|
||||
uniqueRanges = {}
|
||||
sameRanges = {} # To reduce redundancy, we can alias same ranges
|
||||
for group in rangesRoot.iter('Group'):
|
||||
(uccPrefix, groupPrefix) = group.find('Prefix').text.split('-')
|
||||
|
||||
if not intRE.match(uccPrefix) or not intRE.match(groupPrefix):
|
||||
print("WARNING: Unexpected prefixes: " + uccPrefix + " " + groupPrefix, file=sys.stderr)
|
||||
continue
|
||||
|
||||
ranges = []
|
||||
for rule in group.iter('Rule'):
|
||||
length = int(rule.find('Length').text)
|
||||
if length <= 0: # 0 length means that the range has not been assigned yet
|
||||
continue
|
||||
|
||||
range = rule.find('Range').text.split('-')
|
||||
if not intRE.match(range[0]) or not intRE.match(range[1]):
|
||||
print("WARNING: Unexpected range: " + range[0] + " " + range[1], file=sys.stderr)
|
||||
continue
|
||||
|
||||
ranges.append(range[0][:length])
|
||||
ranges.append(range[1][:length])
|
||||
|
||||
if len(ranges) == 0:
|
||||
continue
|
||||
|
||||
# In case this is out of order in the XML file
|
||||
# Sort ranges by string length first, then by numeric value
|
||||
# 0 9 00 09 100 0005
|
||||
ranges.sort(key=lambda x: str(len(x)) + '-' + x)
|
||||
|
||||
key = '.'.join(ranges)
|
||||
if key in sameRanges:
|
||||
sameRanges[key].append([uccPrefix, groupPrefix])
|
||||
else:
|
||||
if uccPrefix not in uniqueRanges:
|
||||
uniqueRanges[uccPrefix] = {}
|
||||
|
||||
uniqueRanges[uccPrefix][groupPrefix] = ranges
|
||||
sameRanges[key] = [[uccPrefix, groupPrefix]]
|
||||
|
||||
|
||||
# Output to file as JavaScript
|
||||
file = """/** THIS FILE WAS GENERATED AUTOMATICALLY **/
|
||||
|
||||
/**
|
||||
* ISBN Registrant ranges from https://www.isbn-international.org/range_file_generation
|
||||
**/
|
||||
Zotero.ISBN = {};
|
||||
Zotero.ISBN.ranges = (function() {
|
||||
var ranges = """
|
||||
|
||||
rangesJSON = json.dumps(uniqueRanges, separators=(',', ': '), indent="\t", sort_keys=True)
|
||||
rangesJSON = re.sub(r'(?<= \[|\d"|",)\s+', '', rangesJSON) # Remove newlines in ranges array
|
||||
file += '\n\t'.join(rangesJSON.split('\n')) # Add extra indent
|
||||
file += ";\n\t\n\t"
|
||||
|
||||
# For same ranges, don't duplicate data, just re-assign it
|
||||
dupes = []
|
||||
for _, ranges in sameRanges.items():
|
||||
if len(ranges) == 1:
|
||||
continue # No duplicates
|
||||
|
||||
last = ranges.pop(0) # First range actually contains the value that needs to get assigned, so it needs to end up last
|
||||
ranges.sort(key=lambda r: '.'.join(r)) # Try to keep the list stable to keep the diff reasonable
|
||||
ranges.append(last)
|
||||
|
||||
dupes.append(' = '.join(map(lambda r: "ranges['" + "']['".join(r) + "']", ranges)))
|
||||
|
||||
# Try to keep this as stable as possible
|
||||
dupes.sort()
|
||||
file += ";\n\t".join(dupes) + ";"
|
||||
|
||||
file += """
|
||||
|
||||
return ranges;
|
||||
})();"""
|
||||
|
||||
|
||||
if args.out_file is not None:
|
||||
# Try printing to file if one is provided
|
||||
print('Writing ranges to ' + args.out_file, file=sys.stderr)
|
||||
|
||||
f = open(args.out_file, 'w')
|
||||
print(file, file=f)
|
||||
else:
|
||||
# Print to stdout
|
||||
print(file)
|
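A usage sketch; the client-side target path below is an assumption for illustration, since the script simply writes to the file it's given (or to stdout):

# Inspect the generated JavaScript first
./generate-isbn-ranges.py | less
# Or write it directly to a file in the client tree (hypothetical path)
./generate-isbn-ranges.py ../zotero/chrome/content/zotero/xpcom/isbn.js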
1
scripts/locale/.gitignore
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
packs
|
30
scripts/locale/connector_to_client
Executable file
|
@ -0,0 +1,30 @@
|
|||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
connector_dir=${1:-}
|
||||
client_dir=${2:-}
|
||||
|
||||
if [[ -z "$connector_dir" ]]; then
|
||||
connector_dir=~/zotero-connectors
|
||||
fi
|
||||
|
||||
if [[ -z "$client_dir" ]]; then
|
||||
client_dir=~/zotero-client
|
||||
fi
|
||||
|
||||
json_file=$connector_dir/src/messages.json
|
||||
client_dir=$client_dir/chrome/locale
|
||||
|
||||
if [ ! -e "$json_file" ]; then
|
||||
echo "$json_file does not exist" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ ! -d "$client_dir" ]; then
|
||||
echo "$client_dir is not a directory" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
cp $json_file $client_dir/en-US/zotero/connector.json
|
||||
cd $client_dir
|
||||
git diff
|
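A usage sketch; the two arguments are optional and default to ~/zotero-connectors and ~/zotero-client:

./connector_to_client ~/zotero-connectors ~/zotero-client
# Copies src/messages.json to chrome/locale/en-US/zotero/connector.json
# in the client checkout and shows the resulting git diff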
21
scripts/locale/filter_connector_json
Executable file
|
@ -0,0 +1,21 @@
|
|||
#!/bin/sh
|
||||
#
|
||||
# Filter connector.json to remove empty messages and delete descriptions
|
||||
#
|
||||
|
||||
if [ ! -d "$1/en-US/zotero" ]; then
|
||||
echo "Usage: $0 /path/to/zotero/chrome/locale"
|
||||
exit
|
||||
fi
|
||||
|
||||
DIR="$1"
|
||||
|
||||
cd $DIR
|
||||
for i in `find . -name connector.json -not -path ./en-US/zotero/connector.json`; do
|
||||
cat $i | jq 'to_entries | map(select(.value.message != "") | del(.value.description)) | from_entries' > $i.new
|
||||
mv $i.new $i
|
||||
# Delete empty files, which we could probably do in jq but this is easy
|
||||
if [[ `cat $i` = '{}' ]]; then
|
||||
rm $i
|
||||
fi
|
||||
done
|
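To illustrate what the jq filter does, here is a sketch against a throwaway file; the sample keys and messages are made up:

cat > /tmp/connector.json <<'EOF'
{
  "general_yes": { "message": "Oui", "description": "Affirmative button label" },
  "general_no": { "message": "", "description": "Negative button label" }
}
EOF
jq 'to_entries | map(select(.value.message != "") | del(.value.description)) | from_entries' /tmp/connector.json
# => { "general_yes": { "message": "Oui" } }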
284
scripts/locale/localizer
Executable file
|
@ -0,0 +1,284 @@
|
|||
#!/usr/bin/env php
|
||||
<?php
|
||||
error_reporting(E_ALL);
|
||||
|
||||
$english_path = 'en-US-new'; // New English locale
|
||||
$locale_path = 'locales'; // Existing locales from previous version
|
||||
$content_locale_path = 'content-locales'; // Existing content locales from previous version
|
||||
$output_dir = 'output/locale'; // Directory to save merged locales
|
||||
$content_output_dir = 'output/content'; // Directory to save merged content locales
|
||||
$matching_are_missing = 1; // matching non-english strings should be considered missing
|
||||
$missing_mode = 0; // 0 = default to english; 1 = leave blank; 2 = skip (for BZ)
|
||||
$use_content_input_dir = false;
|
||||
$use_content_output_dir = true; // set to true for XPI, false for BZ
|
||||
$localeCodeInOutputXML = true; // set to true for XPI, false for BZ
|
||||
|
||||
$english_files = scandir($english_path);
|
||||
$locales = array_slice(scandir($locale_path), 2); // strip '.' and '..'
|
||||
|
||||
// Make output directories for each locale
|
||||
foreach ($locales as $locale) {
|
||||
preg_match('/([a-z]{2})(\-[A-Z]{2})?/', $locale, $matches);
|
||||
if (!isset($matches[1])) {
|
||||
continue;
|
||||
}
|
||||
$dir = $output_dir . '/' . $locale . '/zotero/';
|
||||
@mkdir($dir, 0775, true);
|
||||
}
|
||||
|
||||
// Make content output directory for CSL files
|
||||
if ($use_content_output_dir) {
|
||||
@mkdir("$content_output_dir/csl/", 0775, true);
|
||||
}
|
||||
|
||||
foreach ($english_files as $file) {
|
||||
if (preg_match("/^\./", $file)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
$extension = substr(strrchr($file, '.'), 1);
|
||||
|
||||
foreach ($locales as $locale) {
|
||||
preg_match('/([a-z]{2})(\-[A-Z]{2})?/', $locale, $matches);
|
||||
if (!isset($matches[1])) {
|
||||
continue;
|
||||
}
|
||||
$locale = $matches[1];
|
||||
if (!empty($matches[2])) {
|
||||
$locale .= $matches[2];
|
||||
}
|
||||
|
||||
if ($file == 'locales.xml') {
|
||||
if (strlen($locale) == 2) {
|
||||
$csl_locale = $locale . '-' . strtoupper($locale);
|
||||
}
|
||||
else {
|
||||
$csl_locale = $locale;
|
||||
}
|
||||
|
||||
if ($use_content_input_dir) {
|
||||
$locale_source_file = "$content_locale_path/csl/locales-$csl_locale.xml";
|
||||
}
|
||||
else {
|
||||
$locale_source_file = "$locale_path/$locale/zotero/$file";
|
||||
}
|
||||
|
||||
if ($use_content_output_dir) {
|
||||
$dir = "$content_output_dir/csl/";
|
||||
}
|
||||
else {
|
||||
$dir = "$output_dir/$locale/zotero/";
|
||||
}
|
||||
|
||||
if ($localeCodeInOutputXML) {
|
||||
$save_file = "locales-$csl_locale.xml";
|
||||
}
|
||||
else {
|
||||
$save_file = "locales.xml";
|
||||
}
|
||||
|
||||
echo "Saving {$dir}{$save_file}\n";
|
||||
|
||||
$string = generate_csl_locale("$english_path/$file",
|
||||
$locale_source_file, $locale);
|
||||
}
|
||||
else {
|
||||
$dir = "$output_dir/$locale/zotero/";
|
||||
echo "Saving {$dir}{$file}\n";
|
||||
|
||||
$save_file = $file;
|
||||
|
||||
$string = generate_locale($extension, "$english_path/$file",
|
||||
"$locale_path/$locale/zotero/$file");
|
||||
}
|
||||
|
||||
// We can't handle this file, so bail
|
||||
if (!$string) {
|
||||
echo "Error generating file!\n";
|
||||
continue;
|
||||
}
|
||||
|
||||
file_put_contents($dir . $save_file, $string);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function parse_strings($type, $file) {
|
||||
if (!file_exists($file)) {
|
||||
return array();
|
||||
}
|
||||
|
||||
$lines = file($file);
|
||||
$pairs = array();
|
||||
|
||||
switch ($type) {
|
||||
case 'dtd':
|
||||
$regex = '|<!ENTITY[\s]*([^([^\s]*)\s*"(.*)">\s*$|';
|
||||
break;
|
||||
case 'properties':
|
||||
$regex = '|^(?:#\s*)?([^\s]*)\s*=\s*(.*)$|';
|
||||
break;
|
||||
default:
|
||||
echo "Unsupported extension $type\n";
|
||||
return false;
|
||||
}
|
||||
|
||||
foreach ($lines as $line) {
|
||||
preg_match($regex, $line, $matches);
|
||||
|
||||
if (!empty($matches[0])) {
|
||||
$pairs[$matches[1]] = $matches[2];
|
||||
}
|
||||
else {
|
||||
array_push($pairs, NULL);
|
||||
}
|
||||
}
|
||||
return $pairs;
|
||||
}
|
||||
|
||||
|
||||
|
||||
function generate_locale($type, $english_file, $locale_file) {
|
||||
GLOBAL $missing_mode, $matching_are_missing;
|
||||
|
||||
$output = '';
|
||||
$english_pairs = parse_strings($type, $english_file);
|
||||
if (!$english_pairs) {
|
||||
return false;
|
||||
}
|
||||
|
||||
$locale_pairs = parse_strings($type, $locale_file);
|
||||
|
||||
foreach ($english_pairs as $key=>$val) {
|
||||
if (!$val) {
|
||||
if ($output != '') {
|
||||
$output .= "\n";
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
switch ($type) {
|
||||
case 'dtd':
|
||||
$prefix = '<!ENTITY ';
|
||||
$middle = " \"";
|
||||
$suffix = '">';
|
||||
break;
|
||||
case 'properties':
|
||||
$prefix = '';
|
||||
$middle = '=';
|
||||
$suffix = '';
|
||||
break;
|
||||
default:
|
||||
echo "Unsupported extension $type\n";
|
||||
return false;
|
||||
}
|
||||
|
||||
// If missing mode is 2, skip strings not available in this locale
|
||||
if (empty($locale_pairs[$key]) && $missing_mode == 2) {
|
||||
continue;
|
||||
}
|
||||
|
||||
$output .= $prefix;
|
||||
|
||||
$missing = empty($locale_pairs[$key])
|
||||
|| ($matching_are_missing && $english_pairs[$key] === $locale_pairs[$key]);
|
||||
|
||||
// Use existing locale string if we have it
|
||||
if (!$missing) {
|
||||
$output .= $key . $middle . $locale_pairs[$key];
|
||||
}
|
||||
// Default missing strings to English
|
||||
else if ($missing_mode == 0) {
|
||||
$output .= $key . $middle . $english_pairs[$key];
|
||||
}
|
||||
// Leave missing strings blank
|
||||
else if ($missing_mode == 1) {
|
||||
$output .= $key . $middle;
|
||||
}
|
||||
|
||||
$output .= $suffix;
|
||||
$output .= "\n";
|
||||
}
|
||||
|
||||
return $output;
|
||||
}
|
||||
|
||||
|
||||
function generate_csl_locale($english_file, $locale_file, $locale) {
|
||||
$output = '';
|
||||
|
||||
$english_str = file_get_contents($english_file);
|
||||
$english_sxe = new SimpleXMLElement($english_str);
|
||||
|
||||
$str = file_get_contents($locale_file);
|
||||
if (!$str) {
|
||||
echo "Locale version of locales.xml not found\n";
|
||||
return $english_str;
|
||||
}
|
||||
$locale_sxe = new SimpleXMLElement($str);
|
||||
|
||||
$xw = new XMLWriter();
|
||||
$xw->openMemory();
|
||||
$xw->startDocument('1.0', 'UTF-8');
|
||||
$xw->startElement ('terms');
|
||||
$xw->writeAttribute('xmlns', 'http://purl.org/net/xbiblio/csl');
|
||||
$xw->startElement('locale');
|
||||
$xw->writeAttribute('xml:lang', substr($locale, 0, 2));
|
||||
|
||||
$locale_sxe->registerXPathNamespace('csl', 'http://purl.org/net/xbiblio/csl');
|
||||
|
||||
foreach ($english_sxe->locale->term as $term) {
|
||||
$name = $term->attributes()->name;
|
||||
$form = $term->attributes()->form;
|
||||
|
||||
if ($form) {
|
||||
$node = $locale_sxe->xpath("//csl:term[@name='$name' and @form='$form']");
|
||||
}
|
||||
else {
|
||||
$node = $locale_sxe->xpath("//csl:term[@name='$name' and not(@form)]");
|
||||
}
|
||||
|
||||
if (isset($node[0])) {
|
||||
$node = $node[0];
|
||||
}
|
||||
else {
|
||||
$node = $term;
|
||||
}
|
||||
|
||||
$xw->startElement('term');
|
||||
$xw->writeAttribute('name', $name);
|
||||
if ($form) {
|
||||
$xw->writeAttribute('form', $form);
|
||||
}
|
||||
|
||||
if (sizeOf($term->children()) > 0) {
|
||||
$xw->startElement('single');
|
||||
$xw->text($node->single ? $node->single : $term->single);
|
||||
$xw->endElement();
|
||||
$xw->startElement('multiple');
|
||||
$xw->text($node->multiple ? $node->multiple : $term->multiple);
|
||||
$xw->endElement();
|
||||
}
|
||||
else {
|
||||
// If the original had children and this one no longer does, use English
|
||||
if (sizeOf($node[0]->children()) > 0) {
|
||||
$xw->text($term);
|
||||
}
|
||||
// Otherwise use the locale string
|
||||
else {
|
||||
$xw->text($node[0]);
|
||||
}
|
||||
}
|
||||
$xw->endElement(); // </term>
|
||||
}
|
||||
|
||||
$xw->endElement(); // </locale>
|
||||
$xw->endElement(); // </terms>
|
||||
$str = $xw->outputMemory(true);
|
||||
|
||||
$doc = new DOMDocument('1.0');
|
||||
$doc->formatOutput = true;
|
||||
$doc->loadXML($str);
|
||||
return $doc->saveXML();
|
||||
}
|
||||
?>
|
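The script reads fixed relative directories, so a sketch of the expected working layout (directory names taken from the configuration variables above) is:

# Run from a working directory containing:
#   en-US-new/               new English strings (.dtd, .properties, locales.xml)
#   locales/<code>/zotero/   existing translations from the previous version
#   content-locales/csl/     existing CSL locales (only if $use_content_input_dir is true)
#   output/                  merged files land under output/locale and output/content
./localizer
# Normally invoked via the locale/run wrapper, which sets this up under work/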
166
scripts/locale/merge_csl_locales
Executable file
|
@ -0,0 +1,166 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import urllib.request
|
||||
import shutil
|
||||
import xml.etree.ElementTree as ET
|
||||
import unicodedata
|
||||
|
||||
LOCALES_GIT_REF = "master"
|
||||
|
||||
|
||||
def main():
|
||||
if len(sys.argv) < 2 or not os.path.isdir(sys.argv[1]):
|
||||
sys.stderr.write(
|
||||
"Usage: {0} path/to/zotero/source\n".format(os.path.basename(sys.argv[0]))
|
||||
)
|
||||
return 1
|
||||
|
||||
source_dir = sys.argv[1]
|
||||
schema_dir = os.path.join(source_dir, "resource", "schema")
|
||||
|
||||
csl_locale_base = "https://raw.githubusercontent.com/citation-style-language/locales/{ref}/locales-{lang}.xml"
|
||||
|
||||
# Codes for the language packs that we want to grab
|
||||
language_packs = [
|
||||
"af-ZA",
|
||||
"ar",
|
||||
"bg-BG",
|
||||
"ca-AD",
|
||||
"cs-CZ",
|
||||
"cy-GB",
|
||||
"da-DK",
|
||||
"de-AT",
|
||||
"de-CH",
|
||||
"de-DE",
|
||||
"el-GR",
|
||||
"en-GB",
|
||||
"en-US",
|
||||
"es-CL",
|
||||
"es-ES",
|
||||
"es-MX",
|
||||
"et-EE",
|
||||
"eu",
|
||||
"fa-IR",
|
||||
"fi-FI",
|
||||
"fr-CA",
|
||||
"fr-FR",
|
||||
"he-IL",
|
||||
"hi-IN",
|
||||
"hr-HR",
|
||||
"hu-HU",
|
||||
"id-ID",
|
||||
"is-IS",
|
||||
"it-IT",
|
||||
"ja-JP",
|
||||
"km-KH",
|
||||
"ko-KR",
|
||||
"la",
|
||||
"lt-LT",
|
||||
"lv-LV",
|
||||
"mn-MN",
|
||||
"nb-NO",
|
||||
"nl-NL",
|
||||
"nn-NO",
|
||||
"pl-PL",
|
||||
"pt-BR",
|
||||
"pt-PT",
|
||||
"ro-RO",
|
||||
"ru-RU",
|
||||
"sk-SK",
|
||||
"sl-SI",
|
||||
"sr-RS",
|
||||
"sv-SE",
|
||||
"th-TH",
|
||||
"tr-TR",
|
||||
"uk-UA",
|
||||
"vi-VN",
|
||||
"zh-CN",
|
||||
"zh-TW",
|
||||
]
|
||||
|
||||
number_formats = {}
|
||||
|
||||
for lang in language_packs:
|
||||
url = csl_locale_base.format(ref=LOCALES_GIT_REF, lang=lang)
|
||||
|
||||
print("Loading from " + url)
|
||||
with urllib.request.urlopen(url) as response:
|
||||
code = response.getcode()
|
||||
if code != 200:
|
||||
sys.stderr.write("Got {0} for {1}\n".format(code, url))
|
||||
return 1
|
||||
xml = ET.parse(response)
|
||||
|
||||
# first, pull out all the translations for "edition", "editions", and "ed."
|
||||
edition_locators = set()
|
||||
for elem in xml.findall(".//{*}term[@name='edition']"):
|
||||
edition_locators.update(get_all_values(elem))
|
||||
edition_locators = list(edition_locators)
|
||||
|
||||
# next, the translations for "-st", "-nd", "-rd", and "-th"
|
||||
short_ordinal_suffixes = set()
|
||||
for term in xml.findall(".//{*}term"):
|
||||
name = term.attrib.get("name", "")
|
||||
value = term.text
|
||||
if not (name.startswith("ordinal") and value):
|
||||
continue
|
||||
short_ordinal_suffixes.add(value)
|
||||
short_ordinal_suffixes.add(strip_superscript_chars(value))
|
||||
short_ordinal_suffixes = list(short_ordinal_suffixes)
|
||||
|
||||
# lastly, the translations for "first", "second", "third", etc.
|
||||
long_ordinals = {}
|
||||
for term in xml.findall(".//{*}term"):
|
||||
name = term.attrib.get("name", "")
|
||||
if not name.startswith("long-ordinal-"):
|
||||
continue
|
||||
long_ordinals[term.text] = int(
|
||||
term.attrib.get("name", "").rsplit("-", 1)[1]
|
||||
) # parse the "01" in "long-ordinal-01"
|
||||
|
||||
number_formats[lang] = {
|
||||
"locators": {"edition": edition_locators},
|
||||
"ordinals": {"short": short_ordinal_suffixes, "long": long_ordinals},
|
||||
}
|
||||
|
||||
number_formats[
|
||||
"credit"
|
||||
] = f"Generated from the CSL locales repository <https://github.com/citation-style-language/locales/tree/{LOCALES_GIT_REF}> by https://github.com/zotero/zotero-build/blob/master/locale/merge_csl_locales"
|
||||
|
||||
with open(os.path.join(schema_dir, "cslLocaleStrings.json"), "w") as outfile:
|
||||
json.dump(number_formats, outfile, ensure_ascii=False, indent='\t')
|
||||
print(f'Saved combined locales to {os.path.join(schema_dir, "cslLocaleStrings.json")}')
|
||||
|
||||
|
||||
def get_all_values(elem):
|
||||
text = elem.text.strip()
|
||||
single = elem.findtext("{*}single")
|
||||
multiple = elem.findtext("{*}multiple")
|
||||
if text:
|
||||
yield text
|
||||
if single:
|
||||
yield single
|
||||
if multiple:
|
||||
yield multiple
|
||||
|
||||
|
||||
def strip_superscript_chars(s):
|
||||
"""Replace all Unicode superscript modifier characters in a string with their non-superscript
|
||||
counterparts and return the modified string."""
|
||||
output = []
|
||||
for c in s:
|
||||
decomposition = unicodedata.decomposition(c)
|
||||
if decomposition.startswith("<super> "):
|
||||
output.append(
|
||||
chr(int(unicodedata.decomposition(c)[len("<super> ") :], base=16))
|
||||
)
|
||||
else:
|
||||
output.append(c)
|
||||
return "".join(output)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
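A usage sketch; the client path is an assumption:

./merge_csl_locales ~/zotero-client
# Fetches locales-<lang>.xml for each listed language and writes
# resource/schema/cslLocaleStrings.json in the client source tree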
262
scripts/locale/merge_mozilla_files
Executable file
|
@ -0,0 +1,262 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import urllib.request
|
||||
import shutil
|
||||
import zipfile
|
||||
import re
|
||||
import xml.etree.ElementTree as ET
|
||||
|
||||
def main():
|
||||
use_cache = True
|
||||
|
||||
script_dir = os.path.dirname(os.path.realpath(__file__))
|
||||
if len(sys.argv) < 2 or not os.path.isdir(sys.argv[1]):
|
||||
sys.stderr.write("Usage: {0} path/to/zotero/source\n".format(
|
||||
os.path.basename(sys.argv[0])))
|
||||
return 1
|
||||
|
||||
source_dir = sys.argv[1]
|
||||
locales_dir = os.path.join(source_dir, 'chrome', 'locale')
|
||||
schema_dir = os.path.join(source_dir, 'resource', 'schema')
|
||||
|
||||
# AMO download links for Firefox language packs
|
||||
language_packs = [
|
||||
["en-US", "https://addons.mozilla.org/en-US/firefox/addon/english-us-language-pack/"],
|
||||
["af", "https://addons.mozilla.org/en-US/firefox/addon/afrikaans-language-pack/", "af-ZA"],
|
||||
["ar", "https://addons.mozilla.org/en-US/firefox/addon/%D8%A7%D9%84%D8%B9%D8%B1%D8%A8%D9%8A%D8%A9-language-pack/"],
|
||||
["bg", "https://addons.mozilla.org/en-US/firefox/addon/%D0%B1%D1%8A%D0%BB%D0%B3%D0%B0%D1%80%D1%81%D0%BA%D0%B8-language-pack/", "bg-BG"],
|
||||
["ca", "https://addons.mozilla.org/en-US/firefox/addon/ca-language-pack/", "ca-AD"],
|
||||
["cs", "https://addons.mozilla.org/en-US/firefox/addon/czech-cz-language-pack/", "cs-CZ"],
|
||||
["da", "https://addons.mozilla.org/en-US/firefox/addon/dansk-da-language-pack/", "da-DK"],
|
||||
["de", "https://addons.mozilla.org/en-US/firefox/addon/deutsch-de-language-pack/"],
|
||||
["el", "https://addons.mozilla.org/en-US/firefox/addon/greek-gr-language-pack/", "el-GR"],
|
||||
["en-GB", "https://addons.mozilla.org/en-US/firefox/addon/english-gb-language-pack/"],
|
||||
["es-ES", "https://addons.mozilla.org/en-US/firefox/addon/espa%C3%B1ol-espa%C3%B1a-language-pac/"],
|
||||
["eu", "https://addons.mozilla.org/en-US/firefox/addon/basque-language-pack/", "eu-ES"],
|
||||
["fr", "https://addons.mozilla.org/en-US/firefox/addon/fran%C3%A7ais-language-pack/", "fr-FR"],
|
||||
["et", "https://addons.mozilla.org/en-US/firefox/addon/eesti-et-keele-pakk/", "et-EE"],
|
||||
["eu", "https://addons.mozilla.org/en-US/firefox/addon/basque-language-pack/", "eu-ES"],
|
||||
["fa", "https://addons.mozilla.org/en-US/firefox/addon/persian-ir-%D9%81%D8%A7%D8%B1%D8%B3%DB%8C-%D8%A7%DB%8C%D8%B1%D8%A7%D9%86-lang/"],
|
||||
["fi", "https://addons.mozilla.org/en-US/firefox/addon/finnish-language-pack/", "fi-FI"],
|
||||
["gl", "https://addons.mozilla.org/en-US/firefox/addon/galician-galiza-language-pack/", "gl-ES"],
|
||||
["he", "https://addons.mozilla.org/en-US/firefox/addon/hebrew-il-language-pack/", "he-IL"],
|
||||
["hr", "https://addons.mozilla.org/en-US/firefox/addon/hrvatski-hr-language-pack/", "hr-HR"],
|
||||
["hu", "https://addons.mozilla.org/en-US/firefox/addon/magyar-nyelvi/", "hu-HU"],
|
||||
["id", "https://addons.mozilla.org/en-US/firefox/addon/indonesian-langpack/", "id-ID"],
|
||||
["is", "https://addons.mozilla.org/en-US/firefox/addon/icelandic-is-language-pack/", "is-IS"],
|
||||
["it", "https://addons.mozilla.org/en-US/firefox/addon/italiano-it-language-pack/", "it-IT"],
|
||||
["ja", "https://addons.mozilla.org/en-US/firefox/addon/japanese-language-pack-1/", "ja-JP"],
|
||||
["km", "https://addons.mozilla.org/en-US/firefox/addon/%E1%9E%81%E1%9E%98%E1%9E%9A-language-pack/"],
|
||||
["ko", "https://addons.mozilla.org/en-US/firefox/addon/korean-kr-language-pack/", "ko-KR"],
|
||||
["lt", "https://addons.mozilla.org/en-US/firefox/addon/lietuvi%C5%B3-language-pack/", "lt-LT"],
|
||||
["nb-NO", "https://addons.mozilla.org/en-US/firefox/addon/norsk-bokm%C3%A5l-no-language-pa/"],
|
||||
["nn-NO", "https://addons.mozilla.org/en-US/firefox/addon/norsk-nynorsk-no-language-p/"],
|
||||
["nl", "https://addons.mozilla.org/en-US/firefox/addon/nederlands-nl-language-pack/", "nl-NL"],
|
||||
["pl", "https://addons.mozilla.org/en-US/firefox/addon/polski-language-pack/", "pl-PL"],
|
||||
["pt-BR", "https://addons.mozilla.org/en-US/firefox/addon/firefox-br/"],
|
||||
["pt-PT", "https://addons.mozilla.org/en-US/firefox/addon/portugu%C3%AAs-portugal-language/"],
|
||||
["ro", "https://addons.mozilla.org/en-US/firefox/addon/romanian-language-pack/", "ro-RO"],
|
||||
["ru", "https://addons.mozilla.org/en-US/firefox/addon/russian-ru-language-pack/", "ru-RU"],
|
||||
["sk", "https://addons.mozilla.org/en-US/firefox/addon/slovak-sk-language-pack/", "sk-SK"],
|
||||
["sl", "https://addons.mozilla.org/en-US/firefox/addon/slovenski-jezik-language-pa/", "sl-SI"],
|
||||
["sr", "https://addons.mozilla.org/en-US/firefox/addon/%D1%81%D1%80%D0%BF%D1%81%D0%BA%D0%B8-sr-language-pack/", "sr-RS"],
|
||||
["sv-SE", "https://addons.mozilla.org/en-US/firefox/addon/svenska-se-language-pack/", "sv-SE"],
|
||||
["th", "https://addons.mozilla.org/en-US/firefox/addon/thai-language-pack/", "th-TH"],
|
||||
["tr", "https://addons.mozilla.org/en-US/firefox/addon/t%C3%BCrk%C3%A7e-tr-language-pack/", "tr-TR"],
|
||||
["uk", "https://addons.mozilla.org/en-US/firefox/addon/ukrainian-language-pack/", "uk-UA"],
|
||||
["vi", "https://addons.mozilla.org/en-US/firefox/addon/vietnamese-language-pack/", "vi-VN"],
|
||||
["zh-CN", "https://addons.mozilla.org/en-US/firefox/addon/chinese-simplified-zh-cn-la/"],
|
||||
["zh-TW", "https://addons.mozilla.org/en-US/firefox/addon/traditional-chinese-zh-tw-l/"]
|
||||
]
|
||||
|
||||
# Can be a string or a dict containing 'file' and 'grep_patterns'
|
||||
entries = [
|
||||
#"chrome/{0}/locale/{0}/global/dateFormat.properties",
|
||||
"chrome/{0}/locale/{0}/global/editMenuOverlay.dtd",
|
||||
{
|
||||
"file": "chrome/{0}/locale/{0}/global/intl.properties",
|
||||
"grep_patterns": [
|
||||
"^pluralRule",
|
||||
"^intl.accept_languages"
|
||||
]
|
||||
},
|
||||
{
|
||||
"file": "browser/chrome/{0}/locale/browser/browser.dtd",
|
||||
"grep_patterns": [
|
||||
"preferencesCmd"
|
||||
]
|
||||
}
|
||||
]
|
||||
|
||||
extracted_files = {}
|
||||
for entry in entries:
|
||||
entry_filename = get_entry_filename(entry)
|
||||
if entry_filename != 'dateFormat.properties':
|
||||
extracted_files[entry_filename] = set()
|
||||
|
||||
date_formats = {}
|
||||
|
||||
first = True
|
||||
for pack in language_packs:
|
||||
lang, url, *rest = pack
|
||||
|
||||
if len(rest):
|
||||
locale = rest[0]
|
||||
else:
|
||||
locale = lang
|
||||
|
||||
pack_dir = os.path.join(script_dir, 'packs', lang)
|
||||
locale_dir = os.path.join(locales_dir, locale, 'zotero', 'mozilla')
|
||||
|
||||
if not os.path.exists(pack_dir):
|
||||
os.makedirs(pack_dir)
|
||||
if not os.path.exists(locale_dir):
|
||||
os.mkdir(locale_dir)
|
||||
|
||||
if not first:
|
||||
print()
|
||||
first = False
|
||||
|
||||
print("Loading from " + url)
|
||||
with urllib.request.urlopen(url) as response:
|
||||
code = response.getcode()
|
||||
if code != 200:
|
||||
sys.stderr.write("Got {0} for {1}\n".format(code, url))
|
||||
return 1
|
||||
|
||||
html = str(response.read(), 'utf-8')
|
||||
xpi_url = json.loads(
|
||||
'"' + re.match('.*(https:[^"]+\.xpi)', html, flags=re.DOTALL).group(1) + '"'
|
||||
)
|
||||
file_name = xpi_url.split("/")[-1]
|
||||
pack_file = os.path.join(pack_dir, file_name)
|
||||
|
||||
if use_cache and os.path.isfile(pack_file):
|
||||
print("Using cached file for " + pack_file)
|
||||
else:
|
||||
print("Downloading " + xpi_url)
|
||||
with urllib.request.urlopen(xpi_url) as response, open(pack_file, 'wb') as f:
|
||||
shutil.copyfileobj(response, f)
|
||||
|
||||
# Unzip selected files straight to target locale dirs
|
||||
with zipfile.ZipFile(pack_file, "r") as zip_ref:
|
||||
for entry in entries:
|
||||
entry_path = get_entry_path(entry)
|
||||
entry_filename = get_entry_filename(entry)
|
||||
formatted_path = entry_path.format(lang)
|
||||
|
||||
try:
|
||||
zip_ref.getinfo(formatted_path)
|
||||
except KeyError:
|
||||
print("'{0}' not found in {1}".format(formatted_path, pack_file))
|
||||
continue
|
||||
|
||||
with zip_ref.open(formatted_path) as source:
|
||||
# Add date formats to JSON object
|
||||
if entry_filename == 'dateFormat.properties':
|
||||
date_formats[locale] = {
|
||||
"short": [],
|
||||
"long": []
|
||||
}
|
||||
pattern = re.compile(r"^month\.\d+\.(Mmm|name)\s*=\s*(.+)$")
|
||||
for line in source:
|
||||
matches = pattern.match(str(line, 'utf-8'))
|
||||
if matches:
|
||||
if matches.group(1) == "Mmm":
|
||||
date_formats[locale]["short"].append(
|
||||
matches.group(2).strip()
|
||||
)
|
||||
else:
|
||||
date_formats[locale]["long"].append(
|
||||
matches.group(2).strip()
|
||||
)
|
||||
# Extract other files
|
||||
else:
|
||||
target_path = os.path.join(locale_dir, entry_filename)
|
||||
print("Extracting " + target_path)
|
||||
with open(target_path, "wb") as target:
|
||||
copied = False
|
||||
|
||||
# Filter lines based on grep patterns
|
||||
if isinstance(entry, dict) and entry['grep_patterns']:
|
||||
lines_to_write = []
|
||||
|
||||
for line in source:
|
||||
line_str = str(line, 'utf-8')
|
||||
for p in entry['grep_patterns']:
|
||||
if re.search(re.compile(p), line_str):
|
||||
lines_to_write.append(line)
|
||||
continue
|
||||
|
||||
if len(lines_to_write):
|
||||
# BOM is required for Firefox to read .dtd files
|
||||
use_bom = entry_filename.endswith('.dtd')
|
||||
|
||||
target.write(
|
||||
('\ufeff'.encode('utf-8') if use_bom else b'')
|
||||
+ b''.join(lines_to_write)
|
||||
)
|
||||
copied = True
|
||||
|
||||
# Copy file directly
|
||||
else:
|
||||
shutil.copyfileobj(source, target)
|
||||
copied = True
|
||||
|
||||
if copied:
|
||||
extracted_files[entry_filename].add(locale)
|
||||
|
||||
print()
|
||||
|
||||
# Copy en-US files to any locales that didn't have localized versions
|
||||
en_locale_dir = os.path.join(locales_dir, 'en-US', 'zotero', 'mozilla')
|
||||
locales = set(os.listdir(locales_dir))
|
||||
locales -= set(['.DS_Store'])
|
||||
for entry_filename, existing_locales in extracted_files.items():
|
||||
missing = locales.difference(existing_locales)
|
||||
|
||||
for locale in missing:
|
||||
dest_dir = os.path.join(locales_dir, locale, 'zotero', 'mozilla')
|
||||
dest_file = os.path.join(dest_dir, entry_filename)
|
||||
if not os.path.exists(dest_dir):
|
||||
os.mkdir(dest_dir)
|
||||
source_file = os.path.join(en_locale_dir, entry_filename)
|
||||
print("Copying en-US {0} to {1}".format(entry_filename, dest_file))
|
||||
shutil.copyfile(source_file, dest_file)
|
||||
|
||||
print()
|
||||
|
||||
# Date format overrides
|
||||
# https://github.com/zotero/zotero/pull/1156
|
||||
#date_formats['fr-FR']['short'][5] = 'juin'
|
||||
#date_formats['fr-FR']['short'][6] = 'juil'
|
||||
#date_formats['fr-FR']['short'][11] = 'déc'
|
||||
|
||||
# Write dateFormats.json from the extracted data
|
||||
#date_formats_path = os.path.join(schema_dir, 'dateFormats.json')
|
||||
#print("Writing " + date_formats_path);
|
||||
#with open(date_formats_path, 'w') as f:
|
||||
# f.write(json.dumps(date_formats, indent='\t', ensure_ascii=False))
|
||||
print("WARNING: dateFormat.properties is no longer processed\n")
|
||||
|
||||
print("Check results before committing!")
|
||||
|
||||
|
||||
def get_entry_path(entry):
|
||||
if isinstance(entry, str):
|
||||
path = entry
|
||||
else:
|
||||
path = entry['file']
|
||||
return path
|
||||
|
||||
|
||||
def get_entry_filename(entry):
|
||||
path = get_entry_path(entry)
|
||||
return path.split("/")[-1]
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main())
|
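A usage sketch; the client path is again an assumption, and downloaded language packs are cached under packs/ next to the script:

./merge_mozilla_files ~/zotero-client
# Extracts the selected Mozilla locale files into
# chrome/locale/<locale>/zotero/mozilla/, copying en-US versions to any
# locale that lacked a localized file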
41
scripts/locale/run
Executable file
|
@ -0,0 +1,41 @@
|
|||
#!/bin/sh
|
||||
|
||||
if [ ! -d "$1/chrome/locale/en-US/zotero" ]; then
|
||||
echo "Usage: $0 /path/to/zotero"
|
||||
exit
|
||||
fi
|
||||
|
||||
BASEDIR=`dirname $0`
|
||||
cd $BASEDIR
|
||||
BASEDIR=`pwd`
|
||||
WORKDIR=$BASEDIR/work
|
||||
ROOT_DIR="$1"
|
||||
LOCALES_DIR="$1/chrome/locale"
|
||||
|
||||
cd $WORKDIR
|
||||
# Create temporary directories for merge script
|
||||
rm -rf en-US-new locales content-locales output
|
||||
mkdir en-US-new locales content-locales output
|
||||
cp -R $LOCALES_DIR/en-US/zotero/* en-US-new
|
||||
cp -R $LOCALES_DIR/ locales/
|
||||
rm -rf locales/en-US
|
||||
# Correct various errors
|
||||
find locales -name '*.dtd' -exec perl -pi -e "s/&([^\s])/&\$1/g" {} \;
|
||||
find locales -name '*.properties' -exec rpl 'S%' '%S' {} \;
|
||||
find locales -name '*.properties' -exec rpl '/n' '\n' {} \;
|
||||
find locales -name '*.properties' -exec rpl '\\' '\' {} \;
|
||||
find locales -name '*.properties' -exec rpl '\n\n\(?!n)' '\n\n' {} \;
|
||||
find locales -name '*.properties' -exec rpl '\\' '\' {} \;
|
||||
|
||||
# Run merge script
|
||||
$BASEDIR/localizer
|
||||
rsync -a --progress --verbose $WORKDIR/output/locale/ $LOCALES_DIR/
|
||||
|
||||
rpl -R ⏎ '\n' "$LOCALES_DIR"
|
||||
|
||||
rm -rf en-US-new locales content-locales output
|
||||
|
||||
$BASEDIR/filter_connector_json "$LOCALES_DIR"
|
||||
|
||||
cd "$ROOT_DIR/resource/schema/global"
|
||||
./scripts/update-schema
|
4
scripts/locale/work/.gitignore
vendored
Normal file
|
@ -0,0 +1,4 @@
|
|||
# Ignore everything in this directory
|
||||
*
|
||||
# Except this file
|
||||
!.gitignore
|
42
scripts/make-z-icons
Executable file
|
@ -0,0 +1,42 @@
|
|||
#!/bin/bash -e
|
||||
|
||||
#
|
||||
# Generate platform-specific PNG files from SVG files for the main toolbar button
|
||||
#
|
||||
|
||||
ROOT="$1"
|
||||
SVG_ROOT="$ROOT/chrome/skin/default/zotero/zotero-z-32px-australis"
|
||||
PLATFORM_PATH="$ROOT/chrome/content/zotero-platform"
|
||||
|
||||
if [ -z "$ROOT" ]; then
|
||||
echo "Usage: $0 /path/to/zotero"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ ! -d "$ROOT" ]; then
|
||||
echo "$1 is not a directory"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ ! -f "$SVG_ROOT-mac.svg" ]; then
|
||||
echo "SVG file not found at $SVG-mac.svg"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Mac
|
||||
PLAT=mac
|
||||
rm -f "$PLATFORM_PATH/$PLAT/"zotero-z-*-australis.png
|
||||
svg2png $SVG_ROOT-$PLAT.svg --output "$PLATFORM_PATH/$PLAT/zotero-z-16px-australis.png" -w 16 -h 16
|
||||
svg2png $SVG_ROOT-$PLAT.svg --output "$PLATFORM_PATH/$PLAT/zotero-z-32px-australis.png" -w 32 -h 32
|
||||
|
||||
# Windows
|
||||
PLAT=win
|
||||
rm -f "$PLATFORM_PATH/$PLAT/"zotero-z-*-australis.png
|
||||
svg2png $SVG_ROOT-$PLAT.svg --output "$PLATFORM_PATH/$PLAT/zotero-z-16px-australis.png" -w 16 -h 16
|
||||
svg2png $SVG_ROOT-$PLAT.svg --output "$PLATFORM_PATH/$PLAT/zotero-z-32px-australis.png" -w 32 -h 32
|
||||
|
||||
# Linux
|
||||
PLAT=unix
|
||||
rm -f "$PLATFORM_PATH/$PLAT/"zotero-z-*-australis.png
|
||||
svg2png $SVG_ROOT-$PLAT.svg --output "$PLATFORM_PATH/$PLAT/zotero-z-16px-australis.png" -w 16 -h 16
|
||||
svg2png $SVG_ROOT-$PLAT.svg --output "$PLATFORM_PATH/$PLAT/zotero-z-32px-australis.png" -w 32 -h 32
|
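A usage sketch; svg2png must be on the PATH, and the client path is an assumption:

./make-z-icons ~/zotero-client
# Regenerates zotero-z-16px/32px-australis.png for mac, win, and unix under
# chrome/content/zotero-platform/<platform>/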
34
scripts/update-citeproc.sh
Executable file
|
@ -0,0 +1,34 @@
|
|||
#!/bin/bash -e
|
||||
|
||||
ZOTEROSRC="../zotero"
|
||||
if [ -e "./config.sh" ]; then
|
||||
. ./config.sh
|
||||
fi
|
||||
|
||||
tag=master
|
||||
if [ -n "$1" ]; then
|
||||
tag=$1
|
||||
fi
|
||||
echo Downloading tag $tag
|
||||
sleep 2
|
||||
|
||||
outFile="$ZOTEROSRC/chrome/content/zotero/xpcom/citeproc.js"
|
||||
|
||||
if [ ! -e "$outFile" ]; then
|
||||
>&2 echo "$outFile not found. Looking for Zotero source in $(readlink -f $ZOTEROSRC)"
|
||||
exit 78 # EX_CONFIG: configuration error (from sysexits.h)
|
||||
fi
|
||||
|
||||
curl -f https://raw.githubusercontent.com/Juris-M/citeproc-js/$tag/citeproc.js > "$outFile"
|
||||
|
||||
echo
|
||||
|
||||
if [ `command -v acorn` ]; then
|
||||
echo "Verifying file..."
|
||||
acorn --silent "$outFile"
|
||||
if [ $? = 0 ]; then
|
||||
echo "OK"
|
||||
fi
|
||||
else
|
||||
echo "Warning: acorn isn't installed -- not verifying file"
|
||||
fi
|
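A usage sketch, assuming it is run from the scripts directory and that config.sh points ZOTEROSRC at a client checkout (see config.sh-sample above); the tag is whatever citeproc-js release you want:

cp config.sh-sample config.sh   # edit ZOTEROSRC if the client isn't at ../zotero
./update-citeproc.sh <tag>      # omit the tag to pull master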
328
scripts/xpi/build_xpi
Executable file
|
@ -0,0 +1,328 @@
|
|||
#!/usr/bin/env python3
|
||||
import sys
|
||||
import os
|
||||
import argparse
|
||||
import tempfile
|
||||
import shutil
|
||||
import subprocess
|
||||
import re
|
||||
import fileinput
|
||||
from collections import OrderedDict
|
||||
import json
|
||||
import hashlib
|
||||
import traceback
|
||||
|
||||
# Hack to combine two argparse formatters
|
||||
class CustomFormatter(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
|
||||
pass
|
||||
|
||||
parser = argparse.ArgumentParser(
|
||||
description='Build a Zotero XPI',
|
||||
formatter_class=CustomFormatter,
|
||||
epilog='''
|
||||
Example: build_xpi -s ~/zotero-client/build -x 5.0.1 -z
|
||||
- Builds zotero-build.xpi and update-build.rdf
|
||||
- Points update-build.rdf to https://download.zotero.org/extension/zotero-5.0.1.xpi
|
||||
- Points install.rdf to https://www.zotero.org/download/update.rdf
|
||||
|
||||
Example: build_xpi -s ~/zotero-client/build -c beta -m 7c27a9bb5 -x 5.0b2 -r beta -z
|
||||
- Builds zotero-build.xpi and update-build.rdf
|
||||
- Points update-build.rdf to https://download.zotero.org/extension/zotero-5.0b2.xpi
|
||||
- Points install.rdf to https://www.zotero.org/download/update-beta.rdf
|
||||
|
||||
Example: build_xpi -s ~/zotero-client/build -c alpha -m 7c27a9bb5 -x 5.0-alpha -r 5.0-branch --xpi-dir dev -z
|
||||
- Builds zotero-build.xpi and update-build.rdf
|
||||
- Points update-build.rdf to https://download.zotero.org/extension/dev/zotero-5.0-alpha.xpi
|
||||
- Points install.rdf to https://zotero.org/download/dev/update-5.0-branch.rdf''')
|
||||
|
||||
parser.add_argument('--source-dir', '-s', required=True, metavar='DIR', help='Directory to build from')
|
||||
parser.add_argument('-c', '--channel', default='source', help='channel to add to dev build version number (e.g., "beta" for "5.0-beta.3+a5f28ca8"), or "release" or "source" to skip')
|
||||
parser.add_argument('--commit-hash', '-m', metavar='HASH', help='Commit hash (required for non-release builds)')
|
||||
parser.add_argument('--build-suffix', metavar='SUFFIX', default='build', help='suffix of output XPI')
|
||||
parser.add_argument('--xpi-suffix', '-x', metavar='SUFFIX', default='', help='suffix of XPI to reference in update.rdf')
|
||||
parser.add_argument('--rdf-suffix', '-r', metavar='SUFFIX', default='', help='suffix of update.rdf file to reference in install.rdf (e.g., "beta" for "update-beta.rdf")')
|
||||
parser.add_argument('--xpi-dir', metavar='DIR', default='', help='extra directory to point to when referencing the XPI in update.rdf')
|
||||
parser.add_argument('--zip', '-z', action='store_true', help="Create XPI instead of leaving files in build/staging")
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
def main():
|
||||
try:
|
||||
if args.xpi_suffix:
|
||||
args.xpi_suffix = "-" + args.xpi_suffix
|
||||
if args.rdf_suffix:
|
||||
args.rdf_suffix = "-" + args.rdf_suffix
|
||||
if args.build_suffix:
|
||||
args.build_suffix = "-" + args.build_suffix
|
||||
|
||||
root_dir = os.path.dirname(os.path.realpath(__file__))
|
||||
|
||||
# Use the BUILD_DIR environment variable if present, and otherwise ./build
|
||||
build_dir = os.environ.get('BUILD_DIR', os.path.join(root_dir, 'build'))
|
||||
tmp_dir = os.path.join(build_dir, 'tmp')
|
||||
|
||||
if not os.path.isdir(build_dir):
|
||||
raise Exception(build_dir + " is not a directory")
|
||||
|
||||
src_dir = args.source_dir
|
||||
if not os.path.isdir(src_dir):
|
||||
raise Exception(src_dir + " is not a directory")
|
||||
|
||||
if args.commit_hash:
|
||||
commit_hash = args.commit_hash[0:9]
|
||||
elif args.channel != "release":
|
||||
raise Exception("--commit-hash must be specified for non-release builds")
|
||||
|
||||
log("Using source directory of " + src_dir)
|
||||
os.chdir(src_dir)
|
||||
|
||||
if not os.path.exists('install.rdf'):
|
||||
raise FileNotFoundError("install.rdf not found in {0}".format(src_dir))
|
||||
|
||||
# Extract version number from install.rdf
|
||||
with open('install.rdf') as f:
|
||||
rdf = f.read()
|
||||
m = re.search('version>([0-9].+)\\.SOURCE</', rdf)
|
||||
if not m:
|
||||
raise Exception("Version number not found in install.rdf")
|
||||
version = m.group(1)
|
||||
|
||||
# Determine build targets
|
||||
target_xpi_file = os.path.join(
|
||||
build_dir, 'zotero' + args.build_suffix + '.xpi'
|
||||
)
|
||||
target_update_file = os.path.join(
|
||||
build_dir, 'update' + args.build_suffix + '.rdf'
|
||||
)
|
||||
staging_dir = os.path.join(build_dir, 'staging')
|
||||
|
||||
# Delete any existing build targets
|
||||
try:
|
||||
os.remove(target_xpi_file)
|
||||
except OSError:
|
||||
pass
|
||||
try:
|
||||
os.remove(target_update_file)
|
||||
except OSError:
|
||||
pass
|
||||
if os.path.exists(staging_dir):
|
||||
shutil.rmtree(staging_dir)
|
||||
|
||||
# Remove tmp build directory if it already exists
|
||||
if os.path.exists(tmp_dir):
|
||||
shutil.rmtree(tmp_dir)
|
||||
os.mkdir(tmp_dir)
|
||||
tmp_src_dir = os.path.join(tmp_dir, 'zotero')
|
||||
|
||||
# Export a clean copy of the source tree
|
||||
subprocess.check_call([
|
||||
'rsync',
|
||||
'-aL',
|
||||
# Exclude hidden files
|
||||
'--exclude', '.*',
|
||||
'--exclude', '#*',
|
||||
'--exclude', 'package.json',
|
||||
'--exclude', 'package-lock.json',
|
||||
'.' + os.sep,
|
||||
tmp_src_dir + os.sep
|
||||
])
|
||||
|
||||
# Make sure rsync worked
|
||||
d = os.path.join(tmp_src_dir, 'chrome')
|
||||
if not os.path.isdir(d):
|
||||
raise FileNotFoundError(d + " not found")
|
||||
|
||||
log("Deleting CSL locale support files")
|
||||
subprocess.check_call([
|
||||
'find',
|
||||
os.path.normpath(tmp_src_dir + '/chrome/content/zotero/locale/csl/'),
|
||||
'-mindepth', '1',
|
||||
'!', '-name', '*.xml',
|
||||
'!', '-name', 'locales.json',
|
||||
'-print',
|
||||
'-delete'
|
||||
])
|
||||
|
||||
# Delete styles build script
|
||||
os.remove(os.path.join(tmp_src_dir, 'styles', 'update'))
|
||||
|
||||
translators_dir = os.path.join(tmp_src_dir, 'translators')
|
||||
|
||||
# Move deleted.txt out of translators directory
|
||||
f = os.path.join(translators_dir, 'deleted.txt')
|
||||
if os.path.exists(f):
|
||||
shutil.move(f, tmp_src_dir)
|
||||
|
||||
# Build translator index
|
||||
index = OrderedDict()
|
||||
for fn in sorted((fn for fn in os.listdir(translators_dir)), key=str.lower):
|
||||
if not fn.endswith('.js'):
|
||||
continue
|
||||
with open(os.path.join(translators_dir, fn), 'r', encoding='utf-8') as f:
|
||||
contents = f.read()
|
||||
# Parse out the JSON metadata block
|
||||
m = re.match('^\s*{[\S\s]*?}\s*?[\r\n]', contents)
|
||||
if not m:
|
||||
raise Exception("Metadata block not found in " + f.name)
|
||||
metadata = json.loads(m.group(0))
|
||||
index[metadata["translatorID"]] = {
|
||||
"fileName": fn,
|
||||
"label": metadata["label"],
|
||||
"lastUpdated": metadata["lastUpdated"]
|
||||
}
|
||||
|
||||
# Unminify translator framework lines
|
||||
with open(os.path.join(root_dir, 'zotero-transfw', 'framework.js')) as f:
|
||||
framework_contents = f.read()
|
||||
with open(os.path.join(translators_dir, 'fw.js'), 'w') as f:
|
||||
f.write(
|
||||
"/*********************** BEGIN FRAMEWORK ***********************/\n"
|
||||
+ framework_contents
|
||||
+ "\n/*********************** END FRAMEWORK ***********************/\n"
|
||||
)
|
||||
os.chdir(translators_dir)
|
||||
subprocess.check_call(
|
||||
"perl -pe 's/.+FW LINE.+/`cat fw.js`/ge' -i *.js", shell=True
|
||||
)
|
||||
os.remove('fw.js')
|
||||
|
||||
# Write translator index as JSON file
|
||||
with open(os.path.join(tmp_src_dir, 'translators.json'), 'w', encoding='utf-8') as f:
|
||||
json.dump(index, f, indent=True, ensure_ascii=False)
|
||||
|
||||
install_file = os.path.join(tmp_src_dir, 'install.rdf')
|
||||
update_file = os.path.join(tmp_src_dir, 'update.rdf')
|
||||
|
||||
log_line()
|
||||
log('Original install.rdf:')
|
||||
dump_file(install_file)
|
||||
if args.zip:
|
||||
log('Original update.rdf:\n')
|
||||
dump_file(update_file)
|
||||
log_line()
|
||||
|
||||
# Modify install.rdf and update.rdf as necessary
|
||||
|
||||
# The dev build revision number is stored in build/lastrev-{version}-{channel}.
|
||||
#
|
||||
# If we're including it, get the current revision number and increment it.
|
||||
if args.channel not in ["release", "source"]:
|
||||
lastrev_file = os.path.join(
|
||||
build_dir, 'lastrev-{0}-{1}'.format(version, args.channel)
|
||||
)
|
||||
if not os.path.exists(lastrev_file):
|
||||
with open(lastrev_file, 'w') as f:
|
||||
f.write("0")
|
||||
rev = 1
|
||||
else:
|
||||
with open(lastrev_file, 'r') as f:
|
||||
rev = f.read()
|
||||
rev = int(rev if rev else 0) + 1
|
||||
|
||||
if args.channel == "release":
|
||||
rev_sub_str = ""
|
||||
elif args.channel == "source":
|
||||
rev_sub_str = ".SOURCE.{0}".format(commit_hash)
|
||||
else:
|
||||
rev_sub_str = "-{0}.{1}+{2}".format(args.channel, str(rev), commit_hash)
|
||||
if args.xpi_dir:
|
||||
xpi_dir = args.xpi_dir + '/'
|
||||
else:
|
||||
xpi_dir = ''
|
||||
# Update install.rdf and update.rdf
|
||||
for line in fileinput.FileInput(install_file, inplace=1):
|
||||
line = line.replace('.SOURCE', rev_sub_str)
|
||||
line = line.replace(
|
||||
'update-source.rdf',
|
||||
xpi_dir + 'update' + args.rdf_suffix + '.rdf'
|
||||
)
|
||||
print(line, file=sys.stdout, end='')
|
||||
for line in fileinput.FileInput(update_file, inplace=1):
|
||||
line = line.replace(".SOURCE", rev_sub_str)
|
||||
line = line.replace(
|
||||
'zotero.xpi',
|
||||
xpi_dir + 'zotero' + args.xpi_suffix + '.xpi'
|
||||
)
|
||||
print(line, file=sys.stdout, end='')
|
||||
|
||||
log_line()
|
||||
log('Modified install.rdf:\n')
|
||||
dump_file(install_file)
|
||||
|
||||
# Create XPI
|
||||
if args.zip:
|
||||
# Move update.rdf out of code root
|
||||
shutil.move(update_file, tmp_dir)
|
||||
tmp_update_file = os.path.join(tmp_dir, 'update.rdf')
|
||||
|
||||
os.chdir(tmp_src_dir)
|
||||
tmp_xpi_file = os.path.join(tmp_dir, 'zotero' + args.build_suffix + '.xpi')
|
||||
subprocess.check_call(['zip', '-r', tmp_xpi_file, '.'])
|
||||
|
||||
# Add SHA1 of XPI to update.rdf
|
||||
sha1 = sha1file(tmp_xpi_file)
|
||||
for line in fileinput.FileInput(tmp_update_file, inplace=1):
|
||||
line = line.replace("sha1:", "sha1:" + sha1)
|
||||
print(line, file=sys.stdout, end='')
|
||||
|
||||
log('Modified update.rdf:\n')
|
||||
dump_file(tmp_update_file)
|
||||
log_line()
|
||||
|
||||
# Move files to build directory
|
||||
os.rename(tmp_xpi_file, target_xpi_file)
|
||||
os.rename(tmp_update_file, target_update_file)
|
||||
|
||||
log("")
|
||||
log("zotero{0}.xpi and update{0}.rdf saved to {1}".format(args.build_suffix, build_dir))
|
||||
log("")
|
||||
# Leave unzipped in staging directory
|
||||
else:
|
||||
# Don't create update.rdf
|
||||
os.remove(update_file)
|
||||
|
||||
# Move source files to staging
|
||||
shutil.move(tmp_src_dir, staging_dir)
|
||||
|
||||
log("")
|
||||
log("Build files saved to {0}".format(staging_dir))
|
||||
log("")
|
||||
|
||||
# Update lastrev file with new revision number
|
||||
if args.channel not in ["release", "source"]:
|
||||
with open(lastrev_file, 'w') as f:
|
||||
f.write(str(rev))
|
||||
|
||||
return 0
|
||||
|
||||
except Exception as err:
|
||||
sys.stderr.write("\n" + traceback.format_exc())
|
||||
return 1
|
||||
|
||||
# Clean up
|
||||
finally:
|
||||
if os.path.exists(tmp_dir):
|
||||
shutil.rmtree(tmp_dir)
|
||||
|
||||
|
||||
def dump_file(f):
|
||||
with open(f, 'r') as f:
|
||||
log(f.read())
|
||||
|
||||
|
||||
def log(msg):
|
||||
print(msg, file=sys.stdout)
|
||||
|
||||
|
||||
def log_line():
|
||||
log('======================================================\n\n')
|
||||
|
||||
def sha1file(f):
|
||||
sha1 = hashlib.sha1()
|
||||
with open(f, 'rb') as f:
|
||||
sha1.update(f.read())
|
||||
return sha1.hexdigest()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main())
|