Add zotero-standalone-build repo as app folder

Minus obsolete 4.0 files
Dan Stillman 2023-04-23 03:57:55 -04:00 committed by Dan Stillman
parent ae0091fbae
commit a3d7b58b83
299 changed files with 39600 additions and 0 deletions

.gitmodules vendored

@ -47,3 +47,12 @@
[submodule "chrome/content/zotero/xpcom/translate"]
path = chrome/content/zotero/xpcom/translate
url = https://github.com/zotero/translate.git
[submodule "app/modules/zotero-word-for-mac-integration"]
path = app/modules/zotero-word-for-mac-integration
url = https://github.com/zotero/zotero-word-for-mac-integration.git
[submodule "app/modules/zotero-word-for-windows-integration"]
path = app/modules/zotero-word-for-windows-integration
url = https://github.com/zotero/zotero-word-for-windows-integration.git
[submodule "app/modules/zotero-libreoffice-integration"]
path = app/modules/zotero-libreoffice-integration
url = https://github.com/zotero/zotero-libreoffice-integration.git
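The three integration repos added above are ordinary git submodules, so after checking out this commit they can be fetched with the usual commands (shown for reference; not part of the diff):

    # Fetch the word processor integration submodules added in this commit
    git submodule update --init \
        app/modules/zotero-word-for-mac-integration \
        app/modules/zotero-word-for-windows-integration \
        app/modules/zotero-libreoffice-integration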

app/.gitignore vendored Normal file

@ -0,0 +1,10 @@
*~
cache
config-custom.sh
dist
staging
xulrunner
pdftools
win/resource_hacker
win/firefox-*.win32.zip
win/firefox-*.win64.zip

app/README.md Normal file

@ -0,0 +1,4 @@
# Zotero Standalone build utility
These files are used to package the [Zotero core](https://github.com/zotero/zotero) into distributable bundles for Mac, Windows, and Linux.
Instructions for building and packaging are available on the [Zotero wiki](https://www.zotero.org/support/dev/client_coding/building_the_standalone_client).
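As a rough sketch of how the scripts added in this commit fit together for a local Mac build (paths and update channel are illustrative; the wiki page above has the authoritative steps):

    cd app
    ./fetch_xulrunner.sh -p m    # fetch and modify the Firefox 102 ESR runtime
    ./fetch_pdftools             # fetch the pdftotext/pdfinfo binaries
    ./build.sh -d /path/to/zotero-client-build -p m -c beta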

app/assets/application.ini Normal file

@ -0,0 +1,18 @@
[App]
Vendor=Zotero
Name=Zotero
Version={{VERSION}}
BuildID={{BUILDID}}
Copyright=Copyright (c) 2006-2022 Contributors
ID=zotero@zotero.org
[Gecko]
MinVersion=102.0
MaxVersion=102.99.*
[XRE]
EnableExtensionManager=1
EnableProfileMigrator=1
[AppUpdate]
URL=https://www.zotero.org/download/client/update/%VERSION%/%BUILD_ID%/%BUILD_TARGET%/%LOCALE%/%CHANNEL%/%OS_VERSION%/update.xml


app/assets/branding/locale/brand.dtd Normal file

@ -0,0 +1 @@
<!ENTITY brandShortName "Zotero">

app/assets/branding/locale/brand.ftl Normal file

@ -0,0 +1,6 @@
-brand-shorter-name = Zotero
-brand-short-name = Zotero
-brand-full-name = Zotero
-brand-product-name = Zotero
-vendor-short-name = Zotero
trademarkInfo = Zotero is a trademark of the Corporation for Digital Scholarship.

app/assets/branding/locale/brand.properties Normal file

@ -0,0 +1,3 @@
brandShorterName=Zotero
brandShortName=Zotero
brandFullName=Zotero

app/assets/chrome.manifest Normal file

@ -0,0 +1,3 @@
locale branding en-US chrome/en-US/locale/branding/
content branding chrome/branding/content/
skin browser preferences chrome/skin/



@ -0,0 +1,242 @@
/*
# -*- Mode: Java; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is the Firefox Preferences System.
#
# The Initial Developer of the Original Code is
# Ben Goodger.
# Portions created by the Initial Developer are Copyright (C) 2005
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Ben Goodger <ben@mozilla.org>
# Kevin Gerich <webmail@kmgerich.com>
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL. If you do not delete
# the provisions above, a recipient may use your version of this file under
# the terms of any one of the MPL, the GPL or the LGPL.
#
# ***** END LICENSE BLOCK *****
*/
.windowDialog {
padding: 12px;
font: -moz-dialog;
}
.paneSelector {
list-style-image: url("chrome://browser/skin/preferences/Options.png");
}
/* ----- GENERAL BUTTON ----- */
radio[pane=paneGeneral],
radio[pane=paneMain] {
-moz-image-region: rect(0px, 32px, 32px, 0px);
}
/* ----- TABS BUTTON ----- */
radio[pane=paneTabs] {
-moz-image-region: rect(0px, 64px, 32px, 32px);
}
/* ----- CONTENT BUTTON ----- */
radio[pane=paneContent] {
-moz-image-region: rect(0px, 96px, 32px, 64px);
}
/* ----- APPLICATIONS BUTTON ----- */
radio[pane=paneApplications] {
-moz-image-region: rect(0px, 128px, 32px, 96px);
}
/* ----- PRIVACY BUTTON ----- */
radio[pane=panePrivacy] {
-moz-image-region: rect(0px, 160px, 32px, 128px);
}
/* ----- SECURITY BUTTON ----- */
radio[pane=paneSecurity] {
-moz-image-region: rect(0px, 192px, 32px, 160px);
}
/* ----- ADVANCED BUTTON ----- */
radio[pane=paneAdvanced] {
-moz-image-region: rect(0px, 224px, 32px, 192px);
}
/* ----- SYNC BUTTON ----- */
radio[pane=paneSync] {
list-style-image: url("chrome://browser/skin/preferences/Options-sync.png");
}
/* ----- APPLICATIONS PREFPANE ----- */
#BrowserPreferences[animated="true"] #handlersView {
height: 25em;
}
#BrowserPreferences[animated="false"] #handlersView {
-moz-box-flex: 1;
}
description {
font: small-caption;
font-weight: normal;
line-height: 1.3em;
margin-bottom: 4px !important;
}
prefpane .groupbox-body {
-moz-appearance: none;
padding: 8px 4px 4px 4px;
}
#paneTabs > groupbox {
margin: 0;
}
#tabPrefsBox {
margin: 12px 4px;
}
prefpane .groupbox-title {
background: url("chrome://global/skin/50pct_transparent_grey.png") repeat-x bottom left;
margin-bottom: 4px;
}
tabpanels {
padding: 20px 7px 7px;
}
caption {
-moz-padding-start: 5px;
padding-top: 4px;
padding-bottom: 2px;
}
#paneMain description,
#paneContent description,
#paneAdvanced description,
#paneSecurity description {
font: -moz-dialog;
}
#paneContent {
padding-top: 8px;
}
#paneContent row {
padding: 2px 4px;
-moz-box-align: center;
}
#popupPolicyRow,
#enableSoftwareInstallRow,
#enableImagesRow {
margin-bottom: 4px !important;
padding-bottom: 4px !important;
border-bottom: 1px solid #ccc;
}
#browserUseCurrent,
#browserUseBookmark,
#browserUseBlank {
margin-top: 10px;
}
#advancedPrefs {
margin: 0 8px;
}
#privacyPrefs {
padding: 0 4px;
}
#privacyPrefs > tabpanels {
padding: 18px 10px 10px;
}
#OCSPDialogPane {
font: message-box !important;
}
/**
* Privacy Pane
*/
/* styles for the link elements copied from .text-link in global.css */
.inline-link {
color: -moz-nativehyperlinktext;
text-decoration: underline;
}
.inline-link:not(:focus) {
outline: 1px dotted transparent;
}
/**
* Update Preferences
*/
#autoInstallOptions {
-moz-margin-start: 20px;
}
.updateControls {
-moz-margin-start: 10px;
}
/**
* Clear Private Data
*/
#SanitizeDialogPane > groupbox {
margin-top: 0;
}
/* ----- SYNC PANE ----- */
#syncDesc {
padding: 0 8em;
}
#accountCaptionImage {
list-style-image: url("chrome://mozapps/skin/profile/profileicon.png");
}
#syncAddDeviceLabel {
margin-top: 1em;
margin-bottom: 1em;
}
#syncEnginesList {
height: 10em;
}


@ -0,0 +1 @@
en-US,ar,bg-BG,br,ca-AD,cs-CZ,da-DK,de,el-GR,en-AU,en-CA,en-GB,en-NZ,es-ES,et-EE,eu-ES,fa,fi-FI,fr-FR,gl-ES,hu-HU,id-ID,is-IS,it-IT,ja-JP,km,ko-KR,lt-LT,nb-NO,nl-NL,pl-PL,pt-BR,pt-PT,ro-RO,ru-RU,sk-SK,sl-SI,sr-RS,sv-SE,th-TH,tr-TR,uk-UA,vi-VN,zh-CN,zh-TW

app/assets/prefs.js Normal file

@ -0,0 +1,186 @@
// We only want a single window, I think
pref("toolkit.singletonWindowType", "navigator:browser");
// For debugging purposes, show errors in console by default
pref("javascript.options.showInConsole", true);
// Don't retrieve unrequested links when performing standalone translation
pref("network.prefetch-next", false);
// Let operations run as long as necessary
pref("dom.max_chrome_script_run_time", 0);
// .dotm Word plugin VBA uses this to find the running Zotero instance
pref("ui.window_class_override", "ZoteroWindowClass");
pref("intl.locale.requested", '');
pref("intl.regional_prefs.use_os_locales", false);
// Fix error initializing login manager after this was changed in Firefox 57
// Could also disable this with MOZ_LOADER_SHARE_GLOBAL, supposedly
pref("jsloader.shareGlobal", false);
// Needed due to https://bugzilla.mozilla.org/show_bug.cgi?id=1181977
pref("browser.hiddenWindowChromeURL", "chrome://zotero/content/standalone/hiddenWindow.xhtml");
// Use basicViewer for opening new DOM windows from content (for TinyMCE)
pref("browser.chromeURL", "chrome://zotero/content/standalone/basicViewer.xhtml");
// We need these to get the save dialog working with contentAreaUtils.js
pref("browser.download.useDownloadDir", false);
pref("browser.download.manager.showWhenStarting", false);
pref("browser.download.folderList", 1);
// Don't show add-on selection dialog
pref("extensions.shownSelectionUI", true);
pref("extensions.autoDisableScope", 11);
pref("network.protocol-handler.expose-all", false);
pref("network.protocol-handler.expose.zotero", true);
pref("network.protocol-handler.expose.http", true);
pref("network.protocol-handler.expose.https", true);
// Never go offline
pref("offline.autoDetect", false);
pref("network.manage-offline-status", false);
// Without this, we will throw up dialogs if asked to translate strange pages
pref("browser.xul.error_pages.enabled", true);
// Without this, scripts may decide to open popups
pref("dom.disable_open_during_load", true);
// Don't show security warning. The "warn_viewing_mixed" warning just lets the user know that some
// page elements were loaded over an insecure connection. This doesn't matter if all we're doing is
// scraping the page, since we don't provide any information to the site.
pref("security.warn_viewing_mixed", false);
// Preferences for add-on discovery
pref("extensions.getAddons.cache.enabled", false);
//pref("extensions.getAddons.maxResults", 15);
//pref("extensions.getAddons.get.url", "https://services.addons.mozilla.org/%LOCALE%/%APP%/api/%API_VERSION%/search/guid:%IDS%?src=thunderbird&appOS=%OS%&appVersion=%VERSION%&tMain=%TIME_MAIN%&tFirstPaint=%TIME_FIRST_PAINT%&tSessionRestored=%TIME_SESSION_RESTORED%");
//pref("extensions.getAddons.search.browseURL", "https://addons.mozilla.org/%LOCALE%/%APP%/search?q=%TERMS%");
//pref("extensions.getAddons.search.url", "https://services.addons.mozilla.org/%LOCALE%/%APP%/api/%API_VERSION%/search/%TERMS%/all/%MAX_RESULTS%/%OS%/%VERSION%?src=thunderbird");
//pref("extensions.webservice.discoverURL", "https://www.zotero.org/support/plugins");
// Check Windows certificate store for custom CAs
pref("security.enterprise_roots.enabled", true);
// Disable add-on signature checking with unbranded Firefox build
pref("xpinstall.signatures.required", false);
// Allow legacy extensions (though this might not be necessary)
pref("extensions.legacy.enabled", true);
// Allow installing XPIs from any host
pref("xpinstall.whitelist.required", false);
// Allow installing XPIs when using a custom CA
pref("extensions.install.requireBuiltInCerts", false);
pref("extensions.update.requireBuiltInCerts", false);
// Don't connect to the Mozilla extensions blocklist
pref("extensions.blocklist.enabled", false);
// Avoid warning in console when opening Tools -> Add-ons
pref("extensions.getAddons.link.url", "");
// Disable places
pref("places.history.enabled", false);
// Probably not used, but prevent an error in the console
pref("app.support.baseURL", "https://www.zotero.org/support/");
// Disable Telemetry, Health Report, error reporting, and remote settings
pref("toolkit.telemetry.unified", false);
pref("toolkit.telemetry.enabled", false);
pref("datareporting.policy.dataSubmissionEnabled", false);
pref("toolkit.crashreporter.enabled", false);
pref("extensions.remoteSettings.disabled", true);
pref("extensions.update.url", "");
// Don't try to load the "Get Add-ons" tab on first load of Add-ons window
pref("extensions.ui.lastCategory", "addons://list/extension");
/** The below is imported from https://developer.mozilla.org/en/XULRunner/Application_Update **/
// Whether or not app updates are enabled
pref("app.update.enabled", true);
// This preference turns on app.update.mode and allows automatic download and
// install to take place. We use a separate boolean toggle for this to make
// the UI easier to construct.
pref("app.update.auto", true);
// Defines how the Application Update Service notifies the user about updates:
//
// AUM Set to: Minor Releases: Major Releases:
// 0 download no prompt download no prompt
// 1 download no prompt download no prompt if no incompatibilities
// 2 download no prompt prompt
//
// See chart in nsUpdateService.js.in for more details
//
pref("app.update.mode", 2);
// If set to true, the Update Service will present no UI for any event.
pref("app.update.silent", false);
// URL user can browse to manually if for some reason all update installation
// attempts fail.
pref("app.update.url.manual", "https://www.zotero.org/download");
// A default value for the "More information about this update" link
// supplied in the "An update is available" page of the update wizard.
pref("app.update.url.details", "https://www.zotero.org/support/changelog");
// User-settable override to app.update.url for testing purposes.
//pref("app.update.url.override", "");
// Interval: Time between checks for a new version (in seconds)
// default=1 day
pref("app.update.interval", 86400);
// Interval: Time before prompting the user to download a new version that
// is available (in seconds) default=1 day
pref("app.update.nagTimer.download", 86400);
// Interval: Time before prompting the user to restart to install the latest
// download (in seconds) default=30 minutes
pref("app.update.nagTimer.restart", 1800);
// The minimum delay in seconds for the timer to fire.
// default=2 minutes
pref("app.update.timerMinimumDelay", 120);
// Whether or not we show a dialog box informing the user that the update was
// successfully applied. This is off in Firefox by default since we show an
// upgrade start page instead! Other apps may wish to show this UI, and supply
// a whatsNewURL field in their brand.properties that contains a link to a page
// which tells users what's new in this new update.
// This needs to be disabled since it makes us error out on update for some
// unknown reason
pref("app.update.showInstalledUI", false);
// 0 = suppress prompting for incompatibilities if there are updates available
// to newer versions of installed addons that resolve them.
// 1 = suppress prompting for incompatibilities only if there are VersionInfo
// updates available to installed addons that resolve them, not newer
// versions.
pref("app.update.incompatible.mode", 0);
// update channel for this build
pref("app.update.channel", "default");
// This should probably not be a preference that's used in toolkit....
pref("browser.preferences.instantApply", false);
// Allow elements to be displayed full-screen
pref("full-screen-api.enabled", true);
// Allow chrome access in DevTools
// This enables the input field in the Browser Console tool
pref("devtools.chrome.enabled", true);
// Default mousewheel action with Alt/Option is History Back/Forward in Firefox
// We don't have History navigation and users want to scroll the tree with Option
// key held down
pref("mousewheel.with_alt.action", 1);
// Use the system print dialog instead of the new tab-based print dialog in Firefox
pref("print.prefer_system_dialog", true);


@ -0,0 +1,188 @@
%if 0
/*
# -*- Mode: Java; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is the Firefox Preferences System.
#
# The Initial Developer of the Original Code is
# Ben Goodger.
# Portions created by the Initial Developer are Copyright (C) 2005
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Ben Goodger <ben@mozilla.org>
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL. If you do not delete
# the provisions above, a recipient may use your version of this file under
# the terms of any one of the MPL, the GPL or the LGPL.
#
# ***** END LICENSE BLOCK *****
*/
%endif
/* Global Styles */
#BrowserPreferences radio[pane] {
list-style-image: url("chrome://browser/skin/preferences/Options.png");
}
radio[pane=paneMain] {
-moz-image-region: rect(0px, 32px, 32px, 0px)
}
radio[pane=paneTabs] {
-moz-image-region: rect(0px, 64px, 32px, 32px)
}
radio[pane=paneContent] {
-moz-image-region: rect(0px, 96px, 32px, 64px)
}
radio[pane=paneApplications] {
-moz-image-region: rect(0px, 128px, 32px, 96px)
}
radio[pane=panePrivacy] {
-moz-image-region: rect(0px, 160px, 32px, 128px)
}
radio[pane=paneSecurity] {
-moz-image-region: rect(0px, 192px, 32px, 160px)
}
radio[pane=paneAdvanced] {
-moz-image-region: rect(0px, 224px, 32px, 192px)
}
%ifdef MOZ_SERVICES_SYNC
radio[pane=paneSync] {
list-style-image: url("chrome://browser/skin/preferences/Options-sync.png") !important;
}
%endif
/* Applications Pane */
#BrowserPreferences[animated="true"] #handlersView {
height: 25em;
}
#BrowserPreferences[animated="false"] #handlersView {
-moz-box-flex: 1;
}
/* Privacy Pane */
/* styles for the link elements copied from .text-link in global.css */
.inline-link {
color: -moz-nativehyperlinktext;
text-decoration: underline;
}
.inline-link:not(:focus) {
outline: 1px dotted transparent;
}
/* Modeless Window Dialogs */
.windowDialog,
.windowDialog prefpane {
padding: 0px;
}
.contentPane {
margin: 9px 8px 5px 8px;
}
.actionButtons {
margin: 0px 3px 6px 3px !important;
}
/* Cookies Manager */
#cookiesChildren::-moz-tree-image(domainCol) {
width: 16px;
height: 16px;
margin: 0px 2px;
list-style-image: url("chrome://mozapps/skin/places/defaultFavicon.png");
}
#paneApplications {
margin-left: 4px;
margin-right: 4px;
padding-left: 0;
padding-right: 0;
}
#linksOpenInBox {
margin-top: 5px;
}
#paneAdvanced {
padding-bottom: 10px;
}
#advancedPrefs {
margin-left: 0;
margin-right: 0;
}
#cookiesChildren::-moz-tree-image(domainCol, container) {
list-style-image: url("moz-icon://stock/gtk-directory?size=menu");
}
#cookieInfoBox {
border: 1px solid ThreeDShadow;
border-radius: 0px;
margin: 4px;
padding: 0px;
}
/* bottom-most box containing a groupbox in a prefpane. Prevents the bottom
of the groupbox from being cut off */
.bottomBox {
padding-bottom: 4px;
}
/**
* Clear Private Data
*/
#SanitizeDialogPane > groupbox {
margin-top: 0;
}
%ifdef MOZ_SERVICES_SYNC
/* Sync Pane */
#syncDesc {
padding: 0 8em;
}
#accountCaptionImage {
list-style-image: url("chrome://mozapps/skin/profile/profileicon.png");
}
#syncAddDeviceLabel {
margin-top: 1em;
margin-bottom: 1em;
}
#syncEnginesList {
height: 10em;
}
%endif

app/assets/updater.ini Normal file

@ -0,0 +1,4 @@
; This file is in the UTF-8 encoding
[Strings]
Title=Zotero Update
Info=Zotero is installing your updates and will start in a few moments…


@ -0,0 +1,178 @@
/*
# -*- Mode: Java; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is the Firefox Preferences System.
#
# The Initial Developer of the Original Code is
# Ben Goodger.
# Portions created by the Initial Developer are Copyright (C) 2005
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Ben Goodger <ben@mozilla.org>
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL. If you do not delete
# the provisions above, a recipient may use your version of this file under
# the terms of any one of the MPL, the GPL or the LGPL.
#
# ***** END LICENSE BLOCK *****
*/
/* Global Styles */
#BrowserPreferences radio[pane] {
list-style-image: url("chrome://browser/skin/preferences/Options.png");
padding: 5px 3px 1px;
}
radio[pane=paneMain] {
-moz-image-region: rect(0, 32px, 32px, 0);
}
radio[pane=paneTabs] {
-moz-image-region: rect(0, 64px, 32px, 32px);
}
radio[pane=paneContent] {
-moz-image-region: rect(0, 96px, 32px, 64px);
}
radio[pane=paneApplications] {
-moz-image-region: rect(0, 128px, 32px, 96px);
}
radio[pane=panePrivacy] {
-moz-image-region: rect(0, 160px, 32px, 128px);
}
radio[pane=paneSecurity] {
-moz-image-region: rect(0, 192px, 32px, 160px);
}
radio[pane=paneAdvanced] {
-moz-image-region: rect(0, 224px, 32px, 192px);
}
%ifdef MOZ_SERVICES_SYNC
radio[pane=paneSync] {
list-style-image: url("chrome://browser/skin/preferences/Options-sync.png") !important;
}
%endif
/* Applications Pane */
#BrowserPreferences[animated="true"] #handlersView {
height: 25em;
}
#BrowserPreferences[animated="false"] #handlersView {
-moz-box-flex: 1;
}
/* Privacy Pane */
/* styles for the link elements copied from .text-link in global.css */
.inline-link {
color: -moz-nativehyperlinktext;
text-decoration: underline;
}
.inline-link:not(:focus) {
outline: 1px dotted transparent;
}
/* Modeless Window Dialogs */
.windowDialog,
.windowDialog prefpane {
padding: 0;
}
.contentPane {
margin: 9px 8px 5px;
}
.actionButtons {
margin: 0 3px 6px !important;
}
/* Cookies Manager */
#cookiesChildren::-moz-tree-image(domainCol) {
width: 16px;
height: 16px;
margin: 0 2px;
list-style-image: url("chrome://mozapps/skin/places/defaultFavicon.png") !important;
}
#cookiesChildren::-moz-tree-image(domainCol, container) {
list-style-image: url("chrome://global/skin/icons/folder-item.png") !important;
-moz-image-region: rect(0, 32px, 16px, 16px);
}
#cookiesChildren::-moz-tree-image(domainCol, container, open) {
-moz-image-region: rect(16px, 32px, 32px, 16px);
}
#cookieInfoBox {
border: 1px solid ThreeDShadow;
border-radius: 0;
margin: 4px;
padding: 0;
}
/* Advanced Pane */
/* Adding padding-bottom prevents the bottom of the tabpanel from being cut off
when browser.preferences.animateFadeIn = true */
#advancedPrefs {
padding-bottom: 8px;
}
/* bottom-most box containing a groupbox in a prefpane. Prevents the bottom
of the groupbox from being cut off */
.bottomBox {
padding-bottom: 4px;
}
%ifdef MOZ_SERVICES_SYNC
/* Sync Pane */
#syncDesc {
padding: 0 8em;
}
.syncGroupBox {
padding: 10px;
}
#accountCaptionImage {
list-style-image: url("chrome://mozapps/skin/profile/profileicon.png");
}
#syncAddDeviceLabel {
margin-top: 1em;
margin-bottom: 1em;
}
#syncEnginesList {
height: 11em;
}
%endif

app/build.sh Executable file

@ -0,0 +1,845 @@
#!/bin/bash -e
# Copyright (c) 2011 Zotero
# Center for History and New Media
# George Mason University, Fairfax, Virginia, USA
# http://zotero.org
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
CALLDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
. "$CALLDIR/config.sh"
if [ "`uname`" = "Darwin" ]; then
MAC_NATIVE=1
else
MAC_NATIVE=0
fi
if [ "`uname -o 2> /dev/null`" = "Cygwin" ]; then
WIN_NATIVE=1
else
WIN_NATIVE=0
fi
function usage {
cat >&2 <<DONE
Usage: $0 [-d DIR] [-f FILE] -p PLATFORMS [-c CHANNEL] [-t] [-e] [-s] [-q]
Options
-d DIR build directory to build from (from build_xpi; cannot be used with -f)
-f FILE ZIP file to build from (cannot be used with -d)
-t add devtools
-p PLATFORMS build for platforms PLATFORMS (m=Mac, w=Windows, l=Linux)
-c CHANNEL use update channel CHANNEL
-e enforce signing
-s don't package; only build binaries in staging/ directory
-q quick build (skip compression and other optional steps for faster restarts during development)
DONE
exit 1
}
BUILD_DIR=`mktemp -d`
function cleanup {
rm -rf $BUILD_DIR
}
trap cleanup EXIT
function abspath {
echo $(cd $(dirname $1); pwd)/$(basename $1);
}
SOURCE_DIR=""
ZIP_FILE=""
BUILD_MAC=0
BUILD_WIN=0
BUILD_LINUX=0
PACKAGE=1
DEVTOOLS=0
quick_build=0
while getopts "d:f:p:c:tseq" opt; do
case $opt in
d)
SOURCE_DIR="$OPTARG"
;;
f)
ZIP_FILE="$OPTARG"
;;
p)
for i in `seq 0 1 $((${#OPTARG}-1))`
do
case ${OPTARG:i:1} in
m) BUILD_MAC=1;;
w) BUILD_WIN=1;;
l) BUILD_LINUX=1;;
*)
echo "$0: Invalid platform option ${OPTARG:i:1}"
usage
;;
esac
done
;;
c)
UPDATE_CHANNEL="$OPTARG"
;;
t)
DEVTOOLS=1
;;
e)
SIGN=1
;;
s)
PACKAGE=0
;;
q)
quick_build=1
;;
*)
usage
;;
esac
shift $((OPTIND-1)); OPTIND=1
done
# Require source dir or ZIP file
if [[ -z "$SOURCE_DIR" ]] && [[ -z "$ZIP_FILE" ]]; then
usage
elif [[ -n "$SOURCE_DIR" ]] && [[ -n "$ZIP_FILE" ]]; then
usage
fi
# Require at least one platform
if [[ $BUILD_MAC == 0 ]] && [[ $BUILD_WIN == 0 ]] && [[ $BUILD_LINUX == 0 ]]; then
usage
fi
if [[ -z "${ZOTERO_INCLUDE_TESTS:-}" ]] || [[ $ZOTERO_INCLUDE_TESTS == "0" ]]; then
include_tests=0
else
include_tests=1
fi
# Bundle devtools with dev builds
if [ "$UPDATE_CHANNEL" == "beta" ] || [ "$UPDATE_CHANNEL" == "dev" ] || [ "$UPDATE_CHANNEL" == "test" ]; then
DEVTOOLS=1
fi
if [ -z "$UPDATE_CHANNEL" ]; then UPDATE_CHANNEL="default"; fi
BUILD_ID=`date +%Y%m%d%H%M%S`
# Paths to Gecko runtimes
MAC_RUNTIME_PATH="$CALLDIR/xulrunner/Firefox.app"
WIN_RUNTIME_PATH_PREFIX="$CALLDIR/xulrunner/firefox-"
LINUX_RUNTIME_PATH_PREFIX="$CALLDIR/xulrunner/firefox-"
base_dir="$BUILD_DIR/base"
app_dir="$BUILD_DIR/base/app"
omni_dir="$BUILD_DIR/base/app/omni"
shopt -s extglob
mkdir -p "$app_dir"
rm -rf "$STAGE_DIR"
mkdir "$STAGE_DIR"
rm -rf "$DIST_DIR"
mkdir "$DIST_DIR"
# Save build id, which is needed for updates manifest
echo $BUILD_ID > "$DIST_DIR/build_id"
cd "$app_dir"
# Copy 'browser' files from Firefox
#
# omni.ja is left uncompressed within the Firefox application files by fetch_xulrunner
set +e
if [ $BUILD_MAC == 1 ]; then
cp -Rp "$MAC_RUNTIME_PATH"/Contents/Resources/browser/omni "$app_dir"
elif [ $BUILD_WIN == 1 ]; then
# Non-arch-specific files, so just use 64-bit version
cp -Rp "${WIN_RUNTIME_PATH_PREFIX}win64"/browser/omni "$app_dir"
elif [ $BUILD_LINUX == 1 ]; then
# Non-arch-specific files, so just use 64-bit version
cp -Rp "${LINUX_RUNTIME_PATH_PREFIX}x86_64"/browser/omni "$app_dir"
fi
set -e
cd $omni_dir
# Move some Firefox files that would be overwritten out of the way
mv chrome.manifest chrome.manifest-fx
mv components components-fx
mv defaults defaults-fx
# Extract Zotero files
if [ -n "$ZIP_FILE" ]; then
ZIP_FILE="`abspath $ZIP_FILE`"
echo "Building from $ZIP_FILE"
unzip -q $ZIP_FILE -d "$omni_dir"
else
rsync_params=""
if [ $include_tests -eq 0 ]; then
rsync_params="--exclude /test"
fi
rsync -a $rsync_params "$SOURCE_DIR/" ./
fi
#
# Merge preserved files from Firefox
#
# components
mv components/* components-fx
rmdir components
mv components-fx components
mv defaults defaults-z
mv defaults-fx defaults
prefs_file=defaults/preferences/zotero.js
# Transfer Firefox prefs, omitting some with undesirable overrides from the base prefs
#
# - network.captive-portal-service.enabled
# Disable the captive portal check against Mozilla servers
egrep -v '(network.captive-portal-service.enabled)' defaults/preferences/firefox.js > $prefs_file
rm defaults/preferences/firefox.js
# Combine app and "extension" Zotero prefs
echo "" >> $prefs_file
echo "#" >> $prefs_file
echo "# Zotero app prefs" >> $prefs_file
echo "#" >> $prefs_file
echo "" >> $prefs_file
cat "$CALLDIR/assets/prefs.js" >> $prefs_file
echo "" >> $prefs_file
echo "# Zotero extension prefs" >> $prefs_file
echo "" >> $prefs_file
cat defaults-z/preferences/zotero.js >> $prefs_file
rm -rf defaults-z
# Platform-specific prefs
if [ $BUILD_MAC == 1 ]; then
perl -pi -e 's/pref\("browser\.preferences\.instantApply", false\);/pref\("browser\.preferences\.instantApply", true);/' $prefs_file
perl -pi -e 's/%GECKO_VERSION%/'"$GECKO_VERSION_MAC"'/g' $prefs_file
# Fix horizontal mousewheel scrolling (this is set to 4 in the Fx60 .app greprefs.js, but
# defaults to 1 in later versions of Firefox, and needs to be 1 to work on macOS)
echo 'pref("mousewheel.with_shift.action", 1);' >> $prefs_file
elif [ $BUILD_WIN == 1 ]; then
perl -pi -e 's/%GECKO_VERSION%/'"$GECKO_VERSION_WIN"'/g' $prefs_file
elif [ $BUILD_LINUX == 1 ]; then
# Modify platform-specific prefs
perl -pi -e 's/pref\("browser\.preferences\.instantApply", false\);/pref\("browser\.preferences\.instantApply", true);/' $prefs_file
perl -pi -e 's/%GECKO_VERSION%/'"$GECKO_VERSION_LINUX"'/g' $prefs_file
fi
# Clear list of built-in add-ons
echo '{"dictionaries": {"en-US": "dictionaries/en-US.dic"}, "system": []}' > chrome/browser/content/browser/built_in_addons.json
# chrome.manifest
mv chrome.manifest zotero.manifest
mv chrome.manifest-fx chrome.manifest
# TEMP
#echo "manifest zotero.manifest" >> "$base_dir/chrome.manifest"
cat zotero.manifest >> chrome.manifest
rm zotero.manifest
# Update channel
perl -pi -e 's/pref\("app\.update\.channel", "[^"]*"\);/pref\("app\.update\.channel", "'"$UPDATE_CHANNEL"'");/' $prefs_file
echo -n "Channel: "
grep app.update.channel $prefs_file
echo
# Add devtools prefs
if [ $DEVTOOLS -eq 1 ]; then
echo >> $prefs_file
echo "// Dev Tools" >> $prefs_file
echo 'pref("devtools.debugger.remote-enabled", true);' >> $prefs_file
echo 'pref("devtools.debugger.remote-port", 6100);' >> $prefs_file
if [ $UPDATE_CHANNEL != "beta" ]; then
echo 'pref("devtools.debugger.prompt-connection", false);' >> $prefs_file
fi
fi
# 5.0.96.3 / 5.0.97-beta.37+ddc7be75c
VERSION=`perl -ne 'print and last if s/.*<em:version>(.+)<\/em:version>.*/\1/;' install.rdf`
# 5.0.96 / 5.0.97
VERSION_NUMERIC=`perl -ne 'print and last if s/.*<em:version>(\d+\.\d+(\.\d+)?).*<\/em:version>.*/\1/;' install.rdf`
if [ -z "$VERSION" ]; then
echo "Version number not found in install.rdf"
exit 1
fi
rm install.rdf
echo
echo "Version: $VERSION"
# Delete Mozilla signing info if present
rm -rf META-INF
# Copy branding
#cp -R "$CALLDIR/assets/branding/content" chrome/branding/content
cp -R "$CALLDIR"/assets/branding/locale/brand.{dtd,properties} chrome/en-US/locale/branding/
cp "$CALLDIR/assets/branding/locale/brand.ftl" localization/en-US/branding/brand.ftl
# Copy localization .ftl files
for locale in `ls chrome/locale/`; do
mkdir -p localization/$locale/mozilla
cp chrome/locale/$locale/zotero/mozilla/*.ftl localization/$locale/mozilla/
# TEMP: Until we've created zotero.ftl in all locales
touch chrome/locale/$locale/zotero/zotero.ftl
cp chrome/locale/$locale/zotero/*.ftl localization/$locale/
done
# Add to chrome manifest
echo "" >> chrome.manifest
cat "$CALLDIR/assets/chrome.manifest" >> chrome.manifest
# Move test files to root directory
if [ $include_tests -eq 1 ]; then
cat test/chrome.manifest >> chrome.manifest
rm test/chrome.manifest
cp -R test/tests "$base_dir/tests"
fi
# Copy platform-specific assets
if [ $BUILD_MAC == 1 ]; then
rsync -a "$CALLDIR/assets/mac/" ./
elif [ $BUILD_WIN == 1 ]; then
rsync -a "$CALLDIR/assets/win/" ./
elif [ $BUILD_LINUX == 1 ]; then
rsync -a "$CALLDIR/assets/unix/" ./
fi
# Add word processor plug-ins
if [ $BUILD_MAC == 1 ]; then
pluginDir="$CALLDIR/modules/zotero-word-for-mac-integration"
mkdir -p "integration/word-for-mac"
cp -RH "$pluginDir/components" \
"$pluginDir/resource" \
"$pluginDir/chrome.manifest" \
"integration/word-for-mac"
echo -n "Word for Mac plugin version: "
cat "integration/word-for-mac/resource/version.txt"
echo
echo >> $prefs_file
cat "$CALLDIR/modules/zotero-word-for-mac-integration/defaults/preferences/zoteroMacWordIntegration.js" >> $prefs_file
echo >> $prefs_file
elif [ $BUILD_WIN == 1 ]; then
pluginDir="$CALLDIR/modules/zotero-word-for-windows-integration"
mkdir -p "integration/word-for-windows"
cp -RH "$pluginDir/components" \
"$pluginDir/resource" \
"$pluginDir/chrome.manifest" \
"integration/word-for-windows"
echo -n "Word for Windows plugin version: "
cat "integration/word-for-windows/resource/version.txt"
echo
echo >> $prefs_file
cat "$CALLDIR/modules/zotero-word-for-windows-integration/defaults/preferences/zoteroWinWordIntegration.js" >> $prefs_file
echo >> $prefs_file
fi
# Libreoffice plugin for all platforms
pluginDir="$CALLDIR/modules/zotero-libreoffice-integration"
mkdir -p "integration/libreoffice"
cp -RH "$pluginDir/chrome" \
"$pluginDir/components" \
"$pluginDir/resource" \
"$pluginDir/chrome.manifest" \
"integration/libreoffice"
echo -n "LibreOffice plugin version: "
cat "integration/libreoffice/resource/version.txt"
echo
echo >> $prefs_file
cat "$CALLDIR/modules/zotero-libreoffice-integration/defaults/preferences/zoteroLibreOfficeIntegration.js" >> $prefs_file
echo >> $prefs_file
# Delete files that shouldn't be distributed
find chrome -name .DS_Store -exec rm -f {} \;
# Zip browser and Zotero files into omni.ja
if [ $quick_build -eq 1 ]; then
# If quick build, don't compress or optimize
zip -qrXD omni.ja *
else
zip -qr9XD omni.ja *
python3 "$CALLDIR/scripts/optimizejars.py" --optimize ./ ./ ./
fi
mv omni.ja ..
cd "$CALLDIR"
rm -rf "$omni_dir"
# Copy updater.ini
cp "$CALLDIR/assets/updater.ini" "$base_dir"
# Adjust chrome.manifest
#perl -pi -e 's^(chrome|resource)/^jar:zotero.jar\!/$1/^g' "$BUILD_DIR/zotero/chrome.manifest"
# Copy icons
mkdir "$base_dir/chrome"
cp -R "$CALLDIR/assets/icons" "$base_dir/chrome/icons"
# Copy application.ini and modify
cp "$CALLDIR/assets/application.ini" "$app_dir/application.ini"
perl -pi -e "s/\{\{VERSION}}/$VERSION/" "$app_dir/application.ini"
perl -pi -e "s/\{\{BUILDID}}/$BUILD_ID/" "$app_dir/application.ini"
# Remove unnecessary files
find "$BUILD_DIR" -name .DS_Store -exec rm -f {} \;
# Mac
if [ $BUILD_MAC == 1 ]; then
echo 'Building Zotero.app'
# Set up directory structure
APPDIR="$STAGE_DIR/Zotero.app"
rm -rf "$APPDIR"
mkdir "$APPDIR"
chmod 755 "$APPDIR"
cp -r "$CALLDIR/mac/Contents" "$APPDIR"
CONTENTSDIR="$APPDIR/Contents"
# Merge relevant assets from Firefox
mkdir "$CONTENTSDIR/MacOS"
cp -r "$MAC_RUNTIME_PATH/Contents/MacOS/"!(firefox|firefox-bin|crashreporter.app|pingsender|updater.app) "$CONTENTSDIR/MacOS"
cp -r "$MAC_RUNTIME_PATH/Contents/Resources/"!(application.ini|browser|defaults|precomplete|removed-files|updater.ini|update-settings.ini|webapprt*|*.icns|*.lproj) "$CONTENTSDIR/Resources"
# Use our own launcher
xz -d --stdout "$CALLDIR/mac/zotero.xz" > "$CONTENTSDIR/MacOS/zotero"
chmod 755 "$CONTENTSDIR/MacOS/zotero"
# TEMP: Modified versions of some Firefox components for Big Sur, placed in xulrunner/MacOS
#cp "$MAC_RUNTIME_PATH/../MacOS/"{libc++.1.dylib,libnss3.dylib,XUL} "$CONTENTSDIR/MacOS/"
# Use our own updater, because Mozilla's requires updates signed by Mozilla
cd "$CONTENTSDIR/MacOS"
tar -xjf "$CALLDIR/mac/updater.tar.bz2"
# Copy PDF tools and data
cp "$CALLDIR/pdftools/pdftotext-mac" "$CONTENTSDIR/MacOS/pdftotext"
cp "$CALLDIR/pdftools/pdfinfo-mac" "$CONTENTSDIR/MacOS/pdfinfo"
cp -R "$CALLDIR/pdftools/poppler-data" "$CONTENTSDIR/Resources/"
# Modify Info.plist
perl -pi -e "s/\{\{VERSION\}\}/$VERSION/" "$CONTENTSDIR/Info.plist"
perl -pi -e "s/\{\{VERSION_NUMERIC\}\}/$VERSION_NUMERIC/" "$CONTENTSDIR/Info.plist"
if [ $UPDATE_CHANNEL == "beta" ] || [ $UPDATE_CHANNEL == "dev" ] || [ $UPDATE_CHANNEL == "source" ]; then
perl -pi -e "s/org\.zotero\.zotero/org.zotero.zotero-$UPDATE_CHANNEL/" "$CONTENTSDIR/Info.plist"
fi
perl -pi -e "s/\{\{VERSION\}\}/$VERSION/" "$CONTENTSDIR/Info.plist"
# Needed for "monkeypatch" Windows builds:
# http://www.nntp.perl.org/group/perl.perl5.porters/2010/08/msg162834.html
rm -f "$CONTENTSDIR/Info.plist.bak"
echo
grep -B 1 org.zotero.zotero "$CONTENTSDIR/Info.plist"
echo
grep -A 1 CFBundleShortVersionString "$CONTENTSDIR/Info.plist"
echo
grep -A 1 CFBundleVersion "$CONTENTSDIR/Info.plist"
echo
# Copy app files
rsync -a "$base_dir/" "$CONTENTSDIR/Resources/"
# Add word processor plug-ins
mkdir "$CONTENTSDIR/Resources/integration"
cp -RH "$CALLDIR/modules/zotero-libreoffice-integration/install" "$CONTENTSDIR/Resources/integration/libreoffice"
cp -RH "$CALLDIR/modules/zotero-word-for-mac-integration/install" "$CONTENTSDIR/Resources/integration/word-for-mac"
# Delete extraneous files
find "$CONTENTSDIR" -depth -type d -name .git -exec rm -rf {} \;
find "$CONTENTSDIR" \( -name .DS_Store -or -name update.rdf \) -exec rm -f {} \;
# Copy over removed-files and make a precomplete file here since it needs to be stable for the
# signature. This is done in build_autocomplete.sh for other platforms.
cp "$CALLDIR/update-packaging/removed-files_mac" "$CONTENTSDIR/Resources/removed-files"
touch "$CONTENTSDIR/Resources/precomplete"
# Sign
if [ $SIGN == 1 ]; then
# Unlock keychain if a password is provided (necessary for building from a shell)
if [ -n "$KEYCHAIN_PASSWORD" ]; then
security -v unlock-keychain -p "$KEYCHAIN_PASSWORD" ~/Library/Keychains/$KEYCHAIN.keychain-db
fi
# Clear extended attributes, which can cause codesign to fail
/usr/bin/xattr -cr "$APPDIR"
# Sign app
entitlements_file="$CALLDIR/mac/entitlements.xml"
/usr/bin/codesign --force --options runtime --entitlements "$entitlements_file" --sign "$DEVELOPER_ID" \
"$APPDIR/Contents/MacOS/pdftotext" \
"$APPDIR/Contents/MacOS/pdfinfo" \
"$APPDIR/Contents/MacOS/XUL" \
"$APPDIR/Contents/MacOS/updater.app/Contents/MacOS/org.mozilla.updater"
find "$APPDIR/Contents" -name '*.dylib' -exec /usr/bin/codesign --force --options runtime --entitlements "$entitlements_file" --sign "$DEVELOPER_ID" {} \;
find "$APPDIR/Contents" -name '*.app' -exec /usr/bin/codesign --force --options runtime --entitlements "$entitlements_file" --sign "$DEVELOPER_ID" {} \;
/usr/bin/codesign --force --options runtime --entitlements "$entitlements_file" --sign "$DEVELOPER_ID" "$APPDIR/Contents/MacOS/zotero"
# Bundle and sign Safari App Extension
#
# Even though it's signed by Xcode, we sign it again to make sure it matches the parent app signature
if [[ -n "$SAFARI_APPEX" ]] && [[ -d "$SAFARI_APPEX" ]]; then
echo
# Extract entitlements, which differ from parent app
/usr/bin/codesign -d --entitlements :"$BUILD_DIR/safari-entitlements.plist" $SAFARI_APPEX
mkdir "$APPDIR/Contents/PlugIns"
cp -R $SAFARI_APPEX "$APPDIR/Contents/PlugIns/ZoteroSafariExtension.appex"
# Add suffix to appex bundle identifier
if [ $UPDATE_CHANNEL == "beta" ] || [ $UPDATE_CHANNEL == "dev" ] || [ $UPDATE_CHANNEL == "source" ]; then
perl -pi -e "s/org\.zotero\.SafariExtensionApp\.SafariExtension/org.zotero.SafariExtensionApp.SafariExtension-$UPDATE_CHANNEL/" "$APPDIR/Contents/PlugIns/ZoteroSafariExtension.appex/Contents/Info.plist"
fi
find "$APPDIR/Contents/PlugIns/ZoteroSafariExtension.appex/Contents" -name '*.dylib' -exec /usr/bin/codesign --force --options runtime --entitlements "$entitlements_file" --sign "$DEVELOPER_ID" {} \;
/usr/bin/codesign --force --options runtime --entitlements "$BUILD_DIR/safari-entitlements.plist" --sign "$DEVELOPER_ID" "$APPDIR/Contents/PlugIns/ZoteroSafariExtension.appex"
fi
# Sign final app package
echo
/usr/bin/codesign --force --options runtime --entitlements "$entitlements_file" --sign "$DEVELOPER_ID" "$APPDIR"
# Verify app
/usr/bin/codesign --verify -vvvv "$APPDIR"
# Verify Safari App Extension
if [[ -n "$SAFARI_APPEX" ]] && [[ -d "$SAFARI_APPEX" ]]; then
echo
/usr/bin/codesign --verify -vvvv "$APPDIR/Contents/PlugIns/ZoteroSafariExtension.appex"
fi
fi
# Build and notarize disk image
if [ $PACKAGE == 1 ]; then
if [ $MAC_NATIVE == 1 ]; then
echo "Creating Mac installer"
dmg="$DIST_DIR/Zotero-$VERSION.dmg"
"$CALLDIR/mac/pkg-dmg" --source "$STAGE_DIR/Zotero.app" \
--target "$dmg" \
--sourcefile --volname Zotero --copy "$CALLDIR/mac/DSStore:/.DS_Store" \
--symlink /Applications:"/Drag Here to Install" > /dev/null
# Upload disk image to Apple
output=$("$CALLDIR/scripts/notarize_mac_app" "$dmg")
echo
echo "$output"
echo
id=$(echo "$output" | plutil -extract notarization-upload.RequestUUID xml1 -o - - | sed -n "s/.*<string>\(.*\)<\/string>.*/\1/p")
echo "Notarization request identifier: $id"
echo
sleep 60
# Check back every 30 seconds, for up to an hour
i="0"
while [ $i -lt 120 ]
do
status=$("$CALLDIR/scripts/notarization_status" $id)
if [[ $status != "in progress" ]]; then
break
fi
echo "Notarization in progress"
sleep 30
i=$[$i+1]
done
# Staple notarization info to disk image
if [ $status == "success" ]; then
"$CALLDIR/scripts/notarization_stapler" "$dmg"
else
echo "Notarization failed!"
"$CALLDIR/scripts/notarization_status" $id
exit 1
fi
echo "Notarization complete"
else
echo 'Not building on Mac; creating Mac distribution as a zip file'
rm -f "$DIST_DIR/Zotero_mac.zip"
cd "$STAGE_DIR" && zip -rqX "$DIST_DIR/Zotero-${VERSION}_mac.zip" Zotero.app
fi
fi
fi
# Windows
if [ $BUILD_WIN == 1 ]; then
echo "Building Windows common"
COMMON_APPDIR="$STAGE_DIR/Zotero_common"
mkdir "$COMMON_APPDIR"
# Copy PDF tools and data
cp "$CALLDIR/pdftools/pdftotext-win.exe" "$COMMON_APPDIR/pdftotext.exe"
cp "$CALLDIR/pdftools/pdfinfo-win.exe" "$COMMON_APPDIR/pdfinfo.exe"
# Package non-arch-specific components
if [ $PACKAGE -eq 1 ]; then
# Copy installer files
cp -r "$CALLDIR/win/installer" "$BUILD_DIR/win_installer"
perl -pi -e "s/\{\{VERSION}}/$VERSION/" "$BUILD_DIR/win_installer/defines.nsi"
mkdir "$COMMON_APPDIR/uninstall"
# Use our own updater, because Mozilla's requires updates signed by Mozilla
cp "$CALLDIR/win/updater.exe" "$COMMON_APPDIR"
cat "$CALLDIR/win/installer/updater_append.ini" >> "$COMMON_APPDIR/updater.ini"
# Sign PDF tools and updater
if [ $SIGN -eq 1 ]; then
"`cygpath -u \"$SIGNTOOL\"`" \
sign /n "$SIGNTOOL_CERT_SUBJECT" \
/d "$SIGNATURE_DESC PDF Converter" \
/fd SHA256 \
/tr "$SIGNTOOL_TIMESTAMP_SERVER" \
/td SHA256 \
"`cygpath -w \"$COMMON_APPDIR/pdftotext.exe\"`"
sleep $SIGNTOOL_DELAY
"`cygpath -u \"$SIGNTOOL\"`" \
sign /n "$SIGNTOOL_CERT_SUBJECT" \
/d "$SIGNATURE_DESC PDF Info" \
/fd SHA256 \
/tr "$SIGNTOOL_TIMESTAMP_SERVER" \
/td SHA256 \
"`cygpath -w \"$COMMON_APPDIR/pdfinfo.exe\"`"
sleep $SIGNTOOL_DELAY
"`cygpath -u \"$SIGNTOOL\"`" \
sign /n "$SIGNTOOL_CERT_SUBJECT" \
/d "$SIGNATURE_DESC Updater" \
/fd SHA256 \
/tr "$SIGNTOOL_TIMESTAMP_SERVER" \
/td SHA256 \
"`cygpath -w \"$COMMON_APPDIR/updater.exe\"`"
fi
# Compress 7zSD.sfx
upx --best -o "`cygpath -w \"$BUILD_DIR/7zSD.sfx\"`" \
"`cygpath -w \"$CALLDIR/win/installer/7zstub/firefox/7zSD.sfx\"`" > /dev/null
fi
for arch in "win32" "win64"; do
echo "Building Zotero_$arch"
runtime_path="${WIN_RUNTIME_PATH_PREFIX}${arch}"
# Set up directory
APPDIR="$STAGE_DIR/Zotero_$arch"
mkdir "$APPDIR"
# Copy relevant assets from Firefox
cp -R "$runtime_path"/!(application.ini|browser|defaults|devtools-files|crashreporter*|firefox.exe|maintenanceservice*|precomplete|removed-files|uninstall|update*) "$APPDIR"
# Copy vcruntime140_1.dll
if [ $arch = "win64" ]; then
cp "$CALLDIR/xulrunner/vc-$arch/vcruntime140_1.dll" "$APPDIR"
fi
# Copy zotero.exe, which is built directly from Firefox source and then modified by
# ResourceHacker to add icons
tar xf "$CALLDIR/win/zotero.exe.tar.xz" --to-stdout zotero_$arch.exe > "$APPDIR/zotero.exe"
# Update .exe version number (only possible on Windows)
if [ $WIN_NATIVE == 1 ]; then
# FileVersion is limited to four integers, so it won't be properly updated for non-release
# builds (e.g., we'll show 5.0.97.0 for 5.0.97-beta.37). ProductVersion will be the full
# version string.
rcedit "`cygpath -w \"$APPDIR/zotero.exe\"`" \
--set-file-version "$VERSION_NUMERIC" \
--set-product-version "$VERSION"
fi
# Copy app files
rsync -a "$base_dir/" "$APPDIR/"
#mv "$APPDIR/app/application.ini" "$APPDIR/"
# Copy in common files
rsync -a "$COMMON_APPDIR/" "$APPDIR/"
# Add devtools
#if [ $DEVTOOLS -eq 1 ]; then
# # Create devtools.jar
# cd "$BUILD_DIR"
# mkdir -p devtools/locale
# cp -r "$runtime_path"/devtools-files/chrome/devtools/* devtools/
# cp -r "$runtime_path"/devtools-files/chrome/locale/* devtools/locale/
# cd devtools
# zip -r -q ../devtools.jar *
# cd ..
# rm -rf devtools
# mv devtools.jar "$APPDIR"
#
# cp "$runtime_path/devtools-files/components/interfaces.xpt" "$APPDIR/components/"
#fi
# Add word processor plug-ins
mkdir -p "$APPDIR/integration"
cp -RH "$CALLDIR/modules/zotero-libreoffice-integration/install" "$APPDIR/integration/libreoffice"
cp -RH "$CALLDIR/modules/zotero-word-for-windows-integration/install" "$APPDIR/integration/word-for-windows"
# Copy PDF tools data
cp -R "$CALLDIR/pdftools/poppler-data" "$APPDIR/"
# Delete extraneous files
find "$APPDIR" -depth -type d -name .git -exec rm -rf {} \;
find "$APPDIR" \( -name .DS_Store -or -name '.git*' -or -name '.travis.yml' -or -name update.rdf -or -name '*.bak' \) -exec rm -f {} \;
find "$APPDIR" \( -name '*.exe' -or -name '*.dll' \) -exec chmod 755 {} \;
if [ $PACKAGE -eq 1 ]; then
if [ $WIN_NATIVE -eq 1 ]; then
echo "Creating Windows installer"
# Build uninstaller
if [ "$arch" = "win32" ]; then
"`cygpath -u \"${NSIS_DIR}makensis.exe\"`" /V1 "`cygpath -w \"$BUILD_DIR/win_installer/uninstaller.nsi\"`"
elif [ "$arch" = "win64" ]; then
"`cygpath -u \"${NSIS_DIR}makensis.exe\"`" /DHAVE_64BIT_OS /V1 "`cygpath -w \"$BUILD_DIR/win_installer/uninstaller.nsi\"`"
fi
mv "$BUILD_DIR/win_installer/helper.exe" "$APPDIR/uninstall"
if [ $SIGN -eq 1 ]; then
"`cygpath -u \"$SIGNTOOL\"`" \
sign /n "$SIGNTOOL_CERT_SUBJECT" \
/d "$SIGNATURE_DESC Uninstaller" \
/fd SHA256 \
/tr "$SIGNTOOL_TIMESTAMP_SERVER" \
/td SHA256 \
"`cygpath -w \"$APPDIR/uninstall/helper.exe\"`"
sleep $SIGNTOOL_DELAY
fi
if [ "$arch" = "win32" ]; then
INSTALLER_PATH="$DIST_DIR/Zotero-${VERSION}_win32_setup.exe"
elif [ "$arch" = "win64" ]; then
INSTALLER_PATH="$DIST_DIR/Zotero-${VERSION}_x64_setup.exe"
fi
if [ $SIGN -eq 1 ]; then
# Sign zotero.exe
"`cygpath -u \"$SIGNTOOL\"`" \
sign /n "$SIGNTOOL_CERT_SUBJECT" \
/d "$SIGNATURE_DESC" \
/du "$SIGNATURE_URL" \
/fd SHA256 \
/tr "$SIGNTOOL_TIMESTAMP_SERVER" \
/td SHA256 \
"`cygpath -w \"$APPDIR/zotero.exe\"`"
sleep $SIGNTOOL_DELAY
fi
# Stage installer
INSTALLER_STAGE_DIR="$BUILD_DIR/win_installer/staging"
rm -rf "$INSTALLER_STAGE_DIR"
mkdir "$INSTALLER_STAGE_DIR"
cp -r "$APPDIR" "$INSTALLER_STAGE_DIR/core"
# Build and sign setup.exe
if [ "$arch" = "win32" ]; then
"`cygpath -u \"${NSIS_DIR}makensis.exe\"`" /V1 "`cygpath -w \"$BUILD_DIR/win_installer/installer.nsi\"`"
elif [ "$arch" = "win64" ]; then
"`cygpath -u \"${NSIS_DIR}makensis.exe\"`" /DHAVE_64BIT_OS /V1 "`cygpath -w \"$BUILD_DIR/win_installer/installer.nsi\"`"
fi
mv "$BUILD_DIR/win_installer/setup.exe" "$INSTALLER_STAGE_DIR"
if [ $SIGN == 1 ]; then
"`cygpath -u \"$SIGNTOOL\"`" \
sign /n "$SIGNTOOL_CERT_SUBJECT" \
/d "$SIGNATURE_DESC Setup" \
/du "$SIGNATURE_URL" \
/fd SHA256 \
/tr "$SIGNTOOL_TIMESTAMP_SERVER" \
/td SHA256 \
"`cygpath -w \"$INSTALLER_STAGE_DIR/setup.exe\"`"
sleep $SIGNTOOL_DELAY
fi
# Compress application
cd "$INSTALLER_STAGE_DIR" && 7z a -r -t7z "`cygpath -w \"$BUILD_DIR/app_$arch.7z\"`" \
-mx -m0=BCJ2 -m1=LZMA:d24 -m2=LZMA:d19 -m3=LZMA:d19 -mb0:1 -mb0s1:2 -mb0s2:3 > /dev/null
# Combine 7zSD.sfx and app.tag into setup.exe
cat "$BUILD_DIR/7zSD.sfx" "$CALLDIR/win/installer/app.tag" \
"$BUILD_DIR/app_$arch.7z" > "$INSTALLER_PATH"
# Sign installer .exe
if [ $SIGN == 1 ]; then
"`cygpath -u \"$SIGNTOOL\"`" \
sign /n "$SIGNTOOL_CERT_SUBJECT" \
/d "$SIGNATURE_DESC Setup" \
/du "$SIGNATURE_URL" \
/fd SHA256 \
/tr "$SIGNTOOL_TIMESTAMP_SERVER" \
/td SHA256 \
"`cygpath -w \"$INSTALLER_PATH\"`"
fi
chmod 755 "$INSTALLER_PATH"
else
echo 'Not building on Windows; only building zip file'
fi
cd "$STAGE_DIR"
if [ $arch = "win32" ]; then
zip -rqX "$DIST_DIR/Zotero-${VERSION}_$arch.zip" Zotero_$arch
elif [ $arch = "win64" ]; then
zip -rqX "$DIST_DIR/Zotero-${VERSION}_win-x64.zip" Zotero_$arch
fi
fi
done
rm -rf "$COMMON_APPDIR"
fi
# Linux
if [ $BUILD_LINUX == 1 ]; then
for arch in "i686" "x86_64"; do
runtime_path="${LINUX_RUNTIME_PATH_PREFIX}${arch}"
# Set up directory
echo 'Building Zotero_linux-'$arch
APPDIR="$STAGE_DIR/Zotero_linux-$arch"
rm -rf "$APPDIR"
mkdir "$APPDIR"
# Merge relevant assets from Firefox
cp -r "$runtime_path/"!(application.ini|browser|defaults|devtools-files|crashreporter|crashreporter.ini|firefox|pingsender|precomplete|removed-files|run-mozilla.sh|update-settings.ini|updater|updater.ini) "$APPDIR"
# Use our own launcher that calls the original Firefox executable with -app
mv "$APPDIR"/firefox-bin "$APPDIR"/zotero-bin
cp "$CALLDIR/linux/zotero" "$APPDIR"/zotero
# Copy Ubuntu launcher files
cp "$CALLDIR/linux/zotero.desktop" "$APPDIR"
cp "$CALLDIR/linux/set_launcher_icon" "$APPDIR"
# Use our own updater, because Mozilla's requires updates signed by Mozilla
cp "$CALLDIR/linux/updater-$arch" "$APPDIR"/updater
# Copy PDF tools and data
cp "$CALLDIR/pdftools/pdftotext-linux-$arch" "$APPDIR/pdftotext"
cp "$CALLDIR/pdftools/pdfinfo-linux-$arch" "$APPDIR/pdfinfo"
cp -R "$CALLDIR/pdftools/poppler-data" "$APPDIR/"
# Copy app files
rsync -a "$base_dir/" "$APPDIR/"
# Add word processor plug-ins
mkdir "$APPDIR/integration"
cp -RH "$CALLDIR/modules/zotero-libreoffice-integration/install" "$APPDIR/integration/libreoffice"
# Delete extraneous files
find "$APPDIR" -depth -type d -name .git -exec rm -rf {} \;
find "$APPDIR" \( -name .DS_Store -or -name update.rdf \) -exec rm -f {} \;
if [ $PACKAGE == 1 ]; then
# Create tar
rm -f "$DIST_DIR/Zotero-${VERSION}_linux-$arch.tar.bz2"
cd "$STAGE_DIR"
tar -cjf "$DIST_DIR/Zotero-${VERSION}_linux-$arch.tar.bz2" "Zotero_linux-$arch"
fi
done
fi
rm -rf $BUILD_DIR
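For faster iteration during development, the -s and -q flags documented in the usage text above can be combined to skip packaging, maximum compression, and jar optimization (a hedged example; the source directory is a placeholder):

    # Build unpackaged Mac binaries into staging/ only
    ./build.sh -d /path/to/zotero-client-build -p m -s -q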

app/config.sh Normal file

@ -0,0 +1,76 @@
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# Version of Gecko to build with
GECKO_VERSION_MAC="102.9.0esr"
GECKO_VERSION_LINUX="102.9.0esr"
GECKO_VERSION_WIN="102.9.0esr"
RUST_VERSION=1.60.0
# URL prefix for custom builds of Firefox components
custom_components_url="https://download.zotero.org/dev/"
APP_NAME="Zotero"
APP_ID="zotero\@zotero.org"
PDF_TOOLS_VERSION="0.0.5"
PDF_TOOLS_URL="https://zotero-download.s3.amazonaws.com/pdftools/pdftools-$PDF_TOOLS_VERSION.tar.gz"
# Whether to sign builds
SIGN=0
# OS X Developer ID certificate information
DEVELOPER_ID=F0F1FE48DB909B263AC51C8215374D87FDC12121
# Keychain and keychain password, if not building via the GUI
KEYCHAIN=""
KEYCHAIN_PASSWORD=""
NOTARIZATION_BUNDLE_ID=""
NOTARIZATION_USER=""
NOTARIZATION_PASSWORD=""
# Paths for Windows installer build
NSIS_DIR='C:\Program Files (x86)\NSIS\Unicode\'
# Paths for Windows installer build only necessary for signed binaries
SIGNTOOL='C:\Program Files (x86)\Windows Kits\10\bin\10.0.19041.0\x64\signtool.exe'
SIGNATURE_DESC='Zotero'
SIGNATURE_URL='https://www.zotero.org/'
SIGNTOOL_CERT_SUBJECT="Corporation for Digital Scholarship"
SIGNTOOL_TIMESTAMP_SERVER="http://timestamp.sectigo.com"
SIGNTOOL_DELAY=15
# Directory for Zotero code repos
repo_dir=$( cd "$DIR"/.. && pwd )
# Directory for Zotero source code
ZOTERO_SOURCE_DIR="$repo_dir"/zotero-client
# Directory for Zotero build files (needed for scripts/*_build_and_deploy)
ZOTERO_BUILD_DIR="$repo_dir"/zotero-build
# Directory for unpacked binaries
STAGE_DIR="$DIR/staging"
# Directory for packed binaries
DIST_DIR="$DIR/dist"
SOURCE_REPO_URL="https://github.com/zotero/zotero"
S3_BUCKET="zotero-download"
S3_CI_ZIP_PATH="ci/client"
S3_DIST_PATH="client"
DEPLOY_HOST="deploy.zotero"
DEPLOY_PATH="www/www-production/public/download/client/manifests"
DEPLOY_CMD="ssh $DEPLOY_HOST update-site-files"
BUILD_PLATFORMS=""
NUM_INCREMENTALS=6
if [ "`uname`" = "Darwin" ]; then
alias mktemp='mktemp -t tmp'
shopt -s expand_aliases
fi
# Make utilities (mar/mbsdiff) available in the path
PATH="$DIR/xulrunner/bin:$PATH"
if [ -f "$DIR/config-custom.sh" ]; then
. "$DIR/config-custom.sh"
fi
unset DIR
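config.sh sources an optional config-custom.sh, which app/.gitignore above keeps out of version control, so machine-specific settings can be overridden without editing this file. A hypothetical override file might look like:

    # config-custom.sh -- local overrides (illustrative values only)
    SIGN=1
    DEVELOPER_ID="<Developer ID certificate fingerprint>"
    KEYCHAIN="zotero-build"
    KEYCHAIN_PASSWORD="<keychain password>"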

app/fetch_mar_tools Executable file

@ -0,0 +1,21 @@
#!/bin/bash
set -euo pipefail
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
ROOT_DIR="$SCRIPT_DIR"
cd "$ROOT_DIR"
mkdir -p "xulrunner/bin"
if [ "`uname`" = "Darwin" ]; then
# Mozilla has Linux executables where the Mac files should be, so supply our own Mac builds
curl -o "xulrunner/bin/mar" https://zotero-download.s3.us-east-1.amazonaws.com/tools/mac/60.8.0esr/mar
curl -o "xulrunner/bin/mbsdiff" https://zotero-download.s3.us-east-1.amazonaws.com/tools/mac/60.8.0esr/mbsdiff
elif [ "`uname -o 2> /dev/null`" = "Cygwin" ]; then
curl -o "xulrunner/bin/mar.exe" https://ftp.mozilla.org/pub/firefox/nightly/2018/03/2018-03-01-10-01-39-mozilla-central/mar-tools/win64/mar.exe
curl -o "xulrunner/bin/mbsdiff.exe" https://ftp.mozilla.org/pub/firefox/nightly/2018/03/2018-03-01-10-01-39-mozilla-central/mar-tools/win64/mbsdiff.exe
else
curl -o "xulrunner/bin/mar" https://ftp.mozilla.org/pub/firefox/nightly/2018/03/2018-03-01-10-01-39-mozilla-central/mar-tools/linux64/mar
curl -o "xulrunner/bin/mbsdiff" https://ftp.mozilla.org/pub/firefox/nightly/2018/03/2018-03-01-10-01-39-mozilla-central/mar-tools/linux64/mbsdiff
fi
chmod 755 xulrunner/bin/mar xulrunner/bin/mbsdiff

app/fetch_pdftools Executable file

@ -0,0 +1,35 @@
#!/bin/bash
set -euo pipefail
# Copyright (c) 2018 Zotero
# Center for History and New Media
# George Mason University, Fairfax, Virginia, USA
# http://zotero.org
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
cd "$SCRIPT_DIR"
. config.sh
rm -rf pdftools
mkdir pdftools
cd pdftools
curl -o pdftools.tar.gz $PDF_TOOLS_URL
tar -zxvf pdftools.tar.gz
rm pdftools.tar.gz
echo Done

app/fetch_xulrunner.sh Executable file

@ -0,0 +1,398 @@
#!/bin/bash
set -euo pipefail
# Copyright (c) 2011 Zotero
# Center for History and New Media
# George Mason University, Fairfax, Virginia, USA
# http://zotero.org
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
CALLDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
. "$CALLDIR/config.sh"
function usage {
cat >&2 <<DONE
Usage: $0 -p platforms [-s]
Options
-p PLATFORMS Platforms to build (m=Mac, w=Windows, l=Linux)
DONE
exit 1
}
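# Example invocation (a sketch; run from the directory containing this script):
#   ./fetch_xulrunner.sh -p mwl    # fetch and modify Firefox for Mac, Windows, and Linux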
BUILD_MAC=0
BUILD_WIN=0
BUILD_LINUX=0
while getopts "p:s" opt; do
case $opt in
p)
for i in `seq 0 1 $((${#OPTARG}-1))`
do
case ${OPTARG:i:1} in
m) BUILD_MAC=1;;
w) BUILD_WIN=1;;
l) BUILD_LINUX=1;;
*)
echo "$0: Invalid platform option ${OPTARG:i:1}"
usage
;;
esac
done
;;
esac
shift $((OPTIND-1)); OPTIND=1
done
# Require at least one platform
if [[ $BUILD_MAC == 0 ]] && [[ $BUILD_WIN == 0 ]] && [[ $BUILD_LINUX == 0 ]]; then
usage
fi
function replace_line {
pattern=$1
replacement=$2
file=$3
if egrep -q "$pattern" "$file"; then
perl -pi -e "s/$pattern/$replacement/" "$file"
else
echo "$pattern" not found in "$file" -- aborting 2>&1
exit 1
fi
}
function remove_line {
pattern=$1
file=$2
if egrep -q "$pattern" "$file"; then
egrep -v "$pattern" "$file" > "$file.tmp"
mv "$file.tmp" "$file"
else
echo "$pattern" not found in "$infile" -- aborting 2>&1
exit 1
fi
}
#
# Make various modifications to the stock Firefox app
#
function modify_omni {
local platform=$1
mkdir omni
mv omni.ja omni
cd omni
# omni.ja is an "optimized" ZIP file, so use a script from Mozilla to avoid a warning from unzip
# here and to make it work after rezipping below
python3 "$CALLDIR/scripts/optimizejars.py" --deoptimize ./ ./ ./
rm -f omni.ja.log
unzip omni.ja
rm omni.ja
replace_line 'BROWSER_CHROME_URL:.+' 'BROWSER_CHROME_URL: "chrome:\/\/zotero\/content\/zoteroPane.xhtml",' modules/AppConstants.jsm
# https://firefox-source-docs.mozilla.org/toolkit/components/telemetry/internals/preferences.html
#
# It's not clear that most of these do anything anymore when not compiled in, but just in case
replace_line 'MOZ_REQUIRE_SIGNING:' 'MOZ_REQUIRE_SIGNING: false \&\&' modules/AppConstants.jsm
replace_line 'MOZ_DATA_REPORTING:' 'MOZ_DATA_REPORTING: false \&\&' modules/AppConstants.jsm
replace_line 'MOZ_SERVICES_HEALTHREPORT:' 'MOZ_SERVICES_HEALTHREPORT: false \&\&' modules/AppConstants.jsm
replace_line 'MOZ_TELEMETRY_REPORTING:' 'MOZ_TELEMETRY_REPORTING: false \&\&' modules/AppConstants.jsm
replace_line 'MOZ_TELEMETRY_ON_BY_DEFAULT:' 'MOZ_TELEMETRY_ON_BY_DEFAULT: false \&\&' modules/AppConstants.jsm
replace_line 'MOZ_CRASHREPORTER:' 'MOZ_CRASHREPORTER: false \&\&' modules/AppConstants.jsm
replace_line 'MOZ_UPDATE_CHANNEL:.+' 'MOZ_UPDATE_CHANNEL: "none",' modules/AppConstants.jsm
replace_line '"https:\/\/[^\/]+mozilla.com.+"' '""' modules/AppConstants.jsm
replace_line 'if \(!updateAuto\) \{' 'if (update.type == "major") {
LOG("UpdateService:_selectAndInstallUpdate - prompting because it is a major update");
AUSTLMY.pingCheckCode(this._pingSuffix, AUSTLMY.CHK_SHOWPROMPT_PREF);
Services.obs.notifyObservers(update, "update-available", "show-prompt");
return;
}
if (!updateAuto) {' modules/UpdateService.jsm
replace_line 'pref\("network.captive-portal-service.enabled".+' 'pref("network.captive-portal-service.enabled", false);' greprefs.js
replace_line 'pref\("network.connectivity-service.enabled".+' 'pref("network.connectivity-service.enabled", false);' greprefs.js
replace_line 'pref\("toolkit.telemetry.server".+' 'pref("toolkit.telemetry.server", "");' greprefs.js
replace_line 'pref\("toolkit.telemetry.unified".+' 'pref("toolkit.telemetry.unified", false);' greprefs.js
#
# # Disable transaction timeout
# perl -pi -e 's/let timeoutPromise/\/*let timeoutPromise/' modules/Sqlite.jsm
# perl -pi -e 's/return Promise.race\(\[transactionPromise, timeoutPromise\]\);/*\/return transactionPromise;/' modules/Sqlite.jsm
# rm -f jsloader/resource/gre/modules/Sqlite.jsm
#
# Disable unwanted components
remove_line '(RemoteSettings|services-|telemetry|Telemetry|URLDecorationAnnotationsService)' components/components.manifest
# Remove unwanted files
rm modules/FxAccounts*
# Causes a startup error -- try an empty file or a shim instead?
#rm modules/Telemetry*
rm modules/URLDecorationAnnotationsService.jsm
rm -rf modules/services-*
# Clear most WebExtension manifest properties
replace_line 'manifest = normalized.value;' 'manifest = normalized.value;
if (this.type == "extension") {
if (!manifest.applications?.zotero?.id) {
this.manifestError("applications.zotero.id not provided");
}
if (!manifest.applications?.zotero?.update_url) {
this.manifestError("applications.zotero.update_url not provided");
}
if (!manifest.applications?.zotero?.strict_max_version) {
this.manifestError("applications.zotero.strict_max_version not provided");
}
manifest.browser_specific_settings = undefined;
manifest.content_scripts = [];
manifest.permissions = [];
manifest.host_permissions = [];
manifest.web_accessible_resources = undefined;
manifest.experiment_apis = {};
}' modules/Extension.jsm
# Use applications.zotero instead of applications.gecko
replace_line 'let bss = manifest.applications\?.gecko' 'let bss = manifest.applications?.zotero' modules/addons/XPIInstall.jsm
replace_line 'manifest.applications\?.gecko' 'manifest.applications?.zotero' modules/Extension.jsm
# When installing addon, use app version instead of toolkit version for targetApplication
replace_line "id: TOOLKIT_ID," "id: '$APP_ID'," modules/addons/XPIInstall.jsm
# Accept zotero@chnm.gmu.edu for target application to allow Zotero 6 plugins to remain
# installed in Zotero 7
replace_line "if \(targetApp.id == Services.appinfo.ID\) \{" "if (targetApp.id == 'zotero\@chnm.gmu.edu') targetApp.id = '$APP_ID'; if (targetApp.id == Services.appinfo.ID) {" modules/addons/XPIDatabase.jsm
# For updates, look for applications.zotero instead of applications.gecko in manifest.json and
# use the app id and version for strict_min_version/strict_max_version comparisons
replace_line 'gecko: \{\},' 'zotero: {},' modules/addons/AddonUpdateChecker.jsm
replace_line 'if \(!\("gecko" in applications\)\) \{' 'if (!("zotero" in applications)) {' modules/addons/AddonUpdateChecker.jsm
replace_line '"gecko not in application entry' '"zotero not in application entry' modules/addons/AddonUpdateChecker.jsm
replace_line 'let app = getProperty\(applications, "gecko", "object"\);' 'let app = getProperty(applications, "zotero", "object");' modules/addons/AddonUpdateChecker.jsm
replace_line "id: TOOLKIT_ID," "id: '$APP_ID'," modules/addons/AddonUpdateChecker.jsm
replace_line 'AddonManagerPrivate.webExtensionsMinPlatformVersion' '7.0' modules/addons/AddonUpdateChecker.jsm
replace_line 'result.targetApplications.push' 'false && result.targetApplications.push' modules/addons/AddonUpdateChecker.jsm
# Allow addon installation by bypassing confirmation dialogs. If we want a confirmation dialog,
# we need to either add gXPInstallObserver from browser-addons.js [1][2] or provide our own with
# Ci.amIWebInstallPrompt [3].
#
# [1] https://searchfox.org/mozilla-esr102/rev/5a6d529652045050c5cdedc0558238949b113741/browser/base/content/browser.js#1902-1923
# [2] https://searchfox.org/mozilla-esr102/rev/5a6d529652045050c5cdedc0558238949b113741/browser/base/content/browser-addons.js#201
# [3] https://searchfox.org/mozilla-esr102/rev/5a6d529652045050c5cdedc0558238949b113741/toolkit/mozapps/extensions/AddonManager.jsm#3114-3124
replace_line 'if \(info.addon.userPermissions\) \{' 'if (false) {' modules/AddonManager.jsm
replace_line '\} else if \(info.addon.sitePermissions\) \{' '} else if (false) {' modules/AddonManager.jsm
replace_line '\} else if \(requireConfirm\) \{' '} else if (false) {' modules/AddonManager.jsm
# No idea why this is necessary, but without it initialization fails with "TypeError: "constructor" is read-only"
replace_line 'LoginStore.prototype.constructor = LoginStore;' '\/\/LoginStore.prototype.constructor = LoginStore;' modules/LoginStore.jsm
#
# # Allow proxy password saving
# perl -pi -e 's/get _inPrivateBrowsing\(\) \{/get _inPrivateBrowsing() {if (true) { return false; }/' components/nsLoginManagerPrompter.js
#
# # Change text in update dialog
# perl -pi -e 's/A security and stability update for/A new version of/' chrome/en-US/locale/en-US/mozapps/update/updates.properties
# perl -pi -e 's/updateType_major=New Version/updateType_major=New Major Version/' chrome/en-US/locale/en-US/mozapps/update/updates.properties
# perl -pi -e 's/updateType_minor=Security Update/updateType_minor=New Version/' chrome/en-US/locale/en-US/mozapps/update/updates.properties
# perl -pi -e 's/update for &brandShortName; as soon as possible/update as soon as possible/' chrome/en-US/locale/en-US/mozapps/update/updates.dtd
#
# Set available locales
cp "$CALLDIR/assets/multilocale.txt" res/multilocale.txt
#
# # Force Lucida Grande on non-Retina displays, since San Francisco is used otherwise starting in
# # Catalina, and it looks terrible
# if [[ $platform == 'mac' ]]; then
# echo "* { font-family: Lucida Grande, Lucida Sans Unicode, Lucida Sans, Geneva, -apple-system, sans-serif !important; }" >> chrome/toolkit/skin/classic/global/global.css
# fi
# Use Zotero URL opening in Mozilla dialogs (e.g., app update dialog)
replace_line 'function openURL\(aURL\) \{' 'function openURL(aURL) {let {Zotero} = ChromeUtils.import("chrome:\/\/zotero\/content\/include.jsm"); Zotero.launchURL(aURL); return;' chrome/toolkit/content/global/contentAreaUtils.js
#
# Modify Add-ons window
#
file="chrome/toolkit/content/mozapps/extensions/aboutaddons.css"
echo >> $file
# Hide search bar, Themes and Plugins tabs, and sidebar footer
echo '.main-search, button[name="theme"], button[name="plugin"], sidebar-footer { display: none; }' >> $file
echo '.main-heading { margin-top: 2em; }' >> $file
# Hide Details/Permissions tabs in addon details so we only show details
echo 'addon-details > button-group { display: none !important; }' >> $file
# Hide "Debug Addons" and "Manage Extension Shortcuts"
echo 'panel-item[action="debug-addons"], panel-item[action="reset-update-states"] + panel-item-separator, panel-item[action="manage-shortcuts"] { display: none }' >> $file
file="chrome/toolkit/content/mozapps/extensions/aboutaddons.js"
# Hide unsigned-addon warning
replace_line 'if \(!isCorrectlySigned\(addon\)\) \{' 'if (!isCorrectlySigned(addon)) {return {};' $file
# Hide Private Browsing setting in addon details
replace_line 'pbRow\.' '\/\/pbRow.' $file
replace_line 'let isAllowed = await isAllowedInPrivateBrowsing' '\/\/let isAllowed = await isAllowedInPrivateBrowsing' $file
# Use our own strings for the removal prompt
replace_line 'let \{ BrowserAddonUI \} = windowRoot.ownerGlobal;' '' $file
replace_line 'await BrowserAddonUI.promptRemoveExtension' 'promptRemoveExtension' $file
# Customize empty-list message
replace_line 'createEmptyListMessage\(\) {' 'createEmptyListMessage() {
var p = document.createElement("p");
p.id = "empty-list-message";
return p;' $file
# Swap in include.js, which we need for Zotero.getString(), for abuse-reports.js, which we don't need
# Hide Recommendations tab in sidebar and recommendations in main pane
replace_line 'function isDiscoverEnabled\(\) \{' 'function isDiscoverEnabled() {return false;' chrome/toolkit/content/mozapps/extensions/aboutaddonsCommon.js
replace_line 'pref\("extensions.htmlaboutaddons.recommendations.enabled".+' 'pref("extensions.htmlaboutaddons.recommendations.enabled", false);' greprefs.js
# Hide Report option
replace_line 'pref\("extensions.abuseReport.enabled".+' 'pref("extensions.abuseReport.enabled", false);' greprefs.js
# The first displayed Services.prompt dialog's size jumps around because sizeToContent() is called twice
# Fix by preventing the first sizeToContent() call if the icon hasn't been loaded yet
replace_line 'window.sizeToContent\(\);' 'if (ui.infoIcon.complete) window.sizeToContent();' chrome/toolkit/content/global/commonDialog.js
replace_line 'ui.infoIcon.addEventListener' 'if (!ui.infoIcon.complete) ui.infoIcon.addEventListener' chrome/toolkit/content/global/commonDialog.js
# Use native checkbox instead of Firefox-themed version in prompt dialogs
replace_line '<xul:checkbox' '<xul:checkbox native=\"true\"' chrome/toolkit/content/global/commonDialog.xhtml
zip -qr9XD omni.ja *
mv omni.ja ..
cd ..
python3 "$CALLDIR/scripts/optimizejars.py" --optimize ./ ./ ./
rm -rf omni
# Unzip browser/omni.ja and leave unzipped
cd browser
mkdir omni
mv omni.ja omni
cd omni
ls -la
set +e
unzip omni.ja
set -e
rm omni.ja
# Remove Firefox update URLs
remove_line 'pref\("app.update.url.(manual|details)' defaults/preferences/firefox-branding.js
# Remove Firefox overrides (e.g., to use Firefox-specific strings for connection errors)
remove_line '(override)' chrome/chrome.manifest
# Remove WebExtension APIs
remove_line ext-browser.json components/components.manifest
}
mkdir -p xulrunner
cd xulrunner
if [ $BUILD_MAC == 1 ]; then
GECKO_VERSION="$GECKO_VERSION_MAC"
DOWNLOAD_URL="https://ftp.mozilla.org/pub/firefox/releases/$GECKO_VERSION"
rm -rf Firefox.app
if [ -e "Firefox $GECKO_VERSION.app.zip" ]; then
echo "Using Firefox $GECKO_VERSION.app.zip"
unzip "Firefox $GECKO_VERSION.app.zip"
else
curl -o Firefox.dmg "$DOWNLOAD_URL/mac/en-US/Firefox%20$GECKO_VERSION.dmg"
set +e
hdiutil detach -quiet /Volumes/Firefox 2>/dev/null
set -e
hdiutil attach -quiet Firefox.dmg
cp -a /Volumes/Firefox/Firefox.app .
hdiutil detach -quiet /Volumes/Firefox
fi
# Download custom components
#echo
#rm -rf MacOS
#if [ -e "Firefox $GECKO_VERSION MacOS.zip" ]; then
# echo "Using Firefox $GECKO_VERSION MacOS.zip"
# unzip "Firefox $GECKO_VERSION MacOS.zip"
#else
# echo "Downloading Firefox $GECKO_VERSION MacOS.zip"
# curl -o MacOS.zip "${custom_components_url}Firefox%20$GECKO_VERSION%20MacOS.zip"
# unzip MacOS.zip
#fi
#echo
pushd Firefox.app/Contents/Resources
modify_omni mac
popd
if [ ! -e "Firefox $GECKO_VERSION.app.zip" ]; then
rm "Firefox.dmg"
fi
#if [ ! -e "Firefox $GECKO_VERSION MacOS.zip" ]; then
# rm "MacOS.zip"
#fi
fi
if [ $BUILD_WIN == 1 ]; then
GECKO_VERSION="$GECKO_VERSION_WIN"
DOWNLOAD_URL="https://ftp.mozilla.org/pub/firefox/releases/$GECKO_VERSION"
for arch in win32 win64; do
xdir=firefox-$arch
rm -rf $xdir
mkdir $xdir
curl -O "$DOWNLOAD_URL/$arch/en-US/Firefox%20Setup%20$GECKO_VERSION.exe"
7z x Firefox%20Setup%20$GECKO_VERSION.exe -o$xdir 'core/*'
mv $xdir/core $xdir-core
rm -rf $xdir
mv $xdir-core $xdir
pushd $xdir
modify_omni $arch
popd
rm "Firefox%20Setup%20$GECKO_VERSION.exe"
echo
echo
done
fi
if [ $BUILD_LINUX == 1 ]; then
GECKO_VERSION="$GECKO_VERSION_LINUX"
DOWNLOAD_URL="https://ftp.mozilla.org/pub/firefox/releases/$GECKO_VERSION"
rm -rf firefox
curl -O "$DOWNLOAD_URL/linux-i686/en-US/firefox-$GECKO_VERSION.tar.bz2"
rm -rf firefox-i686
tar xvf firefox-$GECKO_VERSION.tar.bz2
mv firefox firefox-i686
pushd firefox-i686
modify_omni linux32
popd
rm "firefox-$GECKO_VERSION.tar.bz2"
curl -O "$DOWNLOAD_URL/linux-x86_64/en-US/firefox-$GECKO_VERSION.tar.bz2"
rm -rf firefox-x86_64
tar xvf firefox-$GECKO_VERSION.tar.bz2
mv firefox firefox-x86_64
pushd firefox-x86_64
modify_omni linux64
popd
rm "firefox-$GECKO_VERSION.tar.bz2"
fi
echo Done

13
app/linux/set_launcher_icon Executable file
View file

@ -0,0 +1,13 @@
#!/bin/bash -e
#
# Run this to update the launcher file with the current path to the application icon
#
APPDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
if [ -w "$APPDIR"/zotero.desktop ]; then
sed -i -e "s@^Icon=.*@Icon=$APPDIR/chrome/icons/default/default256.png@" "$APPDIR"/zotero.desktop
else
echo "$APPDIR"/zotero.desktop is not writable
exit 1
fi

BIN
app/linux/updater-i686 Executable file

Binary file not shown.

BIN
app/linux/updater-x86_64 Executable file

Binary file not shown.

16
app/linux/zotero Executable file
View file

@ -0,0 +1,16 @@
#!/bin/bash
# Increase open files limit
#
# Mozilla file functions (OS.File.move()/copy(), NetUtil.asyncFetch/asyncCopy()) can leave file
# descriptors open for a few seconds (even with an explicit inputStream.close() in the case of
# the latter), so a source installation that copies ~500 translators and styles (with fds for
# source and target) can exceed the default 1024 limit.
# Current hard-limit on Ubuntu 16.10 is 4096
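# Illustrative check (not part of the launcher's logic):
#   ulimit -Sn   # current soft limit (often the default 1024)
#   ulimit -Hn   # hard limit; the value below must not exceed this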
ulimit -n 4096
# Allow profile downgrade for Zotero
export MOZ_ALLOW_DOWNGRADE=1
CALLDIR="$(dirname "$(readlink -f "$0")")"
"$CALLDIR/zotero-bin" -app "$CALLDIR/app/application.ini" "$@"

9
app/linux/zotero.desktop Executable file
View file

@ -0,0 +1,9 @@
[Desktop Entry]
Name=Zotero
Exec=bash -c "$(dirname $(realpath $(echo %k | sed -e 's/^file:\/\///')))/zotero -url %U"
Icon=zotero.ico
Type=Application
Terminal=false
Categories=Office;
MimeType=text/plain;x-scheme-handler/zotero;application/x-research-info-systems;text/x-research-info-systems;text/ris;application/x-endnote-refer;application/x-inst-for-Scientific-info;application/mods+xml;application/rdf+xml;application/x-bibtex;text/x-bibtex;application/marc;application/vnd.citationstyles.style+xml
X-GNOME-SingleWindow=true

204
app/mac/Contents/Info.plist Normal file
View file

@ -0,0 +1,204 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>English</string>
<key>CFBundleDocumentTypes</key>
<array>
<!-- Import formats -->
<dict>
<key>CFBundleTypeExtensions</key>
<array>
<string>ris</string>
</array>
<!--<key>CFBundleTypeIconFile</key>
<string>document.icns</string>-->
<key>CFBundleTypeMIMETypes</key>
<array>
<string>application/x-research-info-systems</string>
<string>text/x-research-info-systems</string>
<string>text/ris</string>
<string>ris</string>
<string>application/x-endnote-refer</string>
</array>
<key>CFBundleTypeName</key>
<string>Research Information Systems Document</string>
<key>CFBundleTypeRole</key>
<string>Viewer</string>
</dict>
<dict>
<key>CFBundleTypeExtensions</key>
<array>
<string>ciw</string>
<string>isi</string>
</array>
<!--<key>CFBundleTypeIconFile</key>
<string>document.icns</string>-->
<key>CFBundleTypeMIMETypes</key>
<array>
<string>application/x-inst-for-Scientific-info</string>
</array>
<key>CFBundleTypeName</key>
<string>ISI Common Export Format Document</string>
<key>CFBundleTypeRole</key>
<string>Viewer</string>
</dict>
<dict>
<key>CFBundleTypeExtensions</key>
<array>
<string>mods</string>
</array>
<!--<key>CFBundleTypeIconFile</key>
<string>document.icns</string>-->
<key>CFBundleTypeMIMETypes</key>
<array>
<string>application/mods+xml</string>
</array>
<key>CFBundleTypeName</key>
<string>Metadata Object Description Schema Document</string>
<key>CFBundleTypeRole</key>
<string>Viewer</string>
</dict>
<dict>
<key>CFBundleTypeExtensions</key>
<array>
<string>rdf</string>
</array>
<!--<key>CFBundleTypeIconFile</key>
<string>document.icns</string>-->
<key>CFBundleTypeMIMETypes</key>
<array>
<string>application/rdf+xml</string>
</array>
<key>CFBundleTypeName</key>
<string>Resource Description Framework Document</string>
<key>CFBundleTypeRole</key>
<string>Viewer</string>
</dict>
<dict>
<key>CFBundleTypeExtensions</key>
<array>
<string>bib</string>
<string>bibtex</string>
</array>
<!--<key>CFBundleTypeIconFile</key>
<string>document.icns</string>-->
<key>CFBundleTypeMIMETypes</key>
<array>
<string>application/x-bibtex</string>
<string>text/x-bibtex</string>
</array>
<key>CFBundleTypeName</key>
<string>BibTeX Document</string>
<key>CFBundleTypeRole</key>
<string>Viewer</string>
</dict>
<dict>
<key>CFBundleTypeExtensions</key>
<array>
<string>mrc</string>
<string>marc</string>
</array>
<!--<key>CFBundleTypeIconFile</key>
<string>document.icns</string>-->
<key>CFBundleTypeMIMETypes</key>
<array>
<string>application/marc</string>
</array>
<key>CFBundleTypeName</key>
<string>MARC Document</string>
<key>CFBundleTypeRole</key>
<string>Viewer</string>
</dict>
<!-- Citation styles -->
<dict>
<key>CFBundleTypeExtensions</key>
<array>
<string>csl</string>
<string>csl.txt</string>
</array>
<!--<key>CFBundleTypeIconFile</key>
<string>document.icns</string>-->
<key>CFBundleTypeMIMETypes</key>
<array>
<string>application/vnd.citationstyles.style+xml</string>
</array>
<key>CFBundleTypeName</key>
<string>CSL Citation Style</string>
<key>CFBundleTypeRole</key>
<string>Viewer</string>
</dict>
<!-- Hopefully, we don't become the default app for these -->
<dict>
<key>CFBundleTypeExtensions</key>
<array>
<string>xml</string>
</array>
<!--<key>CFBundleTypeIconFile</key>
<string>document.icns</string>-->
<key>CFBundleTypeName</key>
<string>XML Document</string>
<key>CFBundleTypeRole</key>
<string>Viewer</string>
</dict>
<dict>
<key>CFBundleTypeExtensions</key>
<array>
<string>txt</string>
</array>
<!--<key>CFBundleTypeIconFile</key>
<string>document.icns</string>-->
<key>CFBundleTypeName</key>
<string>Text File</string>
<key>CFBundleTypeRole</key>
<string>Viewer</string>
</dict>
</array>
<key>CFBundleExecutable</key>
<string>zotero</string>
<key>CFBundleGetInfoString</key>
<string>Zotero {{VERSION}}, © 2006-2018 Contributors</string>
<key>CFBundleIconFile</key>
<string>zotero</string>
<key>CFBundleIdentifier</key>
<string>org.zotero.zotero</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>Zotero</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>{{VERSION_NUMERIC}}</string>
<key>CFBundleSignature</key>
<string>ZOTR</string>
<key>CFBundleURLTypes</key>
<array>
<dict>
<!--<key>CFBundleURLIconFile</key>
<string>document.icns</string>-->
<key>CFBundleURLName</key>
<string>zotero URL</string>
<key>CFBundleURLSchemes</key>
<array>
<string>zotero</string>
</array>
</dict>
</array>
<key>CFBundleVersion</key>
<string>{{VERSION_NUMERIC}}</string>
<key>NSAppleScriptEnabled</key>
<true/>
<key>CGDisableCoalescedUpdates</key>
<true/>
<key>NSHighResolutionCapable</key>
<true/>
<key>NSSupportsAutomaticGraphicsSwitching</key>
<true/>
<key>LSMinimumSystemVersion</key>
<string>10.9.0</string>
</dict>
</plist>

1
app/mac/Contents/PkgInfo Normal file
View file

@ -0,0 +1 @@
APPLZOTR

Binary file not shown.

BIN
app/mac/DSStore Normal file

Binary file not shown.

68
app/mac/build-and-unify Executable file
View file

@ -0,0 +1,68 @@
#!/bin/bash
set -e
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
ROOT_DIR="$(dirname "$SCRIPT_DIR")"
. "$ROOT_DIR/config.sh"
fx_app_name=Firefox.app
# Get Mozilla source directory from command line
if [ -z "${1:-}" ]; then
echo "Usage: $0 /path/to/mozilla-unified" >&2
exit 1
fi
GECKO_PATH=$1
mach=$GECKO_PATH/mach
# Set ZOTERO_REPOS_DIR to use directory other than $HOME for zotero-standalone-build
if [ -n "${ZOTERO_REPOS_DIR:-}" ]; then
repos_dir=$ZOTERO_REPOS_DIR
else
repos_dir=$HOME
fi
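# Example invocation (a sketch; paths are placeholders):
#   ZOTERO_REPOS_DIR=$HOME/zotero ./build-and-unify $HOME/src/mozilla-unified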
if [ ! -d "$repos_dir/zotero-standalone-build" ]; then
echo "$repos_dir/zotero-standalone-build not found" >&2
exit 1
fi
BUILD_DIR=`mktemp -d`
function cleanup {
rm -rf $BUILD_DIR
}
trap cleanup EXIT
set -x
export MOZ_BUILD_DATE=`date "+%Y%m%d%H%M%S"`
# Install required Rust version
rustup toolchain install $RUST_VERSION
rustup target add aarch64-apple-darwin
rustup target add x86_64-apple-darwin
rustup default $RUST_VERSION
cp "$SCRIPT_DIR/mozconfig" "$GECKO_PATH"
# Build Firefox for Intel and Apple Silicon
export Z_ARCH=x64
$mach build
$mach package
export Z_ARCH=aarch64
$mach build
$mach package
cd $BUILD_DIR
# Unify into Universal build
# From https://searchfox.org/mozilla-central/rev/97c902e8f92b15dc63eb584bfc594ecb041242a4/taskcluster/scripts/misc/unify.sh
for i in x86_64 aarch64; do
$mach python -m mozbuild.action.unpack_dmg "$GECKO_PATH"/obj-$i-apple-darwin/dist/*.dmg $i
done
mv x86_64 x64
$mach python "$GECKO_PATH/toolkit/mozapps/installer/unify.py" x64/*.app aarch64/*.app
cp x64/$fx_app_name/Contents/MacOS/firefox zotero
xz zotero
mv zotero.xz "$repos_dir"/zotero-standalone-build/mac/zotero.xz

34
app/mac/entitlements.xml Normal file
View file

@ -0,0 +1,34 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<!--
Entitlements to apply during codesigning of production builds.
-->
<plist version="1.0">
<dict>
<!-- Firefox needs to create executable pages (without MAP_JIT) -->
<key>com.apple.security.cs.allow-unsigned-executable-memory</key><true/>
<!-- Allow loading third party libraries. Needed for Flash and CDMs -->
<!-- Disabled for Zotero -->
<key>com.apple.security.cs.disable-library-validation</key><false/>
<!-- Firefox needs to access the microphone on sites the user allows -->
<!-- Disabled for Zotero -->
<key>com.apple.security.device.audio-input</key><false/>
<!-- Firefox needs to access the camera on sites the user allows -->
<!-- Disabled for Zotero -->
<key>com.apple.security.device.camera</key><false/>
<!-- Firefox needs to access the location on sites the user allows -->
<!-- Disabled for Zotero -->
<key>com.apple.security.personal-information.location</key><false/>
<!-- For SmartCardServices(7) -->
<!-- Disabled for Zotero -->
<key>com.apple.security.smartcard</key><false/>
<!-- Added for Zotero to control Word and bring windows to the front -->
<key>com.apple.security.automation.apple-events</key><true/>
</dict>
</plist>

27
app/mac/mozconfig Normal file
View file

@ -0,0 +1,27 @@
if [ "$Z_ARCH" == "x64" ]; then
ac_add_options --target=x86_64-apple-darwin
elif [ "$Z_ARCH" == "aarch64" ]; then
ac_add_options --target=aarch64-apple-darwin
fi
ac_add_options --enable-bootstrap
ac_add_options --with-macos-sdk=$HOME/tmp/MacOSX11.0.sdk
mk_add_options AUTOCLOBBER=1
# These don't all affect the stub, but they can't hurt, and we'll want them if
# we switch to custom XUL builds
ac_add_options MOZ_ENABLE_JS_DUMP=1
ac_add_options MOZ_ENABLE_FORKSERVER=
ac_add_options MOZ_TELEMETRY_REPORTING=
ac_add_options MOZ_DATA_REPORTING=
ac_add_options --disable-tests
ac_add_options --disable-debug
ac_add_options --disable-debug-symbols
ac_add_options --disable-webrtc
ac_add_options --disable-eme
export MOZILLA_OFFICIAL=1
export RELEASE_OR_BETA=1
MOZ_REQUIRE_SIGNING=
ac_add_options --enable-official-branding

124
app/mac/mozilla-102.patch Normal file
View file

@ -0,0 +1,124 @@
diff --git a/browser/app/nsBrowserApp.cpp b/browser/app/nsBrowserApp.cpp
--- a/browser/app/nsBrowserApp.cpp
+++ b/browser/app/nsBrowserApp.cpp
@@ -149,19 +149,29 @@ static bool IsArg(const char* arg, const
#endif
return false;
}
Bootstrap::UniquePtr gBootstrap;
static int do_main(int argc, char* argv[], char* envp[]) {
+ // Allow profile downgrade for Zotero
+ setenv("MOZ_ALLOW_DOWNGRADE", "1", 1);
+
// Allow firefox.exe to launch XULRunner apps via -app <application.ini>
// Note that -app must be the *first* argument.
- const char* appDataFile = getenv("XUL_APP_FILE");
+ UniqueFreePtr<char> iniPath = BinaryPath::GetApplicationIni();
+ if (!iniPath) {
+ Output("Couldn't find application.ini.\n");
+ return 255;
+ }
+ char *appDataFile = iniPath.get();
+
+
if ((!appDataFile || !*appDataFile) && (argc > 1 && IsArg(argv[1], "app"))) {
if (argc == 2) {
Output("Incorrect number of arguments passed to -app");
return 255;
}
appDataFile = argv[2];
char appEnv[MAXPATHLEN];
diff --git a/xpcom/build/BinaryPath.h b/xpcom/build/BinaryPath.h
--- a/xpcom/build/BinaryPath.h
+++ b/xpcom/build/BinaryPath.h
@@ -128,16 +128,56 @@ class BinaryPath {
} else {
rv = NS_ERROR_FAILURE;
}
CFRelease(executableURL);
return rv;
}
+ static nsresult GetApplicationIni(char aResult[MAXPATHLEN])
+ {
+ // Works even if we're not bundled.
+ CFBundleRef appBundle = CFBundleGetMainBundle();
+ if (!appBundle) {
+ return NS_ERROR_FAILURE;
+ }
+
+ CFURLRef iniURL = CFBundleCopyResourceURL(appBundle, CFSTR("application.ini"),
+ NULL, CFSTR("app"));
+ if (!iniURL) {
+ return NS_ERROR_FAILURE;
+ }
+
+ nsresult rv;
+ if (CFURLGetFileSystemRepresentation(iniURL, false, (UInt8*)aResult,
+ MAXPATHLEN)) {
+ // Sanitize path in case the app was launched from Terminal via
+ // './firefox' for example.
+ size_t readPos = 0;
+ size_t writePos = 0;
+ while (aResult[readPos] != '\0') {
+ if (aResult[readPos] == '.' && aResult[readPos + 1] == '/') {
+ readPos += 2;
+ } else {
+ aResult[writePos] = aResult[readPos];
+ readPos++;
+ writePos++;
+ }
+ }
+ aResult[writePos] = '\0';
+ rv = NS_OK;
+ } else {
+ rv = NS_ERROR_FAILURE;
+ }
+
+ CFRelease(iniURL);
+ return rv;
+ }
+
#elif defined(ANDROID)
static nsresult Get(char aResult[MAXPATHLEN]) {
// On Android, we use the MOZ_ANDROID_LIBDIR variable that is set by the
// Java bootstrap code.
const char* libDir = getenv("MOZ_ANDROID_LIBDIR");
if (!libDir) {
return NS_ERROR_FAILURE;
}
@@ -267,16 +307,29 @@ class BinaryPath {
if (NS_FAILED(Get(path))) {
return nullptr;
}
UniqueFreePtr<char> result;
result.reset(strdup(path));
return result;
}
+#if defined(XP_MACOSX)
+ static UniqueFreePtr<char> GetApplicationIni()
+ {
+ char path[MAXPATHLEN];
+ if (NS_FAILED(GetApplicationIni(path))) {
+ return nullptr;
+ }
+ UniqueFreePtr<char> result;
+ result.reset(strdup(path));
+ return result;
+ }
+#endif
+
#ifdef MOZILLA_INTERNAL_API
static nsresult GetFile(nsIFile** aResult) {
nsCOMPtr<nsIFile> lf;
# ifdef XP_WIN
wchar_t exePath[MAXPATHLEN];
nsresult rv = GetW(exePath);
# else
char exePath[MAXPATHLEN];

1520
app/mac/pkg-dmg Executable file

File diff suppressed because it is too large

BIN
app/mac/updater.tar.bz2 Normal file

Binary file not shown.

BIN
app/mac/zotero.xz Normal file

Binary file not shown.

@ -0,0 +1 @@
Subproject commit dc73919eff2b940918399fb220e5fb7595fed3d2

@ -0,0 +1 @@
Subproject commit 7e24a3d9f2f57ccefce09162fc357a958d94be34

@ -0,0 +1 @@
Subproject commit c3bca35acd153455e138f30a191e46407b2e9162

View file

@ -0,0 +1,23 @@
#!/bin/bash
set -euo pipefail
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
ROOT_DIR="$(dirname "$SCRIPT_DIR")"
. "$ROOT_DIR/config.sh"
CHANNEL="beta"
export SAFARI_APPEX="$ROOT_DIR/../safari-app-extension-builds/beta/ZoteroSafariExtension.appex"
cd "$SCRIPT_DIR"
./check_requirements
hash=`./get_repo_branch_hash master`
source_dir=`./get_commit_files $hash`
function cleanup {
rm -rf $source_dir
}
trap cleanup EXIT
"$ZOTERO_BUILD_DIR/xpi/build_xpi" -s "$source_dir" -c $CHANNEL -m $hash
./build_and_deploy -d "$ZOTERO_BUILD_DIR/xpi/build/staging" -p $BUILD_PLATFORMS -c $CHANNEL

View file

@ -0,0 +1,24 @@
#!/bin/bash
set -euo pipefail
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
ROOT_DIR="$(dirname "$SCRIPT_DIR")"
. "$ROOT_DIR/config.sh"
CHANNEL="dev"
BRANCH="master"
export SAFARI_APPEX="$ROOT_DIR/../safari-app-extension-builds/dev/ZoteroSafariExtension.appex"
cd "$SCRIPT_DIR"
./check_requirements
hash=`./get_repo_branch_hash $BRANCH`
source_dir=`./get_commit_files $hash`
function cleanup {
rm -rf $source_dir
}
trap cleanup EXIT
"$ZOTERO_BUILD_DIR/xpi/build_xpi" -s "$source_dir" -c $CHANNEL -m $hash
./build_and_deploy -d "$ZOTERO_BUILD_DIR/xpi/build/staging" -p $BUILD_PLATFORMS -c $CHANNEL -i 1

View file

@ -0,0 +1,23 @@
#!/bin/bash
set -euo pipefail
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
ROOT_DIR="$(dirname "$SCRIPT_DIR")"
. "$ROOT_DIR/config.sh"
CHANNEL="release"
BRANCH="master"
cd "$SCRIPT_DIR"
./check_requirements
hash=`./get_repo_branch_hash $BRANCH`
source_dir=`./get_commit_files $hash`
function cleanup {
rm -rf $source_dir
}
trap cleanup EXIT
"$ZOTERO_BUILD_DIR/xpi/build_xpi" -s "$source_dir" -c $CHANNEL -m $hash
./build_and_deploy -d "$ZOTERO_BUILD_DIR/xpi/build/staging" -p $BUILD_PLATFORMS -c $CHANNEL

52
app/scripts/add_omni_file Executable file
View file

@ -0,0 +1,52 @@
#!/bin/bash
set -euo pipefail
#
# Zip a file directly into app/omni.ja in staging/
#
# Zip paths are relative to the current directory, so this should be run from
# the client build/ directory
#
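# Example (a sketch; the file path is a placeholder -- zip paths are taken relative to the
# current directory, so run this from the client build/ directory):
#   /path/to/app/scripts/add_omni_file chrome/content/zotero/zoteroPane.xhtml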
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
ROOT_DIR="$(dirname "$SCRIPT_DIR")"
. "$ROOT_DIR/config.sh"
function usage {
cat >&2 <<DONE
Usage: $0 path/to/file
DONE
exit 1
}
if [ -z "${1:-}" ]; then
usage
fi
files="$@"
for file in $files; do
if [ ! -f "$file" ]; then
echo "Error: $file not found!"
exit 1
fi
done
mac_path="$STAGE_DIR/Zotero.app/Contents/Resources"
win_path="$STAGE_DIR/Zotero_win32"
linux_path="$STAGE_DIR/Zotero_linux-x86_64"
added=0
for path in "$mac_path" "$win_path" "$linux_path"; do
if [ -d "$path" ]; then
echo "$path/app/omni.ja"
echo "Updating $(basename $(dirname $(dirname $path)))"
zip "$path/app/omni.ja" $files
added=1
fi
done
if [ $added -eq 0 ]; then
echo "No directories found in staging!"
exit 1
fi

14
app/scripts/bootstrap.sh Normal file
View file

@ -0,0 +1,14 @@
get_current_platform() {
if [[ "$OSTYPE" == "linux-gnu"* ]]; then
echo l
elif [[ "$OSTYPE" == "darwin"* ]]; then
echo m
elif [[ "$OSTYPE" == "cygwin" ]]; then
echo w
elif [[ "$OSTYPE" == "msys" ]]; then
echo w
# Unknown, so probably Unix-y
else
echo l
fi
}
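# Usage sketch (as in check_requirements):
#   platform=$(get_current_platform)    # "m", "w", or "l"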

162
app/scripts/build_and_deploy Executable file
View file

@ -0,0 +1,162 @@
#!/bin/bash
#
# Builds and deploys Zotero with full and incremental updates
#
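# Example invocation (a sketch; the staging path and option values are placeholders):
#   ./build_and_deploy -d "$ZOTERO_BUILD_DIR/xpi/build/staging" -c beta -p mwl -i 6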
set -euo pipefail
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
ROOT_DIR="$(dirname "$SCRIPT_DIR")"
. "$ROOT_DIR/config.sh"
function usage {
cat >&2 <<DONE
Usage: $0 -d SOURCE_DIR -c CHANNEL -p PLATFORMS
-d SOURCE_DIR Source directory to build from
-c CHANNEL Release channel ('release', 'beta', 'dev')
-p PLATFORMS Platforms to build (m=Mac, w=Windows, l=Linux)
-i INCREMENTALS Number of incremental builds to create
DONE
exit 1
}
SOURCE_DIR=""
CHANNEL=""
PLATFORMS=""
while getopts "d:c:p:i:" opt; do
case $opt in
d)
SOURCE_DIR="$OPTARG"
if [ ! -d "$SOURCE_DIR" ]; then
echo "$SOURCE_DIR not found"
exit 1
fi
;;
c)
CHANNEL="$OPTARG"
;;
p)
PLATFORMS="$OPTARG"
;;
i)
NUM_INCREMENTALS="$OPTARG"
;;
*)
usage
;;
esac
shift $((OPTIND-1)); OPTIND=1
done
if [[ -z "$SOURCE_DIR" ]] || [[ -z "$CHANNEL" ]] || [[ -z "$PLATFORMS" ]]; then
usage
fi
"$SCRIPT_DIR"/check_requirements
VERSION="`perl -ne 'print and last if s/.*<em:version>(.*)<\/em:version>.*/\1/;' \"$SOURCE_DIR\"/install.rdf`"
if [ -z "$VERSION" ]; then
echo "Error getting version from $SOURCE_DIR/install.rdf"
exit 1
fi
# Build Zotero
"$ROOT_DIR/build.sh" -d "$SOURCE_DIR" -p $PLATFORMS -c $CHANNEL -e
BUILD_ID=`cat "$DIST_DIR/build_id"`
if [ -z "$BUILD_ID" ]; then
echo "Error getting build id"
exit 1
fi
TEMP_DIR=`mktemp -d`
# Clean up on exit
function cleanup {
rm -rf "$TEMP_DIR"
}
trap cleanup EXIT
# Build full update
"$ROOT_DIR/update-packaging/build_autoupdate.sh" -f -c $CHANNEL -p $PLATFORMS -l $VERSION
# Build incremental updates for each platform
for i in `seq 0 1 $((${#PLATFORMS}-1))`
do
case ${PLATFORMS:i:1} in
m)
platform=mac
platform_name=Mac
;;
w)
platform=win
platform_name=Windows
;;
l)
platform=linux
platform_name=Linux
;;
*)
echo "$0: Invalid platform option ${PLATFORMS:i:1}"
usage
;;
esac
echo
echo "Getting $platform_name incrementals"
INCREMENTALS="`\"$SCRIPT_DIR/manage_incrementals\" -c $CHANNEL -p ${PLATFORMS:i:1} -n $NUM_INCREMENTALS`"
echo "$INCREMENTALS"
echo
for from in $INCREMENTALS; do
echo "Building incremental update for $platform_name from $from to $VERSION"
"$ROOT_DIR/update-packaging/build_autoupdate.sh" -i "$from" -c "$CHANNEL" -p ${PLATFORMS:i:1} -l $VERSION
echo
done
done
# Upload builds to S3
"$SCRIPT_DIR/upload_builds" $CHANNEL $VERSION
# Upload file lists for each platform
channel_deploy_path="$DEPLOY_PATH/$CHANNEL"
mkdir "$TEMP_DIR/version_info"
chmod g+ws "$TEMP_DIR/version_info"
cp "$DIST_DIR"/files-* "$TEMP_DIR/version_info"
chmod g+w "$TEMP_DIR"/version_info/files-*
rsync -rv "$TEMP_DIR/version_info/" $DEPLOY_HOST:"$channel_deploy_path/$VERSION/"
# Download updates JSON for each platform, update it, and reupload it
for i in `seq 0 1 $((${#PLATFORMS}-1))`
do
case ${PLATFORMS:i:1} in
m)
architectures="mac"
;;
w)
architectures="win32 win-x64"
;;
l)
architectures="linux-i686 linux-x86_64"
;;
esac
for arch in $architectures;
do
jsonfile="updates-$arch.json"
scp $DEPLOY_HOST:"$channel_deploy_path/$jsonfile" "$TEMP_DIR/$jsonfile"
"$ROOT_DIR/update-packaging/add_version_info" -f "$TEMP_DIR/$jsonfile" -v $VERSION -b $BUILD_ID
scp "$TEMP_DIR/$jsonfile" $DEPLOY_HOST:"$channel_deploy_path/$jsonfile"
done
done
# Add version to incremental lists
echo
for i in `seq 0 1 $((${#PLATFORMS}-1))`
do
"$SCRIPT_DIR/manage_incrementals" -c $CHANNEL -p ${PLATFORMS:i:1} -a $VERSION
done
$DEPLOY_CMD
rm -rf "$STAGE_DIR"/*

92
app/scripts/build_and_run Executable file
View file

@ -0,0 +1,92 @@
#!/bin/bash -e
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
ROOT_DIR="$(dirname "$SCRIPT_DIR")"
# Set ZOTERO_REPOS_DIR to use directory other than $HOME for zotero-client and zotero-standalone-build
if [ -n "${ZOTERO_REPOS_DIR:-}" ]; then
repos_dir=$ZOTERO_REPOS_DIR
else
repos_dir=$HOME
fi
for dir in zotero-client zotero-standalone-build; do
if [ ! -d "$repos_dir/$dir" ]; then
echo "$repos_dir/$dir not found" >&2
exit 1
fi
done
# Set ZOTERO_PROFILE environment variable to choose profile
if [ -n "${ZOTERO_PROFILE:-}" ]; then
profile="-p $ZOTERO_PROFILE"
fi
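# Example (a sketch; the profile name is a placeholder):
#   ZOTERO_PROFILE=dev ./build_and_run -r -d    # rebuild, then run with the debugger flag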
REBUILD=0
SKIP_BUNDLED_FILES=0
DEBUGGER=0
while getopts "rbd" opt; do
case $opt in
r)
REBUILD=1
;;
b)
SKIP_BUNDLED_FILES=1
;;
d)
DEBUGGER=1
;;
\?)
echo "Invalid option: -$OPTARG" >&2
exit 1
;;
esac
done
if [ $REBUILD -eq 1 ]; then
PARAMS=""
if [ $DEBUGGER -eq 1 ]; then
PARAMS="-t"
fi
# Check if build watch is running
# If not, run now
if ! ps u | grep scripts/build.js | grep -v grep > /dev/null; then
echo "Running JS build process"
echo
cd $repos_dir/zotero-client
npm run build
echo
fi
$repos_dir/zotero-standalone-build/scripts/dir_build -q $PARAMS
if [ "`uname`" = "Darwin" ]; then
# Sign the Word dylib so it works on Apple Silicon
$SCRIPT_DIR/codesign_local $repos_dir/zotero-standalone-build/staging/Zotero.app
fi
fi
PARAMS=""
if [ $SKIP_BUNDLED_FILES -eq 1 ]; then
PARAMS="$PARAMS -ZoteroSkipBundledFiles"
fi
if [ $DEBUGGER -eq 1 ]; then
PARAMS="$PARAMS -debugger"
fi
if [ "`uname`" = "Darwin" ]; then
command="Zotero.app/Contents/MacOS/zotero"
elif [ "`uname`" = "Linux" ]; then
command="Zotero_linux-x86_64/zotero"
elif [ "`uname -o 2> /dev/null`" = "Cygwin" ]; then
command="Zotero_win64/zotero.exe"
else
echo "Unknown platform" >&2
exit 1
fi
$repos_dir/zotero-standalone-build/staging/$command $profile -ZoteroDebugText -jsconsole -purgecaches $PARAMS "$@"

144
app/scripts/check_requirements Executable file
View file

@ -0,0 +1,144 @@
#!/bin/bash
set -uo pipefail
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
ROOT_DIR="$(dirname "$SCRIPT_DIR")"
. "$ROOT_DIR/config.sh"
. "$SCRIPT_DIR/bootstrap.sh"
platform=`get_current_platform`
FAIL_CMD='echo -e \033[31;1mFAIL\033[0m'
FAILED=0
hdr_start=`tput smul`
hdr_stop=`tput rmul`
echo "${hdr_start}Checking build requirements:${hdr_stop}"
echo
echo -n "Checking for perl: "
which perl || { $FAIL_CMD; FAILED=1; }
echo -n "Checking for python3: "
which python3 || { $FAIL_CMD; FAILED=1; }
echo -n "Checking for curl: "
which curl || { $FAIL_CMD; FAILED=1; }
echo -n "Checking for wget: "
which wget || { $FAIL_CMD; FAILED=1; }
echo -n "Checking for zip: "
which zip || { $FAIL_CMD; FAILED=1; }
echo -n "Checking for unzip: "
which unzip || { $FAIL_CMD; FAILED=1; }
echo -n "Checking for xz: "
which xz || { $FAIL_CMD; FAILED=1; }
echo -n "Checking for awk: "
which awk || { $FAIL_CMD; FAILED=1; }
if [ $platform = "w" ]; then
echo -n "Checking for 7z: "
which 7z || { $FAIL_CMD; FAILED=1; }
echo "Checking for vcruntime140_1.dll: "
for arch in win64; do
echo -n " xulrunner/vc-$arch/vcruntime140_1.dll "
[ -f "$ROOT_DIR/xulrunner/vc-$arch/vcruntime140_1.dll" ] || { $FAIL_CMD; FAILED=1; }
done
echo
echo -n "Checking for rcedit: "
which rcedit || { $FAIL_CMD; FAILED=1; echo " -- Install with scripts/fetch_rcedit"; }
fi
echo -n "Checking for PDF tools: "
if [ $platform = "w" ]; then
[ -f "$ROOT_DIR/pdftools/pdftotext-win.exe" ] && ls "$ROOT_DIR/pdftools/pdftotext-win.exe" || { $FAIL_CMD; FAILED=1; }
elif [ $platform = "m" ]; then
[ -f "$ROOT_DIR/pdftools/pdftotext-mac" ] && ls "$ROOT_DIR/pdftools/pdftotext-mac" || { $FAIL_CMD; FAILED=1; }
elif [ $platform = "l" ]; then
[ -f "$ROOT_DIR/pdftools/pdftotext-linux-x86_64" ] && ls "$ROOT_DIR/pdftools/pdftotext-linux-x86_64" || { $FAIL_CMD; FAILED=1; }
fi
if [ $platform = "w" ]; then
echo
echo "${hdr_start}Checking Windows packaging requirements:${hdr_stop}"
echo
echo -n "Checking for upx: "
which upx || { $FAIL_CMD; FAILED=1; }
echo -n "Checking for uuidgen: "
which uuidgen || { $FAIL_CMD; FAILED=1; }
echo -n "Checking for signtool: "
if [ -x "`cygpath -u \"$SIGNTOOL\"`" ]; then
echo "`cygpath -u \"$SIGNTOOL\"`"
else
$FAIL_CMD
FAILED=1
fi
echo -n "Checking for Unicode NSIS: "
if [ -x "`cygpath -u \"${NSIS_DIR}makensis.exe\"`" ]; then
echo "`cygpath -u \"${NSIS_DIR}makensis.exe\"`"
else
$FAIL_CMD
FAILED=1
fi
plugin_path=$(cd "$NSIS_DIR\\Plugins" && pwd)
plugins="AppAssocReg ApplicationID InvokeShellVerb ShellLink UAC"
echo "Checking for NSIS plugins in $plugin_path"
for i in $plugins; do
echo -n " $i.dll: "
if [ -f "$plugin_path/$i.dll" ]; then
echo OK
else
$FAIL_CMD
FAILED=1
fi
done
fi
if [ $platform = "m" ]; then
echo
echo "${hdr_start}Checking Mac packaging requirements:${hdr_stop}"
echo
echo -n "Checking for codesign: "
which /usr/bin/codesign || { $FAIL_CMD; FAILED=1; }
fi
echo
echo "${hdr_start}Checking distribution requirements:${hdr_stop}"
echo
echo -n "Checking for Mozilla ARchive (MAR) tool: "
which mar || { $FAIL_CMD; FAILED=1; echo " -- Install with fetch_mar_tools"; }
echo -n "Checking for mbsdiff: "
which mbsdiff || { $FAIL_CMD; FAILED=1; echo " -- Install with fetch_mar_tools"; }
echo -n "Checking for rsync: "
which rsync || { $FAIL_CMD; FAILED=1; }
echo -n "Checking for sha512sum/shasum: "
which sha512sum 2>/dev/null || which shasum 2>/dev/null || { $FAIL_CMD; FAILED=1; }
echo -n "Checking for AWS CLI: "
which aws || { $FAIL_CMD; FAILED=1; }
echo -n "Checking for AWS S3 access: "
aws s3 ls $S3_BUCKET/$S3_DIST_PATH | sed 's/^[[:blank:]]*//' || { $FAIL_CMD; FAILED=1; }
echo -n "Checking for deploy host directory access: "
ssh $DEPLOY_HOST ls -d $DEPLOY_PATH || { $FAIL_CMD; FAILED=1; }
exit $FAILED

42
app/scripts/codesign_local Executable file
View file

@ -0,0 +1,42 @@
#!/bin/bash
set -euo pipefail
# Perform ad-hoc code signing of Zotero.app for local usage
#
# Currently we sign only the Word dylib, since that's necessary for Zotero developers to work on
# Word integration on Apple Silicon. If we discover other problems, we can uncomment some of the
# other lines. If you're making a custom build, you can modify this file to sign the entire build
# instead of just the bare minimum needed for development.
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
ROOT_DIR="$(dirname "$SCRIPT_DIR")"
. "$ROOT_DIR/config.sh"
if [ -z "${1:-}" ]; then
echo "Usage: $0 path/to/staging/Zotero.app"
exit 1
fi
APPDIR=$1
DEVELOPER_ID="-"
entitlements_file="$ROOT_DIR/mac/entitlements.xml"
#/usr/bin/codesign --force --options runtime --entitlements "$entitlements_file" --sign "$DEVELOPER_ID" \
# "$APPDIR/Contents/MacOS/pdftotext" \
# "$APPDIR/Contents/MacOS/pdfinfo" \
# "$APPDIR/Contents/MacOS/XUL" \
# "$APPDIR/Contents/MacOS/updater.app/Contents/MacOS/org.mozilla.updater"
#find "$APPDIR/Contents" -name '*.dylib' -exec /usr/bin/codesign --force --options runtime --entitlements "$entitlements_file" --sign "$DEVELOPER_ID" {} \;
#find "$APPDIR/Contents" -name '*.app' -exec /usr/bin/codesign --force --options runtime --entitlements "$entitlements_file" --sign "$DEVELOPER_ID" {} \;
#/usr/bin/codesign --force --options runtime --entitlements "$entitlements_file" --sign "$DEVELOPER_ID" "$APPDIR/Contents/MacOS/zotero"
# Skip signing of Safari extension, since it's not present for local builds
# Sign final app package
#echo
#/usr/bin/codesign --force --options runtime --entitlements "$entitlements_file" --sign "$DEVELOPER_ID" "$APPDIR"
# Verify app
#/usr/bin/codesign --verify -vvvv "$APPDIR"
find "$APPDIR/Contents" -name 'libZoteroWordIntegration.dylib' -exec /usr/bin/codesign --force --options runtime --entitlements "$entitlements_file" --sign "$DEVELOPER_ID" {} \;

82
app/scripts/dir_build Executable file
View file

@ -0,0 +1,82 @@
#!/bin/bash
set -euo pipefail
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
ROOT_DIR="$(dirname "$SCRIPT_DIR")"
. "$ROOT_DIR/config.sh"
function usage {
cat >&2 <<DONE
Usage: $0 -p platforms
Options
-p PLATFORMS Platforms to build (m=Mac, w=Windows, l=Linux)
-t add devtools
-q quick build (skip compression and other optional steps for faster restarts during development)
DONE
exit 1
}
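# Example (a sketch): quick rebuild with devtools for the current platform
#   ./dir_build -q -t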
DEVTOOLS=0
PLATFORM=""
quick_build=0
while getopts "tp:q" opt; do
case $opt in
t)
DEVTOOLS=1
;;
p)
for i in `seq 0 1 $((${#OPTARG}-1))`
do
case ${OPTARG:i:1} in
m) PLATFORM="m";;
w) PLATFORM="w";;
l) PLATFORM="l";;
*)
echo "$0: Invalid platform option ${OPTARG:i:1}"
usage
;;
esac
done
;;
q)
quick_build=1
;;
\?)
echo "Invalid option: -$OPTARG" >&2
exit 1
;;
esac
done
if [[ -z $PLATFORM ]]; then
if [ "`uname`" = "Darwin" ]; then
PLATFORM="m"
elif [ "`uname`" = "Linux" ]; then
PLATFORM="l"
elif [ "`uname -o 2> /dev/null`" = "Cygwin" ]; then
PLATFORM="w"
fi
fi
CHANNEL="source"
VERSION=`perl -ne 'print and last if s/.*<em:version>(.{3}).+/\1/;' "$ZOTERO_SOURCE_DIR/install.rdf"`
if [ $VERSION = "4.0" ]; then
"$ZOTERO_BUILD_DIR/xpi/build_xpi_4.0" "$ZOTERO_SOURCE_DIR" $CHANNEL
"$ROOT_DIR/build.sh" -f "$ZOTERO_BUILD_DIR/xpi/build/zotero-build.xpi" -p $PLATFORM -c $CHANNEL -s
else
PARAMS=""
if [ $DEVTOOLS -eq 1 ]; then
PARAMS+=" -t"
fi
if [ $quick_build -eq 1 ]; then
PARAMS+=" -q"
fi
hash=`git -C "$ZOTERO_SOURCE_DIR" rev-parse --short HEAD`
"$ZOTERO_BUILD_DIR/xpi/build_xpi" -s "$ZOTERO_SOURCE_DIR/build" -c $CHANNEL -m $hash
"$ROOT_DIR/build.sh" -d "$ZOTERO_BUILD_DIR/xpi/build/staging" -p $PLATFORM -c $CHANNEL -s $PARAMS
fi
echo Done

11
app/scripts/fetch_rcedit Executable file
View file

@ -0,0 +1,11 @@
#!/bin/bash
set -euo pipefail
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
ROOT_DIR="$(dirname "$SCRIPT_DIR")"
cd "$ROOT_DIR"
mkdir -p "xulrunner/bin"
curl -L -o "xulrunner/bin/rcedit.exe" https://github.com/electron/rcedit/releases/download/v1.1.1/rcedit-x86.exe
chmod 755 xulrunner/bin/rcedit.exe

21
app/scripts/get_commit_files Executable file
View file

@ -0,0 +1,21 @@
#!/bin/bash
set -euo pipefail
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
ROOT_DIR="$(dirname "$SCRIPT_DIR")"
. "$ROOT_DIR/config.sh"
if [ -z "${1:-}" ]; then
echo "Commit hash not provided"
exit 1
fi
hash="$1"
tmpdir=`mktemp -d`
cd $tmpdir
wget -O build.zip "https://$S3_BUCKET.s3.amazonaws.com/$S3_CI_ZIP_PATH/$hash.zip" >&2 \
|| (echo "ZIP file not found for commit '$hash'" && exit 1)
unzip build.zip >&2
rm build.zip
echo $tmpdir

View file

@ -0,0 +1,14 @@
#!/bin/bash
set -euo pipefail
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
ROOT_DIR="$(dirname "$SCRIPT_DIR")"
. "$ROOT_DIR/config.sh"
if [ -z "${1:-}" ]; then
echo "Usage: $0 branch"
exit 1
fi
branch=$1
git ls-remote --exit-code $SOURCE_REPO_URL $branch | cut -f 1

83
app/scripts/manage_incrementals Executable file
View file

@ -0,0 +1,83 @@
#!/bin/bash
#
# Manage list of deployed version numbers for a channel in order to generate incremental builds
#
set -euo pipefail
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
ROOT_DIR="$(dirname "$SCRIPT_DIR")"
. "$ROOT_DIR/config.sh"
function usage {
cat >&2 <<DONE
Usage:
-c CHANNEL Release channel (e.g., 'beta'); defaults to 'release'
-p PLATFORM Platform (m=Mac, w=Windows, l=Linux)
-a VERSION Add version to incrementals list; cannot be used with -n
-n NUM_VERSIONS Number of previous versions to return; cannot be used with -a
DONE
exit 1
}
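# Examples (a sketch; the version number is a placeholder):
#   ./manage_incrementals -c beta -p m -n 6        # print the 6 most recent beta Mac versions
#   ./manage_incrementals -c beta -p m -a 7.0.0    # record 7.0.0 after deploying it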
CHANNEL="release"
PLATFORM=""
VERSION=""
NUM_VERSIONS=""
while getopts "c:p:a:n:" opt; do
case $opt in
c)
CHANNEL="$OPTARG"
;;
p)
case "$OPTARG" in
m) PLATFORM=mac;;
w) PLATFORM=win;;
l) PLATFORM=linux;;
*)
echo "$0: Invalid platform option $OPTARG"
usage
;;
esac
;;
a)
VERSION="$OPTARG"
;;
n)
NUM_VERSIONS="$OPTARG"
;;
*)
usage
;;
esac
shift $((OPTIND-1)); OPTIND=1
done
if [[ -z "$PLATFORM" ]]; then
usage
fi
if [[ -z "$VERSION" ]] && [[ -z "$NUM_VERSIONS" ]]; then
usage
fi
if [[ "$VERSION" ]] && [[ "$NUM_VERSIONS" ]]; then
usage
fi
INCR_FILENAME="incrementals-$CHANNEL-$PLATFORM"
S3_URL="s3://$S3_BUCKET/$S3_DIST_PATH/$CHANNEL/incrementals-$PLATFORM"
INCR_PATH="$DIST_DIR/$INCR_FILENAME"
mkdir -p "$DIST_DIR"
aws s3 cp $S3_URL $INCR_PATH >&2
# Add version to file and reupload
if [ "$VERSION" ]; then
echo "Adding $VERSION to incrementals-$PLATFORM"
echo $VERSION >> $INCR_PATH
aws s3 cp $INCR_PATH $S3_URL
# Show last n versions
elif [ "$NUM_VERSIONS" ]; then
tail -n $NUM_VERSIONS $INCR_PATH
fi
rm $INCR_PATH

18
app/scripts/notarization_info Executable file
View file

@ -0,0 +1,18 @@
#!/bin/bash
set -euo pipefail
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
ROOT_DIR="$(dirname "$SCRIPT_DIR")"
. "$ROOT_DIR/config.sh"
function usage {
echo "Usage: $0 id"
exit 1
}
id=${1:-}
if [[ -z "$id" ]]; then
usage
fi
xcrun altool --notarization-info "$id" -u "$NOTARIZATION_USER" -p "$NOTARIZATION_PASSWORD" --output-format xml

View file

@ -0,0 +1,19 @@
#!/bin/bash
set -euo pipefail
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
ROOT_DIR="$(dirname "$SCRIPT_DIR")"
. "$ROOT_DIR/config.sh"
function usage {
echo "Usage: $0 file"
exit 1
}
file=${1:-}
if [[ -z "$file" ]]; then
usage
fi
echo "Stapling $file"
xcrun stapler staple $file

18
app/scripts/notarization_status Executable file
View file

@ -0,0 +1,18 @@
#!/bin/bash
set -euo pipefail
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
ROOT_DIR="$(dirname "$SCRIPT_DIR")"
. "$ROOT_DIR/config.sh"
function usage {
echo "Usage: $0 id"
exit 1
}
id=${1:-}
if [[ -z "$id" ]]; then
usage
fi
$SCRIPT_DIR/notarization_info "$id" | plutil -extract notarization-info.Status xml1 -o - - | sed -n "s/.*<string>\(.*\)<\/string>.*/\1/p"

19
app/scripts/notarize_mac_app Executable file
View file

@ -0,0 +1,19 @@
#!/bin/bash
set -euo pipefail
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
ROOT_DIR="$(dirname "$SCRIPT_DIR")"
. "$ROOT_DIR/config.sh"
function usage {
echo "Usage: $0 file"
exit 1
}
file=${1:-}
if [[ -z "$file" ]]; then
usage
fi
echo "Uploading ${file##*/} to Apple for notarization" >&2
xcrun altool --notarize-app --primary-bundle-id "$NOTARIZATION_BUNDLE_ID" --username "$NOTARIZATION_USER" --password "$NOTARIZATION_PASSWORD" --file $file --output-format xml

376
app/scripts/optimizejars.py Normal file
View file

@ -0,0 +1,376 @@
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is mozilla.org code
#
# The Initial Developer of the Original Code is
# Mozilla Foundation.
# Portions created by the Initial Developer are Copyright (C) 2010
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Taras Glek <tglek@mozilla.com>
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL. If you do not delete
# the provisions above, a recipient may use your version of this file under
# the terms of any one of the MPL, the GPL or the LGPL.
#
# ***** END LICENSE BLOCK *****
import sys, os, subprocess, struct, re
local_file_header = [
("signature", "uint32"),
("min_version", "uint16"),
("general_flag", "uint16"),
("compression", "uint16"),
("lastmod_time", "uint16"),
("lastmod_date", "uint16"),
("crc32", "uint32"),
("compressed_size", "uint32"),
("uncompressed_size", "uint32"),
("filename_size", "uint16"),
("extra_field_size", "uint16"),
("filename", "filename_size"),
("extra_field", "extra_field_size"),
("data", "compressed_size")
]
cdir_entry = [
("signature", "uint32"),
("creator_version", "uint16"),
("min_version", "uint16"),
("general_flag", "uint16"),
("compression", "uint16"),
("lastmod_time", "uint16"),
("lastmod_date", "uint16"),
("crc32", "uint32"),
("compressed_size", "uint32"),
("uncompressed_size", "uint32"),
("filename_size", "uint16"),
("extrafield_size", "uint16"),
("filecomment_size", "uint16"),
("disknum", "uint16"),
("internal_attr", "uint16"),
("external_attr", "uint32"),
("offset", "uint32"),
("filename", "filename_size"),
("extrafield", "extrafield_size"),
("filecomment", "filecomment_size"),
]
cdir_end = [
("signature", "uint32"),
("disk_num", "uint16"),
("cdir_disk", "uint16"),
("disk_entries", "uint16"),
("cdir_entries", "uint16"),
("cdir_size", "uint32"),
("cdir_offset", "uint32"),
("comment_size", "uint16"),
]
type_mapping = { "uint32":"I", "uint16":"H"}
def format_struct (format):
string_fields = {}
fmt = "<"
for (name,value) in iter(format):
try:
fmt += type_mapping[value][0]
except KeyError:
string_fields[name] = value
return (fmt, string_fields)
def size_of(format):
return struct.calcsize(format_struct(format)[0])
class MyStruct:
def __init__(self, format, string_fields):
self.__dict__["struct_members"] = {}
self.__dict__["format"] = format
self.__dict__["string_fields"] = string_fields
def addMember(self, name, value):
self.__dict__["struct_members"][name] = value
def __getattr__(self, item):
try:
return self.__dict__["struct_members"][item]
except:
pass
print("no %s" %item)
print(self.__dict__["struct_members"])
raise AttributeError
def __setattr__(self, item, value):
if item in self.__dict__["struct_members"]:
self.__dict__["struct_members"][item] = value
else:
raise AttributeError
def pack(self):
extra_data = b""
values = []
string_fields = self.__dict__["string_fields"]
struct_members = self.__dict__["struct_members"]
format = self.__dict__["format"]
for (name,_) in format:
if name in string_fields:
if not isinstance(struct_members[name], bytes):
struct_members[name] = struct_members[name].encode('utf-8')
extra_data = extra_data + struct_members[name]
else:
values.append(struct_members[name]);
return struct.pack(format_struct(format)[0], *values) + extra_data
ENDSIG = 0x06054b50
def assert_true(cond, msg):
if not cond:
raise Exception(msg)
exit(1)
class BinaryBlob:
def __init__(self, f):
self.data = open(f, "rb").read()
self.offset = 0
self.length = len(self.data)
def readAt(self, pos, length):
self.offset = pos + length
return self.data[pos:self.offset]
def read_struct (self, format, offset = None):
if offset == None:
offset = self.offset
(fstr, string_fields) = format_struct(format)
size = struct.calcsize(fstr)
data = self.readAt(offset, size)
ret = struct.unpack(fstr, data)
retstruct = MyStruct(format, string_fields)
i = 0
for (name,_) in iter(format):
member_desc = None
if not name in string_fields:
member_data = ret[i]
i = i + 1
else:
# zip has data fields which are described by other struct fields, this does
# additional reads to fill em in
member_desc = string_fields[name]
member_data = self.readAt(self.offset, retstruct.__getattr__(member_desc))
retstruct.addMember(name, member_data)
# sanity check serialization code
data = self.readAt(offset, self.offset - offset)
out_data = retstruct.pack()
assert_true(out_data == data, "Serialization fail %d !=%d"% (len(out_data), len(data)))
return retstruct
def optimizejar(jar, outjar, inlog = None):
if inlog is not None:
inlog = open(inlog).read().rstrip()
# in the case of an empty log still move the index forward
if len(inlog) == 0:
inlog = []
else:
inlog = inlog.split("\n")
outlog = []
jarblob = BinaryBlob(jar)
dirend = jarblob.read_struct(cdir_end, jarblob.length - size_of(cdir_end))
assert_true(dirend.signature == ENDSIG, "no signature in the end");
cdir_offset = dirend.cdir_offset
readahead = 0
if inlog is None and cdir_offset == 4:
readahead = struct.unpack("<I", jarblob.readAt(0, 4))[0]
print("%s: startup data ends at byte %d" % (outjar, readahead));
total_stripped = 0;
jarblob.offset = cdir_offset
central_directory = []
for i in range(0, dirend.cdir_entries):
entry = jarblob.read_struct(cdir_entry)
if entry.filename[-1:] == "/":
total_stripped += len(entry.pack())
else:
total_stripped += entry.extrafield_size
central_directory.append(entry)
reordered_count = 0
if inlog is not None:
dup_guard = set()
for ordered_name in inlog:
if ordered_name in dup_guard:
continue
else:
dup_guard.add(ordered_name)
found = False
for i in range(reordered_count, len(central_directory)):
if central_directory[i].filename == ordered_name:
# swap the cdir entries
tmp = central_directory[i]
central_directory[i] = central_directory[reordered_count]
central_directory[reordered_count] = tmp
reordered_count = reordered_count + 1
found = True
break
if not found:
print( "Can't find '%s' in %s" % (ordered_name, jar))
outfd = open(outjar, "wb")
out_offset = 0
if inlog is not None:
# have to put central directory at offset 4 cos 0 confuses some tools.
# This also lets us specify how many entries should be preread
dirend.cdir_offset = 4
# make room for central dir + end of dir + 4 extra bytes at front
out_offset = dirend.cdir_offset + dirend.cdir_size + size_of(cdir_end) - total_stripped
outfd.seek(out_offset)
cdir_data = b""
written_count = 0
crc_mapping = {}
dups_found = 0
dupe_bytes = 0
# store number of bytes suggested for readahead
for entry in central_directory:
# read in the header twice..first for comparison, second time for convenience when writing out
jarfile = jarblob.read_struct(local_file_header, entry.offset)
assert_true(jarfile.filename == entry.filename, "Directory/Localheader mismatch")
# drop directory entries
if entry.filename[-1:] == "/":
total_stripped += len(jarfile.pack())
dirend.cdir_entries -= 1
continue
# drop extra field data
else:
total_stripped += jarfile.extra_field_size;
entry.extrafield = jarfile.extra_field = ""
entry.extrafield_size = jarfile.extra_field_size = 0
# January 1st, 2010
entry.lastmod_date = jarfile.lastmod_date = ((2010 - 1980) << 9) | (1 << 5) | 1
entry.lastmod_time = jarfile.lastmod_time = 0
data = jarfile.pack()
outfd.write(data)
old_entry_offset = entry.offset
entry.offset = out_offset
out_offset = out_offset + len(data)
entry_data = entry.pack()
cdir_data += entry_data
expected_len = entry.filename_size + entry.extrafield_size + entry.filecomment_size
assert_true(len(entry_data) != expected_len,
"%s entry size - expected:%d got:%d" % (entry.filename, len(entry_data), expected_len))
written_count += 1
if entry.crc32 in crc_mapping:
dups_found += 1
dupe_bytes += entry.compressed_size + len(data) + len(entry_data)
print("%s\n\tis a duplicate of\n%s\n---"%(entry.filename, crc_mapping[entry.crc32]))
else:
crc_mapping[entry.crc32] = entry.filename;
if inlog is not None:
if written_count == reordered_count:
readahead = out_offset
print("%s: startup data ends at byte %d"%( outjar, readahead));
elif written_count < reordered_count:
pass
#print("%s @ %d" % (entry.filename, out_offset))
elif readahead >= old_entry_offset + len(data):
outlog.append(entry.filename)
reordered_count += 1
if inlog is None:
dirend.cdir_offset = out_offset
if dups_found > 0:
print("WARNING: Found %d duplicate files taking %d bytes"%(dups_found, dupe_bytes))
dirend.cdir_size = len(cdir_data)
dirend.disk_entries = dirend.cdir_entries
dirend_data = dirend.pack()
assert_true(size_of(cdir_end) == len(dirend_data), "Failed to serialize directory end correctly. Serialized size;%d, expected:%d"%(len(dirend_data), size_of(cdir_end)));
outfd.seek(dirend.cdir_offset)
outfd.write(cdir_data)
outfd.write(dirend_data)
# for ordered jars the central directory is written at the beginning of the file, so a second
# central-directory entry has to be written at the end of the file
if inlog is not None:
outfd.seek(0)
outfd.write(struct.pack("<I", readahead));
outfd.seek(out_offset)
outfd.write(dirend_data)
print("Stripped %d bytes" % total_stripped)
print("%s %d/%d in %s" % (("Ordered" if inlog is not None else "Deoptimized"),
reordered_count, len(central_directory), outjar))
outfd.close()
return outlog
if len(sys.argv) != 5:
print("Usage: --optimize|--deoptimize %s JAR_LOG_DIR IN_JAR_DIR OUT_JAR_DIR" % sys.argv[0])
exit(1)
jar_regex = re.compile("\\.jar?$")
def optimize(JAR_LOG_DIR, IN_JAR_DIR, OUT_JAR_DIR):
ls = os.listdir(IN_JAR_DIR)
for jarfile in ls:
if not re.search(jar_regex, jarfile):
continue
injarfile = os.path.join(IN_JAR_DIR, jarfile)
outjarfile = os.path.join(OUT_JAR_DIR, jarfile)
logfile = os.path.join(JAR_LOG_DIR, jarfile + ".log")
if not os.path.isfile(logfile):
logfile = None
optimizejar(injarfile, outjarfile, logfile)
def deoptimize(JAR_LOG_DIR, IN_JAR_DIR, OUT_JAR_DIR):
if not os.path.exists(JAR_LOG_DIR):
os.makedirs(JAR_LOG_DIR)
ls = os.listdir(IN_JAR_DIR)
for jarfile in ls:
if not re.search(jar_regex, jarfile):
continue
injarfile = os.path.join(IN_JAR_DIR, jarfile)
outjarfile = os.path.join(OUT_JAR_DIR, jarfile)
logfile = os.path.join(JAR_LOG_DIR, jarfile + ".log")
log = str(optimizejar(injarfile, outjarfile, None))
open(logfile, "wb").write("\n".join(log).encode('utf-8'))
def main():
MODE = sys.argv[1]
JAR_LOG_DIR = sys.argv[2]
IN_JAR_DIR = sys.argv[3]
OUT_JAR_DIR = sys.argv[4]
if MODE == "--optimize":
optimize(JAR_LOG_DIR, IN_JAR_DIR, OUT_JAR_DIR)
elif MODE == "--deoptimize":
deoptimize(JAR_LOG_DIR, IN_JAR_DIR, OUT_JAR_DIR)
else:
print("Unknown mode %s" % MODE)
exit(1)
if __name__ == '__main__':
main()
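For orientation, the cdir_end layout parsed above corresponds to a single struct.unpack of the ZIP end-of-central-directory record; a minimal standalone sketch, assuming the archive has no trailing comment (the archive name is a placeholder):
import struct

EOCD_SIG = 0x06054b50
# signature, disk_num, cdir_disk, disk_entries, cdir_entries, cdir_size, cdir_offset, comment_size
EOCD_FMT = "<IHHHHIIH"

def read_eocd(path):
    data = open(path, "rb").read()
    # With no archive comment, the 22-byte record sits at the very end of the file
    rec = data[-struct.calcsize(EOCD_FMT):]
    fields = struct.unpack(EOCD_FMT, rec)
    assert fields[0] == EOCD_SIG, "no end-of-central-directory signature"
    return {"cdir_entries": fields[4], "cdir_size": fields[5], "cdir_offset": fields[6]}

# e.g. read_eocd("omni.ja")  # hypothetical archive name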

24
app/scripts/upload_builds Executable file
View file

@ -0,0 +1,24 @@
#!/bin/bash
#
# Upload build archives from 'dist' to S3 with the specified channel and version
#
set -euo pipefail
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
ROOT_DIR="$(dirname "$SCRIPT_DIR")"
. "$ROOT_DIR/config.sh"
function usage {
echo Usage: $0 CHANNEL VERSION >&2
exit 1
}
CHANNEL="${1:-}"
VERSION="${2:-}"
if [[ -z "$CHANNEL" ]] || [[ -z "$VERSION" ]]; then
usage
fi
url="s3://$S3_BUCKET/$S3_DIST_PATH/$CHANNEL/$VERSION/"
aws s3 sync --exclude "files-*" --exclude build_id "$DIST_DIR" $url

View file

@ -0,0 +1,66 @@
#!/usr/bin/env python3
"""
Update a builds manifest with info on a given build
"""
import argparse
import os
import sys
import shutil
import json
import traceback
DETAILS_URLS = {
'4.0': 'https://www.zotero.org/support/4.0_changelog',
'5.0': 'https://www.zotero.org/support/5.0_changelog',
'6.0': 'https://www.zotero.org/support/6.0_changelog',
'7.0': 'https://www.zotero.org/support/7.0_changelog'
}
MAJOR = None
parser = argparse.ArgumentParser(
description='Update a builds manifest with info on a given build',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-f', '--file', required=True, help="path to updates.json")
parser.add_argument('-v', '--version', required=True, help='version number of build')
parser.add_argument('-b', '--build_id', required=True, help="build ID ('20160801142343')")
args = parser.parse_args()
def main():
try:
file = args.file
version = args.version
# Back up JSON file
shutil.copy2(file, file + '.bak')
# Read in existing file
with open(file) as f:
updates = json.loads(f.read())
updates.append({
'version': version,
'buildID': args.build_id,
'detailsURL': DETAILS_URLS[version[0:3]],
'major': MAJOR
})
# Keep last 5 entries
updates = updates[-5:]
# Write new file
updates = json.dumps(updates, indent=2)
with open(file, 'w') as f:
f.write(updates + "\n")
print(updates)
return 0
except Exception as err:
sys.stderr.write("\n" + traceback.format_exc())
return 1
if __name__ == '__main__':
sys.exit(main())
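The entries appended above use the keys version, buildID, detailsURL, and major; a rough sketch of reading the resulting updates.json back (file name and example values are illustrative, not taken from the build system):
import json

def latest_build(path="updates.json"):  # path is an assumption for illustration
    # The script above keeps only the last five entries, newest last
    with open(path) as f:
        entries = json.load(f)
    return entries[-1] if entries else None

# A single entry has the shape written above, e.g.:
# {"version": "7.0.1", "buildID": "20160801142343",
#  "detailsURL": "https://www.zotero.org/support/7.0_changelog", "major": None}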

View file

@ -0,0 +1,326 @@
#!/bin/bash
set -euo pipefail
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
ROOT_DIR="$(dirname "$SCRIPT_DIR")"
. "$ROOT_DIR/config.sh"
UPDATE_STAGE_DIR="$SCRIPT_DIR/staging"
function usage {
cat >&2 <<DONE
Usage: $0 -f [-i FROM_VERSION] [-c CHANNEL] [-p PLATFORMS] [-l] VERSION
Options
-f Perform full build
-i FROM Perform incremental build
-c CHANNEL Release channel ('release', 'beta') (required for incremental builds)
-p PLATFORMS Platforms to build (m=Mac, w=Windows, l=Linux)
-l Use local TO directory instead of downloading TO files from S3
DONE
exit 1
}
# From https://gist.github.com/cdown/1163649#gistcomment-1639097
urlencode() {
local LANG=C
local length="${#1}"
for (( i = 0; i < length; i++ )); do
local c="${1:i:1}"
case $c in
[a-zA-Z0-9.~_-]) printf "$c" ;;
*) printf '%%%02X' "'$c" ;;
esac
done
}
if [ "`uname -o 2> /dev/null`" = "Cygwin" ]; then
WIN_NATIVE=1
else
WIN_NATIVE=0
fi
BUILD_FULL=0
BUILD_INCREMENTAL=0
FROM=""
CHANNEL=""
BUILD_MAC=0
BUILD_WIN=0
BUILD_LINUX=0
USE_LOCAL_TO=0
while getopts "i:c:p:fl" opt; do
case $opt in
i)
FROM="$OPTARG"
BUILD_INCREMENTAL=1
;;
c)
CHANNEL="$OPTARG"
;;
p)
for i in `seq 0 1 $((${#OPTARG}-1))`
do
case ${OPTARG:i:1} in
m) BUILD_MAC=1;;
w) BUILD_WIN=1;;
l) BUILD_LINUX=1;;
*)
echo "$0: Invalid platform option ${OPTARG:i:1}"
usage
;;
esac
done
;;
f)
BUILD_FULL=1
;;
l)
USE_LOCAL_TO=1
;;
*)
usage
;;
esac
shift $((OPTIND-1)); OPTIND=1
done
shift $(($OPTIND - 1))
TO=${1:-}
if [ -z "$TO" ]; then
usage
fi
if [ -z "$FROM" ] && [ $BUILD_FULL -eq 0 ]; then
usage
fi
if [[ $BUILD_INCREMENTAL -eq 1 ]] && [[ -z "$CHANNEL" ]]; then
echo "Channel not provided for incremental builds" >&2
exit 1
fi
# Require at least one platform
if [[ $BUILD_MAC == 0 ]] && [[ $BUILD_WIN == 0 ]] && [[ $BUILD_LINUX == 0 ]]; then
usage
fi
rm -rf "$UPDATE_STAGE_DIR"
mkdir "$UPDATE_STAGE_DIR"
INCREMENTALS_FOUND=0
for version in "$FROM" "$TO"; do
if [[ $version == "$TO" ]] && [[ $INCREMENTALS_FOUND == 0 ]] && [[ $BUILD_FULL == 0 ]]; then
exit
fi
if [ -z "$version" ]; then
continue
fi
echo "Getting Zotero version $version"
versiondir="$UPDATE_STAGE_DIR/$version"
#
# Use main build script's staging directory for TO files rather than downloading the given version.
#
# The caller must ensure that the files in ../staging match the platforms and version given.
if [[ $version == $TO && $USE_LOCAL_TO == "1" ]]; then
if [ ! -d "$STAGE_DIR" ]; then
echo "Can't find local TO dir $STAGE_DIR"
exit 1
fi
echo "Using files from $STAGE_DIR"
ln -s "$STAGE_DIR" "$versiondir"
continue
fi
#
# Otherwise, download version from S3
#
mkdir -p "$versiondir"
cd "$versiondir"
MAC_ARCHIVE="Zotero-${version}.dmg"
WIN32_ARCHIVE="Zotero-${version}_win32.zip"
WIN64_ARCHIVE="Zotero-${version}_x64.zip"
LINUX_X86_ARCHIVE="Zotero-${version}_linux-i686.tar.bz2"
LINUX_X86_64_ARCHIVE="Zotero-${version}_linux-x86_64.tar.bz2"
CACHE_DIR="$ROOT_DIR/cache"
if [ ! -e "$CACHE_DIR" ]; then
mkdir "$CACHE_DIR"
fi
for archive in "$MAC_ARCHIVE" "$WIN32_ARCHIVE" "$WIN64_ARCHIVE" "$LINUX_X86_ARCHIVE" "$LINUX_X86_64_ARCHIVE"; do
if [[ $archive = "$MAC_ARCHIVE" ]] && [[ $BUILD_MAC != 1 ]]; then
continue
fi
if [[ $archive = "$WIN32_ARCHIVE" ]] && [[ $BUILD_WIN != 1 ]]; then
continue
fi
if [[ $archive = "$WIN64_ARCHIVE" ]] && [[ $BUILD_WIN != 1 ]]; then
continue
fi
if [[ $archive = "$LINUX_X86_ARCHIVE" ]] && [[ $BUILD_LINUX != 1 ]]; then
continue
fi
if [[ $archive = "$LINUX_X86_64_ARCHIVE" ]] && [[ $BUILD_LINUX != 1 ]]; then
continue
fi
ETAG_FILE="$CACHE_DIR/$archive.etag"
# Check cache for archive
if [[ -f "$CACHE_DIR/$archive" ]] && [[ -f "$CACHE_DIR/$archive.etag" ]]; then
ETAG="`cat $ETAG_FILE | tr '\n' ' '`"
else
ETAG=""
fi
rm -f $archive
# URL-encode '+' in beta version numbers
ENCODED_VERSION=`urlencode $version`
ENCODED_ARCHIVE=`urlencode $archive`
URL="https://$S3_BUCKET.s3.amazonaws.com/$S3_DIST_PATH/$CHANNEL/$ENCODED_VERSION/$ENCODED_ARCHIVE"
echo "Fetching $URL"
set +e
# Cached version is available
if [ -n "$ETAG" ]; then
NEW_ETAG=$(wget -nv -S --header "If-None-Match: $ETAG" $URL 2>&1 | awk '/ *ETag: */ {print $2}')
# If ETag didn't match, cache newly downloaded version
if [ -f $archive ]; then
echo "ETag for $archive didn't match! -- using new version"
rm -f "$CACHE_DIR/$archive.etag"
cp $archive "$CACHE_DIR/"
echo "$NEW_ETAG" > "$CACHE_DIR/$archive.etag"
# If ETag matched (or there was another error), use cached version
else
echo "Using cached $archive"
cp "$CACHE_DIR/$archive" .
fi
else
NEW_ETAG=$(wget -nv -S $URL 2>&1 | awk '/ *ETag: */ {print $2}')
# Save archive to cache
rm -f "$CACHE_DIR/$archive.etag"
cp $archive "$CACHE_DIR/"
echo "$NEW_ETAG" > "$CACHE_DIR/$archive.etag"
fi
set -e
done
# Delete cached files older than 14 days
find "$CACHE_DIR" -ctime +14 -delete
# Unpack Zotero.app
if [ $BUILD_MAC == 1 ]; then
if [ -f "$MAC_ARCHIVE" ]; then
set +e
hdiutil detach -quiet /Volumes/Zotero 2>/dev/null
set -e
hdiutil attach -quiet "$MAC_ARCHIVE"
cp -R /Volumes/Zotero/Zotero.app "$versiondir"
rm "$MAC_ARCHIVE"
hdiutil detach -quiet /Volumes/Zotero
INCREMENTALS_FOUND=1
else
echo "$MAC_ARCHIVE not found"
fi
fi
# Unpack Windows zips
if [ $BUILD_WIN == 1 ]; then
if [[ -f "$WIN32_ARCHIVE" ]] && [[ -f "$WIN64_ARCHIVE" ]]; then
for build in "$WIN32_ARCHIVE" "$WIN64_ARCHIVE"; do
unzip -q "$build"
rm "$build"
done
INCREMENTALS_FOUND=1
else
echo "$WIN32_ARCHIVE/$WIN64_ARCHIVE not found"
fi
fi
# Unpack Linux tarballs
if [ $BUILD_LINUX == 1 ]; then
if [[ -f "$LINUX_X86_ARCHIVE" ]] && [[ -f "$LINUX_X86_64_ARCHIVE" ]]; then
for build in "$LINUX_X86_ARCHIVE" "$LINUX_X86_64_ARCHIVE"; do
tar -xjf "$build"
rm "$build"
done
INCREMENTALS_FOUND=1
else
echo "$LINUX_X86_ARCHIVE/$LINUX_X86_64_ARCHIVE not found"
fi
fi
echo
done
CHANGES_MADE=0
for build in "mac" "win32" "win64" "linux-i686" "linux-x86_64"; do
if [[ $build == "mac" ]]; then
if [[ $BUILD_MAC == 0 ]]; then
continue
fi
dir="Zotero.app"
else
if [[ $build == "win32" ]] || [[ $build == "win64" ]] && [[ $BUILD_WIN == 0 ]]; then
continue
fi
if [[ $build == "linux-i686" ]] || [[ $build == "linux-x86_64" ]] && [[ $BUILD_LINUX == 0 ]]; then
continue
fi
dir="Zotero_$build"
touch "$UPDATE_STAGE_DIR/$TO/$dir/precomplete"
cp "$SCRIPT_DIR/removed-files_$build" "$UPDATE_STAGE_DIR/$TO/$dir/removed-files"
fi
if [[ $BUILD_INCREMENTAL == 1 ]] && [[ -d "$UPDATE_STAGE_DIR/$FROM/$dir" ]]; then
echo
echo "Building incremental $build update from $FROM to $TO"
# mbsdiff fails on paths with symlink
if [ $WIN_NATIVE == 1 ]; then
cur=`pwd`
from_dir="`realpath --relative-to=\"$cur\" \"$UPDATE_STAGE_DIR/$FROM/$dir\"`"
to_dir="`realpath --relative-to=\"$cur\" \"$UPDATE_STAGE_DIR/$TO/$dir\"`"
else
from_dir="$UPDATE_STAGE_DIR/$FROM/$dir"
to_dir="$UPDATE_STAGE_DIR/$TO/$dir"
fi
"$SCRIPT_DIR/make_incremental_update.sh" "$DIST_DIR/Zotero-${TO}-${FROM}_$build.mar" "$from_dir" "$to_dir"
CHANGES_MADE=1
fi
if [[ $BUILD_FULL == 1 ]]; then
echo
echo "Building full $build update for $TO"
"$SCRIPT_DIR/make_full_update.sh" "$DIST_DIR/Zotero-${TO}-full_$build.mar" "$UPDATE_STAGE_DIR/$TO/$dir"
CHANGES_MADE=1
fi
done
rm -rf "$UPDATE_STAGE_DIR"
# Update file manifests
if [ $CHANGES_MADE -eq 1 ]; then
# Cygwin has sha512sum, macOS has shasum, Linux has both
if [[ -n "`which sha512sum 2> /dev/null`" ]]; then
SHACMD="sha512sum"
else
SHACMD="shasum -a 512"
fi
cd "$DIST_DIR"
for platform in "mac" "win" "linux"; do
file=files-$platform
rm -f $file
for fn in `find . -name "*$platform*.mar" -exec basename {} \;`; do
size=`wc -c "$fn" | awk '{print $1}'`
hash=`$SHACMD "$fn" | awk '{print $1}'`
echo $fn $hash $size >> $file
done
done
fi
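The files-mac, files-win, and files-linux manifests written in the last loop are plain "filename hash size" lines; a small sketch for reading one back (function and argument names are illustrative):
def read_files_manifest(path):
    """Parse a files-<platform> manifest: one 'filename sha512 size' entry per line."""
    entries = []
    with open(path) as f:
        for line in f:
            parts = line.split()
            if len(parts) != 3:
                continue  # skip blank or malformed lines
            name, digest, size = parts
            entries.append({"name": name, "sha512": digest, "size": int(size)})
    return entries

# e.g. read_files_manifest("dist/files-mac")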

View file

@ -0,0 +1,215 @@
#!/bin/bash
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Code shared by update packaging scripts.
# Author: Darin Fisher
#
# -----------------------------------------------------------------------------
# By default just assume that these tools exist on our path
MAR=${MAR:-mar}
BZIP2=${BZIP2:-bzip2}
MBSDIFF=${MBSDIFF:-mbsdiff}
# -----------------------------------------------------------------------------
# Helper routines
notice() {
echo "$*" 1>&2
}
get_file_size() {
info=($(ls -ln "$1"))
echo ${info[4]}
}
copy_perm() {
reference="$1"
target="$2"
if [ -x "$reference" ]; then
chmod 0755 "$target"
else
chmod 0644 "$target"
fi
}
make_add_instruction() {
f="$1"
filev2="$2"
# The third param will be an empty string when a file add instruction is only
# needed in the version 2 manifest. This only happens when the file has an
# add-if-not instruction in the version 3 manifest. This is due to the
# precomplete file prior to the version 3 manifest having a remove instruction
# for this file so the file is removed before applying a complete update.
filev3="$3"
# Used to log to the console
if [ $4 ]; then
forced=" (forced)"
else
forced=
fi
# Changed by Zotero for -e
is_extension=$(echo "$f" | grep -c 'distribution/extensions/.*/') || true
if [ $is_extension = "1" ]; then
# Use the subdirectory of the extensions folder as the file to test
# before performing this add instruction.
testdir=$(echo "$f" | sed 's/\(.*distribution\/extensions\/[^\/]*\)\/.*/\1/')
notice " add-if \"$testdir\" \"$f\""
echo "add-if \"$testdir\" \"$f\"" >> $filev2
if [ ! $filev3 = "" ]; then
echo "add-if \"$testdir\" \"$f\"" >> $filev3
fi
else
notice " add \"$f\"$forced"
echo "add \"$f\"" >> $filev2
if [ ! $filev3 = "" ]; then
echo "add \"$f\"" >> $filev3
fi
fi
}
check_for_add_if_not_update() {
add_if_not_file_chk="$1"
if [ `basename $add_if_not_file_chk` = "channel-prefs.js" -o \
`basename $add_if_not_file_chk` = "update-settings.ini" ]; then
## "true" *giggle*
return 0;
fi
## 'false'... because this is bash. Oh yay!
return 1;
}
check_for_add_to_manifestv2() {
add_if_not_file_chk="$1"
if [ `basename $add_if_not_file_chk` = "update-settings.ini" ]; then
## "true" *giggle*
return 0;
fi
## 'false'... because this is bash. Oh yay!
return 1;
}
make_add_if_not_instruction() {
f="$1"
filev3="$2"
notice " add-if-not \"$f\" \"$f\""
echo "add-if-not \"$f\" \"$f\"" >> $filev3
}
make_patch_instruction() {
f="$1"
filev2="$2"
filev3="$3"
is_extension=$(echo "$f" | grep -c 'distribution/extensions/.*/') || true
if [ $is_extension = "1" ]; then
# Use the subdirectory of the extensions folder as the file to test
# before performing this add instruction.
testdir=$(echo "$f" | sed 's/\(.*distribution\/extensions\/[^\/]*\)\/.*/\1/')
notice " patch-if \"$testdir\" \"$f.patch\" \"$f\""
echo "patch-if \"$testdir\" \"$f.patch\" \"$f\"" >> $filev2
echo "patch-if \"$testdir\" \"$f.patch\" \"$f\"" >> $filev3
else
notice " patch \"$f.patch\" \"$f\""
echo "patch \"$f.patch\" \"$f\"" >> $filev2
echo "patch \"$f.patch\" \"$f\"" >> $filev3
fi
}
append_remove_instructions() {
dir="$1"
filev2="$2"
filev3="$3"
if [ -f "$dir/removed-files" ]; then
listfile="$dir/removed-files"
elif [ -f "$dir/Contents/Resources/removed-files" ]; then
listfile="$dir/Contents/Resources/removed-files"
fi
if [ -n "$listfile" ]; then
# Changed by Zotero: Use subshell and disable filename globbing to prevent bash from expanding
# entries in removed-files with paths from the root (e.g., 'xulrunner/*')
(
set -f
# Map spaces to pipes so that we correctly handle filenames with spaces.
files=($(cat "$listfile" | tr " " "|" | sort -r))
num_files=${#files[*]}
for ((i=0; $i<$num_files; i=$i+1)); do
# Map pipes back to whitespace and remove carriage returns
f=$(echo ${files[$i]} | tr "|" " " | tr -d '\r')
# Trim whitespace
f=$(echo $f)
# Exclude blank lines.
if [ -n "$f" ]; then
# Exclude comments
if [ ! $(echo "$f" | grep -c '^#') = 1 ]; then
if [ $(echo "$f" | grep -c '\/$') = 1 ]; then
notice " rmdir \"$f\""
echo "rmdir \"$f\"" >> $filev2
echo "rmdir \"$f\"" >> $filev3
elif [ $(echo "$f" | grep -c '\/\*$') = 1 ]; then
# Remove the *
f=$(echo "$f" | sed -e 's:\*$::')
notice " rmrfdir \"$f\""
echo "rmrfdir \"$f\"" >> $filev2
echo "rmrfdir \"$f\"" >> $filev3
else
notice " remove \"$f\""
echo "remove \"$f\"" >> $filev2
echo "remove \"$f\"" >> $filev3
fi
fi
fi
done
)
fi
}
# List all files in the current directory, stripping leading "./"
# Pass a variable name and it will be filled as an array.
list_files() {
count=0
find . -type f \
! -name "update.manifest" \
! -name "updatev2.manifest" \
! -name "updatev3.manifest" \
! -name "temp-dirlist" \
! -name "temp-filelist" \
| sed 's/\.\/\(.*\)/\1/' \
| sort -r > "temp-filelist"
while read file; do
eval "${1}[$count]=\"$file\""
# Changed for Zotero to avoid eval as 1
#(( count++ ))
(( ++count ))
done < "temp-filelist"
rm "temp-filelist"
}
# List all directories in the current directory, stripping leading "./"
list_dirs() {
count=0
find . -type d \
! -name "." \
! -name ".." \
| sed 's/\.\/\(.*\)/\1/' \
| sort -r > "temp-dirlist"
while read dir; do
eval "${1}[$count]=\"$dir\""
# Changed for Zotero
#(( count++ ))
(( ++count ))
done < "temp-dirlist"
rm "temp-dirlist"
}
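append_remove_instructions maps each removed-files entry to remove, rmdir, or rmrfdir based on its trailing characters; the same decision, rendered in Python purely as an illustration of the shell logic above:
def remove_instruction(entry):
    """Trailing '/' -> rmdir, trailing '/*' -> rmrfdir, anything else -> remove."""
    entry = entry.strip()
    if not entry or entry.startswith("#"):
        return None  # blank lines and comments are skipped
    if entry.endswith("/*"):
        return 'rmrfdir "%s"' % entry[:-1]  # drop the '*', keep the trailing slash
    if entry.endswith("/"):
        return 'rmdir "%s"' % entry
    return 'remove "%s"' % entry

# e.g. remove_instruction("xulrunner/*") -> 'rmrfdir "xulrunner/"'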

View file

@ -0,0 +1,124 @@
#!/bin/bash
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# This tool generates full update packages for the update system.
# Author: Darin Fisher
#
# Added for Zotero
set -eo pipefail
. $(dirname "$0")/common.sh
# -----------------------------------------------------------------------------
print_usage() {
notice "Usage: $(basename $0) [OPTIONS] ARCHIVE DIRECTORY"
}
if [ $# = 0 ]; then
print_usage
exit 1
fi
if [ $1 = -h ]; then
print_usage
notice ""
notice "The contents of DIRECTORY will be stored in ARCHIVE."
notice ""
notice "Options:"
notice " -h show this help text"
notice ""
exit 1
fi
# -----------------------------------------------------------------------------
archive="$1"
targetdir="$2"
# Prevent the workdir from being inside the targetdir so it isn't included in
# the update mar.
if [ $(echo "$targetdir" | grep -c '\/$') = 1 ]; then
# Remove the /
targetdir=$(echo "$targetdir" | sed -e 's:\/$::')
fi
workdir="$targetdir.work"
updatemanifestv2="$workdir/updatev2.manifest"
updatemanifestv3="$workdir/updatev3.manifest"
targetfiles="updatev2.manifest updatev3.manifest"
mkdir -p "$workdir"
# Generate a list of all files in the target directory.
pushd "$targetdir"
if test $? -ne 0 ; then
exit 1
fi
if [ ! -f "precomplete" ]; then
if [ ! -f "Contents/Resources/precomplete" ]; then
notice "precomplete file is missing!"
exit 1
fi
fi
list_files files
popd
# Add the type of update to the beginning of the update manifests.
> $updatemanifestv2
> $updatemanifestv3
notice ""
notice "Adding type instruction to update manifests"
notice " type complete"
echo "type \"complete\"" >> $updatemanifestv2
echo "type \"complete\"" >> $updatemanifestv3
notice ""
notice "Adding file add instructions to update manifests"
num_files=${#files[*]}
for ((i=0; $i<$num_files; i=$i+1)); do
f="${files[$i]}"
if check_for_add_if_not_update "$f"; then
make_add_if_not_instruction "$f" "$updatemanifestv3"
if check_for_add_to_manifestv2 "$f"; then
make_add_instruction "$f" "$updatemanifestv2" "" 1
fi
else
make_add_instruction "$f" "$updatemanifestv2" "$updatemanifestv3"
fi
dir=$(dirname "$f")
mkdir -p "$workdir/$dir"
$BZIP2 -cz9 "$targetdir/$f" > "$workdir/$f"
copy_perm "$targetdir/$f" "$workdir/$f"
targetfiles="$targetfiles \"$f\""
done
# Append remove instructions for any dead files.
notice ""
notice "Adding file and directory remove instructions from file 'removed-files'"
append_remove_instructions "$targetdir" "$updatemanifestv2" "$updatemanifestv3"
$BZIP2 -z9 "$updatemanifestv2" && mv -f "$updatemanifestv2.bz2" "$updatemanifestv2"
$BZIP2 -z9 "$updatemanifestv3" && mv -f "$updatemanifestv3.bz2" "$updatemanifestv3"
# Changed for Zotero -- -C is unreliable
pushd $workdir > /dev/null
eval "$MAR -c output.mar $targetfiles"
popd > /dev/null
mv -f "$workdir/output.mar" "$archive"
# cleanup
rm -fr "$workdir"
notice ""
notice "Finished"
notice ""

View file

@ -0,0 +1,326 @@
#!/bin/bash
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# This tool generates incremental update packages for the update system.
# Author: Darin Fisher
#
set -eo pipefail
. $(dirname "$0")/common.sh
# -----------------------------------------------------------------------------
print_usage() {
notice "Usage: $(basename $0) [OPTIONS] ARCHIVE FROMDIR TODIR"
notice ""
notice "The differences between FROMDIR and TODIR will be stored in ARCHIVE."
notice ""
notice "Options:"
notice " -h show this help text"
notice " -f clobber this file in the installation"
notice " Must be a path to a file to clobber in the partial update."
notice ""
}
check_for_forced_update() {
force_list="$1"
forced_file_chk="$2"
local f
if [ "$forced_file_chk" = "precomplete" ]; then
## "true" *giggle*
return 0;
fi
if [ "$forced_file_chk" = "Contents/Resources/precomplete" ]; then
## "true" *giggle*
return 0;
fi
if [ "$forced_file_chk" = "removed-files" ]; then
## "true" *giggle*
return 0;
fi
if [ "$forced_file_chk" = "Contents/Resources/removed-files" ]; then
## "true" *giggle*
return 0;
fi
if [ "${forced_file_chk##*.}" = "chk" ]; then
## "true" *giggle*
return 0;
fi
for f in $force_list; do
#echo comparing $forced_file_chk to $f
if [ "$forced_file_chk" = "$f" ]; then
## "true" *giggle*
return 0;
fi
done
## 'false'... because this is bash. Oh yay!
return 1;
}
if [ $# = 0 ]; then
print_usage
exit 1
fi
requested_forced_updates='Contents/MacOS/firefox'
while getopts "hf:" flag
do
case "$flag" in
h) print_usage; exit 0
;;
f) requested_forced_updates="$requested_forced_updates $OPTARG"
;;
?) print_usage; exit 1
;;
esac
done
# -----------------------------------------------------------------------------
set +e
let arg_start=$OPTIND-1
shift $arg_start
set -e
archive="$1"
olddir="$2"
newdir="$3"
# Prevent the workdir from being inside the targetdir so it isn't included in
# the update mar.
if [ $(echo "$newdir" | grep -c '\/$') = 1 ]; then
# Remove the /
newdir=$(echo "$newdir" | sed -e 's:\/$::')
fi
workdir="$newdir.work"
updatemanifestv2="$workdir/updatev2.manifest"
updatemanifestv3="$workdir/updatev3.manifest"
archivefiles="updatev2.manifest updatev3.manifest"
mkdir -p "$workdir"
# Generate a list of all files in the target directory.
pushd "$olddir"
if test $? -ne 0 ; then
exit 1
fi
list_files oldfiles
list_dirs olddirs
popd
pushd "$newdir"
if test $? -ne 0 ; then
exit 1
fi
if [ ! -f "precomplete" ]; then
if [ ! -f "Contents/Resources/precomplete" ]; then
notice "precomplete file is missing!"
exit 1
fi
fi
list_dirs newdirs
list_files newfiles
popd
# Add the type of update to the beginning of the update manifests.
notice ""
notice "Adding type instruction to update manifests"
> $updatemanifestv2
> $updatemanifestv3
notice " type partial"
echo "type \"partial\"" >> $updatemanifestv2
echo "type \"partial\"" >> $updatemanifestv3
notice ""
notice "Adding file patch and add instructions to update manifests"
num_oldfiles=${#oldfiles[*]}
remove_array=
num_removes=0
for ((i=0; $i<$num_oldfiles; i=$i+1)); do
f="${oldfiles[$i]}"
# If this file exists in the new directory as well, then check if it differs.
if [ -f "$newdir/$f" ]; then
if check_for_add_if_not_update "$f"; then
# The full workdir may not exist yet, so create it if necessary.
mkdir -p `dirname "$workdir/$f"`
$BZIP2 -cz9 "$newdir/$f" > "$workdir/$f"
copy_perm "$newdir/$f" "$workdir/$f"
make_add_if_not_instruction "$f" "$updatemanifestv3"
archivefiles="$archivefiles \"$f\""
continue 1
fi
if check_for_forced_update "$requested_forced_updates" "$f"; then
# The full workdir may not exist yet, so create it if necessary.
mkdir -p `dirname "$workdir/$f"`
$BZIP2 -cz9 "$newdir/$f" > "$workdir/$f"
copy_perm "$newdir/$f" "$workdir/$f"
make_add_instruction "$f" "$updatemanifestv2" "$updatemanifestv3" 1
archivefiles="$archivefiles \"$f\""
continue 1
fi
if ! diff "$olddir/$f" "$newdir/$f" > /dev/null; then
# Compute both the compressed binary diff and the compressed file, and
# compare the sizes. Then choose the smaller of the two to package.
dir=$(dirname "$workdir/$f")
mkdir -p "$dir"
notice "diffing \"$f\""
# MBSDIFF_HOOK represents the communication interface with funsize and,
# if enabled, caches the intermediate patches for future use and
# compute avoidance
#
# An example of MBSDIFF_HOOK env variable could look like this:
# export MBSDIFF_HOOK="myscript.sh -A https://funsize/api -c /home/user"
# where myscript.sh has the following usage:
# myscript.sh -A SERVER-URL [-c LOCAL-CACHE-DIR-PATH] [-g] [-u] \
# PATH-FROM-URL PATH-TO-URL PATH-PATCH SERVER-URL
#
# Note: patches are bzipped and stashed in funsize to gain more speed
# if the service is not enabled, default to the old behavior
if [ -z "$MBSDIFF_HOOK" ]; then
$MBSDIFF "$olddir/$f" "$newdir/$f" "$workdir/$f.patch"
$BZIP2 -z9 "$workdir/$f.patch"
else
# if service enabled then check patch existence for retrieval
if $MBSDIFF_HOOK -g "$olddir/$f" "$newdir/$f" "$workdir/$f.patch.bz2"; then
notice "file \"$f\" found in funsize, diffing skipped"
else
# if not found already - compute it and cache it for future use
$MBSDIFF "$olddir/$f" "$newdir/$f" "$workdir/$f.patch"
$BZIP2 -z9 "$workdir/$f.patch"
$MBSDIFF_HOOK -u "$olddir/$f" "$newdir/$f" "$workdir/$f.patch.bz2"
fi
fi
$BZIP2 -cz9 "$newdir/$f" > "$workdir/$f"
copy_perm "$newdir/$f" "$workdir/$f"
patchfile="$workdir/$f.patch.bz2"
patchsize=$(get_file_size "$patchfile")
fullsize=$(get_file_size "$workdir/$f")
if [ $patchsize -lt $fullsize ]; then
make_patch_instruction "$f" "$updatemanifestv2" "$updatemanifestv3"
mv -f "$patchfile" "$workdir/$f.patch"
rm -f "$workdir/$f"
archivefiles="$archivefiles \"$f.patch\""
else
make_add_instruction "$f" "$updatemanifestv2" "$updatemanifestv3"
rm -f "$patchfile"
archivefiles="$archivefiles \"$f\""
fi
fi
else
# remove instructions are added after add / patch instructions for
# consistency with make_incremental_updates.py
remove_array[$num_removes]=$f
# Changed by Zotero for -e
#(( num_removes++ ))
(( ++num_removes ))
fi
done
# Newly added files
notice ""
notice "Adding file add instructions to update manifests"
num_newfiles=${#newfiles[*]}
for ((i=0; $i<$num_newfiles; i=$i+1)); do
f="${newfiles[$i]}"
# If we've already tested this file, then skip it
for ((j=0; $j<$num_oldfiles; j=$j+1)); do
if [ "$f" = "${oldfiles[j]}" ]; then
continue 2
fi
done
dir=$(dirname "$workdir/$f")
mkdir -p "$dir"
$BZIP2 -cz9 "$newdir/$f" > "$workdir/$f"
copy_perm "$newdir/$f" "$workdir/$f"
if check_for_add_if_not_update "$f"; then
make_add_if_not_instruction "$f" "$updatemanifestv3"
else
make_add_instruction "$f" "$updatemanifestv2" "$updatemanifestv3"
fi
archivefiles="$archivefiles \"$f\""
done
notice ""
notice "Adding file remove instructions to update manifests"
for ((i=0; $i<$num_removes; i=$i+1)); do
f="${remove_array[$i]}"
notice " remove \"$f\""
echo "remove \"$f\"" >> $updatemanifestv2
echo "remove \"$f\"" >> $updatemanifestv3
done
# Add remove instructions for any dead files.
notice ""
notice "Adding file and directory remove instructions from file 'removed-files'"
append_remove_instructions "$newdir" "$updatemanifestv2" "$updatemanifestv3"
notice ""
notice "Adding directory remove instructions for directories that no longer exist"
num_olddirs=${#olddirs[*]}
for ((i=0; $i<$num_olddirs; i=$i+1)); do
f="${olddirs[$i]}"
# If this dir doesn't exist in the new directory remove it.
if [ ! -d "$newdir/$f" ]; then
notice " rmdir $f/"
echo "rmdir \"$f/\"" >> $updatemanifestv2
echo "rmdir \"$f/\"" >> $updatemanifestv3
fi
done
$BZIP2 -z9 "$updatemanifestv2" && mv -f "$updatemanifestv2.bz2" "$updatemanifestv2"
$BZIP2 -z9 "$updatemanifestv3" && mv -f "$updatemanifestv3.bz2" "$updatemanifestv3"
mar_command="$MAR"
if [[ -n $MOZ_PRODUCT_VERSION ]]
then
mar_command="$mar_command -V $MOZ_PRODUCT_VERSION"
fi
if [[ -n $MOZ_CHANNEL_ID ]]
then
mar_command="$mar_command -H $MOZ_CHANNEL_ID"
fi
# Changed for Zotero -- -C is unreliable
pushd $workdir > /dev/null
mar_command="$mar_command -c output.mar"
eval "$mar_command $archivefiles"
popd > /dev/null
mv -f "$workdir/output.mar" "$archive"
# cleanup
rm -fr "$workdir"
notice ""
notice "Finished"
notice ""

View file

@ -0,0 +1,550 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
import shutil
import sha
from os.path import join, getsize
from stat import *
import re
import sys
import getopt
import time
import datetime
import bz2
import string
import tempfile
class PatchInfo:
""" Represents the meta-data associated with a patch
work_dir = working dir where files are stored for this patch
archive_files = list of files to include in this patch
manifestv2 = set of manifest version 2 patch instructions
manifestv3 = set of manifest version 3 patch instructions
file_exclusion_list =
files to exclude from this patch. names without slashes will be
excluded anywhere in the directory hierarchy. names with slashes
will only be excluded at that exact path
"""
def __init__(self, work_dir, file_exclusion_list, path_exclusion_list):
self.work_dir=work_dir
self.archive_files=[]
self.manifestv2=[]
self.manifestv3=[]
self.file_exclusion_list=file_exclusion_list
self.path_exclusion_list=path_exclusion_list
def append_add_instruction(self, filename):
""" Appends an add instruction for this patch.
if filename starts with distribution/extensions/.*/ this will add an
add-if instruction that will add the file if the parent directory
of the file exists. This was ported from
mozilla/tools/update-packaging/common.sh's make_add_instruction.
"""
m = re.match("((?:|.*/)distribution/extensions/.*)/", filename)
if m:
# Directory immediately following extensions is used for the test
testdir = m.group(1)
print ' add-if "'+testdir+'" "'+filename+'"'
self.manifestv2.append('add-if "'+testdir+'" "'+filename+'"')
self.manifestv3.append('add-if "'+testdir+'" "'+filename+'"')
else:
print ' add "'+filename+'"'
self.manifestv2.append('add "'+filename+'"')
self.manifestv3.append('add "'+filename+'"')
def append_add_if_not_instruction(self, filename):
""" Appends an add-if-not instruction to the version 3 manifest for this patch.
This was ported from mozilla/tools/update-packaging/common.sh's
make_add_if_not_instruction.
"""
print ' add-if-not "'+filename+'" "'+filename+'"'
self.manifestv3.append('add-if-not "'+filename+'" "'+filename+'"')
def append_patch_instruction(self, filename, patchname):
""" Appends a patch instruction for this patch.
filename = file to patch
patchname = patchfile to apply to file
if filename starts with distribution/extensions/.*/ this will add a
patch-if instruction that will patch the file if the parent
directory of the file exists. This was ported from
mozilla/tools/update-packaging/common.sh's make_patch_instruction.
"""
m = re.match("((?:|.*/)distribution/extensions/.*)/", filename)
if m:
testdir = m.group(1)
print ' patch-if "'+testdir+'" "'+patchname+'" "'+filename+'"'
self.manifestv2.append('patch-if "'+testdir+'" "'+patchname+'" "'+filename+'"')
self.manifestv3.append('patch-if "'+testdir+'" "'+patchname+'" "'+filename+'"')
else:
print ' patch "'+patchname+'" "'+filename+'"'
self.manifestv2.append('patch "'+patchname+'" "'+filename+'"')
self.manifestv3.append('patch "'+patchname+'" "'+filename+'"')
def append_remove_instruction(self, filename):
""" Appends an remove instruction for this patch.
This was ported from
mozilla/tools/update-packaging/common.sh/make_remove_instruction
"""
if filename.endswith("/"):
print ' rmdir "'+filename+'"'
self.manifestv2.append('rmdir "'+filename+'"')
self.manifestv3.append('rmdir "'+filename+'"')
elif filename.endswith("/*"):
filename = filename[:-1]
print ' rmrfdir "'+filename+'"'
self.manifestv2.append('rmrfdir "'+filename+'"')
self.manifestv3.append('rmrfdir "'+filename+'"')
else:
print ' remove "'+filename+'"'
self.manifestv2.append('remove "'+filename+'"')
self.manifestv3.append('remove "'+filename+'"')
def create_manifest_files(self):
""" Create the v2 manifest file in the root of the work_dir """
manifest_file_path = os.path.join(self.work_dir,"updatev2.manifest")
manifest_file = open(manifest_file_path, "wb")
manifest_file.writelines("type \"partial\"\n")
manifest_file.writelines(string.join(self.manifestv2, '\n'))
manifest_file.writelines("\n")
manifest_file.close()
bzip_file(manifest_file_path)
self.archive_files.append('"updatev2.manifest"')
""" Create the v3 manifest file in the root of the work_dir """
manifest_file_path = os.path.join(self.work_dir,"updatev3.manifest")
manifest_file = open(manifest_file_path, "wb")
manifest_file.writelines("type \"partial\"\n")
manifest_file.writelines(string.join(self.manifestv3, '\n'))
manifest_file.writelines("\n")
manifest_file.close()
bzip_file(manifest_file_path)
self.archive_files.append('"updatev3.manifest"')
def build_marfile_entry_hash(self, root_path):
""" Iterates through the root_path, creating a MarFileEntry for each file
and directory in that path. Excludes any filenames in the file_exclusion_list
"""
mar_entry_hash = {}
filename_set = set()
dirname_set = set()
for root, dirs, files in os.walk(root_path):
for name in files:
# filename is the relative path from root directory
partial_path = root[len(root_path)+1:]
if name not in self.file_exclusion_list:
filename = os.path.join(partial_path, name)
if "/"+filename not in self.path_exclusion_list:
mar_entry_hash[filename]=MarFileEntry(root_path, filename)
filename_set.add(filename)
for name in dirs:
# dirname is the relative path from root directory
partial_path = root[len(root_path)+1:]
if name not in self.file_exclusion_list:
dirname = os.path.join(partial_path, name)
if "/"+dirname not in self.path_exclusion_list:
dirname = dirname+"/"
mar_entry_hash[dirname]=MarFileEntry(root_path, dirname)
dirname_set.add(dirname)
return mar_entry_hash, filename_set, dirname_set
class MarFileEntry:
"""Represents a file inside a Mozilla Archive Format (MAR)
abs_path = abspath to the file
name = relative path within the mar. e.g.
foo.mar/dir/bar.txt extracted into /tmp/foo:
abs_path=/tmp/foo/dir/bar.txt
name = dir/bar.txt
"""
def __init__(self, root, name):
"""root = path the the top of the mar
name = relative path within the mar"""
self.name=name.replace("\\", "/")
self.abs_path=os.path.join(root,name)
self.sha_cache=None
def __str__(self):
return 'Name: %s FullPath: %s' %(self.name,self.abs_path)
def calc_file_sha_digest(self, filename):
""" Returns sha digest of given filename"""
file_content = open(filename, 'r').read()
return sha.new(file_content).digest()
def sha(self):
""" Returns sha digest of file repreesnted by this _marfile_entry"""
if not self.sha_cache:
self.sha_cache=self.calc_file_sha_digest(self.abs_path)
return self.sha_cache
def exec_shell_cmd(cmd):
"""Execs shell cmd and raises an exception if the cmd fails"""
if (os.system(cmd)):
raise Exception, "cmd failed "+cmd
def copy_file(src_file_abs_path, dst_file_abs_path):
""" Copies src to dst creating any parent dirs required in dst first """
dst_file_dir=os.path.dirname(dst_file_abs_path)
if not os.path.exists(dst_file_dir):
os.makedirs(dst_file_dir)
# Copy the file over
shutil.copy2(src_file_abs_path, dst_file_abs_path)
def bzip_file(filename):
""" Bzip's the file in place. The original file is replaced with a bzip'd version of itself
assumes the path is absolute"""
exec_shell_cmd('bzip2 -z9 "' + filename+'"')
os.rename(filename+".bz2",filename)
def bunzip_file(filename):
""" Bzip's the file in palce. The original file is replaced with a bunzip'd version of itself.
doesn't matter if the filename ends in .bz2 or not"""
if not filename.endswith(".bz2"):
os.rename(filename, filename+".bz2")
filename=filename+".bz2"
exec_shell_cmd('bzip2 -d "' + filename+'"')
def extract_mar(filename, work_dir):
""" Extracts the marfile intot he work_dir
assumes work_dir already exists otherwise will throw osError"""
print "Extracting "+filename+" to "+work_dir
saved_path = os.getcwd()
try:
os.chdir(work_dir)
exec_shell_cmd("mar -x "+filename)
finally:
os.chdir(saved_path)
def create_partial_patch_for_file(from_marfile_entry, to_marfile_entry, shas, patch_info):
""" Creates the partial patch file and manifest entry for the pair of files passed in
"""
if not (from_marfile_entry.sha(),to_marfile_entry.sha()) in shas:
print 'diffing "'+from_marfile_entry.name+'\"'
#bunzip to/from
bunzip_file(from_marfile_entry.abs_path)
bunzip_file(to_marfile_entry.abs_path)
# The patch file will be created in the working directory with the
# name of the file in the mar + .patch
patch_file_abs_path = os.path.join(patch_info.work_dir,from_marfile_entry.name+".patch")
patch_file_dir=os.path.dirname(patch_file_abs_path)
if not os.path.exists(patch_file_dir):
os.makedirs(patch_file_dir)
# Create bzip'd patch file
exec_shell_cmd("mbsdiff "+from_marfile_entry.abs_path+" "+to_marfile_entry.abs_path+" "+patch_file_abs_path)
bzip_file(patch_file_abs_path)
# Create bzip'd full file
full_file_abs_path = os.path.join(patch_info.work_dir, to_marfile_entry.name)
shutil.copy2(to_marfile_entry.abs_path, full_file_abs_path)
bzip_file(full_file_abs_path)
if os.path.getsize(patch_file_abs_path) < os.path.getsize(full_file_abs_path):
# Patch is smaller than file. Remove the file and add patch to manifest
os.remove(full_file_abs_path)
file_in_manifest_name = from_marfile_entry.name+".patch"
file_in_manifest_abspath = patch_file_abs_path
patch_info.append_patch_instruction(to_marfile_entry.name, file_in_manifest_name)
else:
# File is smaller than patch. Remove the patch and add file to manifest
os.remove(patch_file_abs_path)
file_in_manifest_name = from_marfile_entry.name
file_in_manifest_abspath = full_file_abs_path
patch_info.append_add_instruction(file_in_manifest_name)
shas[from_marfile_entry.sha(),to_marfile_entry.sha()] = (file_in_manifest_name,file_in_manifest_abspath)
patch_info.archive_files.append('"'+file_in_manifest_name+'"')
else:
filename, src_file_abs_path = shas[from_marfile_entry.sha(),to_marfile_entry.sha()]
# We've already calculated the patch for this pair of files.
if (filename.endswith(".patch")):
# print "skipping diff: "+from_marfile_entry.name
# Patch was smaller than file - add patch instruction to manifest
file_in_manifest_name = to_marfile_entry.name+'.patch';
patch_info.append_patch_instruction(to_marfile_entry.name, file_in_manifest_name)
else:
# File was smaller than patch - add file to manifest
file_in_manifest_name = to_marfile_entry.name
patch_info.append_add_instruction(file_in_manifest_name)
# Copy the pre-calculated file into our new patch work area
copy_file(src_file_abs_path, os.path.join(patch_info.work_dir, file_in_manifest_name))
patch_info.archive_files.append('"'+file_in_manifest_name+'"')
def create_add_patch_for_file(to_marfile_entry, patch_info):
""" Copy the file to the working dir, add the add instruction, and add it to the list of archive files """
copy_file(to_marfile_entry.abs_path, os.path.join(patch_info.work_dir, to_marfile_entry.name))
patch_info.append_add_instruction(to_marfile_entry.name)
patch_info.archive_files.append('"'+to_marfile_entry.name+'"')
def create_add_if_not_patch_for_file(to_marfile_entry, patch_info):
""" Copy the file to the working dir, add the add-if-not instruction, and add it to the list of archive files """
copy_file(to_marfile_entry.abs_path, os.path.join(patch_info.work_dir, to_marfile_entry.name))
patch_info.append_add_if_not_instruction(to_marfile_entry.name)
patch_info.archive_files.append('"'+to_marfile_entry.name+'"')
def process_explicit_remove_files(dir_path, patch_info):
""" Looks for a 'removed-files' file in the dir_path. If the removed-files does not exist
this will throw. If found adds the removed-files
found in that file to the patch_info"""
# Windows and linux have this file at the root of the dir
list_file_path = os.path.join(dir_path, "removed-files")
if not os.path.exists(list_file_path):
list_file_path = os.path.join(dir_path, "Contents/Resources/removed-files")
if (os.path.exists(list_file_path)):
list_file = bz2.BZ2File(list_file_path,"r") # throws if doesn't exist
lines = []
for line in list_file:
lines.append(line.strip())
list_file.close()
lines.sort(reverse=True)
for line in lines:
# Exclude any blank and comment lines.
if line and not line.startswith("#"):
# Python on windows uses \ for path separators and the update
# manifests expects / for path separators on all platforms.
line = line.replace("\\", "/")
patch_info.append_remove_instruction(line)
def create_partial_patch(from_dir_path, to_dir_path, patch_filename, shas, patch_info, forced_updates, add_if_not_list):
""" Builds a partial patch by comparing the files in from_dir_path to those of to_dir_path"""
# Canonicalize the paths for safety
from_dir_path = os.path.abspath(from_dir_path)
to_dir_path = os.path.abspath(to_dir_path)
# Create a hashtable of the from and to directories
from_dir_hash,from_file_set,from_dir_set = patch_info.build_marfile_entry_hash(from_dir_path)
to_dir_hash,to_file_set,to_dir_set = patch_info.build_marfile_entry_hash(to_dir_path)
# Create a list of the forced updates
forced_list = forced_updates.strip().split('|')
# Require that the precomplete file is included in the complete update
if "precomplete" in to_file_set:
forced_list.append("precomplete")
elif "Contents/Resources/precomplete" in to_file_set:
forced_list.append("Contents/Resources/precomplete")
# The check with \ file separators allows tests for Mac to run on Windows
elif "Contents\Resources\precomplete" in to_file_set:
forced_list.append("Contents\Resources\precomplete")
else:
raise Exception, "missing precomplete file in: "+to_dir_path
if "removed-files" in to_file_set:
forced_list.append("removed-files")
elif "Contents/Resources/removed-files" in to_file_set:
forced_list.append("Contents/Resources/removed-files")
# The check with \ file separators allows tests for Mac to run on Windows
elif "Contents\Resources\\removed-files" in to_file_set:
forced_list.append("Contents\Resources\\removed-files")
else:
raise Exception, "missing removed-files file in: "+to_dir_path
# Files which exist in both sets need to be patched
patch_filenames = list(from_file_set.intersection(to_file_set))
patch_filenames.sort(reverse=True)
for filename in patch_filenames:
from_marfile_entry = from_dir_hash[filename]
to_marfile_entry = to_dir_hash[filename]
if os.path.basename(filename) in add_if_not_list:
# This filename is in the add if not list, explicitly add-if-not
create_add_if_not_patch_for_file(to_dir_hash[filename], patch_info)
elif filename in forced_list:
print 'Forcing "'+filename+'"'
# This filename is in the forced list, explicitly add
create_add_patch_for_file(to_dir_hash[filename], patch_info)
else:
if from_marfile_entry.sha() != to_marfile_entry.sha():
# Not the same - calculate a patch
create_partial_patch_for_file(from_marfile_entry, to_marfile_entry, shas, patch_info)
# files in to_dir not in from_dir need to be added
add_filenames = list(to_file_set - from_file_set)
add_filenames.sort(reverse=True)
for filename in add_filenames:
if os.path.basename(filename) in add_if_not_list:
create_add_if_not_patch_for_file(to_dir_hash[filename], patch_info)
else:
create_add_patch_for_file(to_dir_hash[filename], patch_info)
# files in from_dir not in to_dir need to be removed
remove_filenames = list(from_file_set - to_file_set)
remove_filenames.sort(reverse=True)
for filename in remove_filenames:
patch_info.append_remove_instruction(from_dir_hash[filename].name)
process_explicit_remove_files(to_dir_path, patch_info)
# directories in from_dir not in to_dir need to be removed
remove_dirnames = list(from_dir_set - to_dir_set)
remove_dirnames.sort(reverse=True)
for dirname in remove_dirnames:
patch_info.append_remove_instruction(from_dir_hash[dirname].name)
# Construct the Manifest files
patch_info.create_manifest_files()
# And construct the mar
mar_cmd = 'mar -C '+patch_info.work_dir+' -c output.mar '+string.join(patch_info.archive_files, ' ')
exec_shell_cmd(mar_cmd)
# Copy mar to final destination
patch_file_dir = os.path.split(patch_filename)[0]
if not os.path.exists(patch_file_dir):
os.makedirs(patch_file_dir)
shutil.copy2(os.path.join(patch_info.work_dir,"output.mar"), patch_filename)
return patch_filename
def usage():
print "-h for help"
print "-f for patchlist_file"
def get_buildid(work_dir):
""" extracts buildid from MAR
"""
ini = '%s/application.ini' % work_dir
if not os.path.exists(ini):
ini = '%s/Contents/Resources/application.ini' % work_dir
if not os.path.exists(ini):
print 'WARNING: application.ini not found, cannot find build ID'
return ''
file = bz2.BZ2File(ini)
for line in file:
if line.find('BuildID') == 0:
return line.strip().split('=')[1]
print 'WARNING: cannot find build ID in application.ini'
return ''
def decode_filename(filepath):
""" Breaks filename/dir structure into component parts based on regex
for example: firefox-3.0b3pre.en-US.linux-i686.complete.mar
Or linux-i686/en-US/firefox-3.0b3.complete.mar
Returns dict with keys product, version, locale, platform, type
"""
try:
m = re.search(
'(?P<product>\w+)(-)(?P<version>\w+\.\w+(\.\w+){0,2})(\.)(?P<locale>.+?)(\.)(?P<platform>.+?)(\.)(?P<type>\w+)(.mar)',
os.path.basename(filepath))
return m.groupdict()
except Exception, exc:
try:
m = re.search(
'(?P<platform>.+?)\/(?P<locale>.+?)\/(?P<product>\w+)-(?P<version>\w+\.\w+)\.(?P<type>\w+).mar',
filepath)
return m.groupdict()
except:
raise Exception("could not parse filepath %s: %s" % (filepath, exc))
def create_partial_patches(patches):
""" Given the patches generates a set of partial patches"""
shas = {}
work_dir_root = None
metadata = []
try:
work_dir_root = tempfile.mkdtemp('-fastmode', 'tmp', os.getcwd())
print "Building patches using work dir: %s" % (work_dir_root)
# Iterate through every patch set in the patch file
patch_num = 1
for patch in patches:
startTime = time.time()
from_filename,to_filename,patch_filename,forced_updates = patch.split(",")
from_filename,to_filename,patch_filename = os.path.abspath(from_filename),os.path.abspath(to_filename),os.path.abspath(patch_filename)
# Each patch iteration uses its own work dir
work_dir = os.path.join(work_dir_root,str(patch_num))
os.mkdir(work_dir)
# Extract from mar into from dir
work_dir_from = os.path.join(work_dir,"from");
os.mkdir(work_dir_from)
extract_mar(from_filename,work_dir_from)
from_decoded = decode_filename(from_filename)
from_buildid = get_buildid(work_dir_from)
from_shasum = sha.sha(open(from_filename).read()).hexdigest()
from_size = str(os.path.getsize(to_filename))
# Extract to mar into to dir
work_dir_to = os.path.join(work_dir,"to")
os.mkdir(work_dir_to)
extract_mar(to_filename, work_dir_to)
to_decoded = decode_filename(from_filename)
to_buildid = get_buildid(work_dir_to)
to_shasum = sha.sha(open(to_filename).read()).hexdigest()
to_size = str(os.path.getsize(to_filename))
mar_extract_time = time.time()
partial_filename = create_partial_patch(work_dir_from, work_dir_to, patch_filename, shas, PatchInfo(work_dir, ['update.manifest','updatev2.manifest','updatev3.manifest'],[]),forced_updates,['channel-prefs.js','update-settings.ini'])
partial_buildid = to_buildid
partial_shasum = sha.sha(open(partial_filename).read()).hexdigest()
partial_size = str(os.path.getsize(partial_filename))
metadata.append({
'to_filename': os.path.basename(to_filename),
'from_filename': os.path.basename(from_filename),
'partial_filename': os.path.basename(partial_filename),
'to_buildid':to_buildid,
'from_buildid':from_buildid,
'to_sha1sum':to_shasum,
'from_sha1sum':from_shasum,
'partial_sha1sum':partial_shasum,
'to_size':to_size,
'from_size':from_size,
'partial_size':partial_size,
'to_version':to_decoded['version'],
'from_version':from_decoded['version'],
'locale':from_decoded['locale'],
'platform':from_decoded['platform'],
})
print "done with patch %s/%s time (%.2fs/%.2fs/%.2fs) (mar/patch/total)" % (str(patch_num),str(len(patches)),mar_extract_time-startTime,time.time()-mar_extract_time,time.time()-startTime)
patch_num += 1
return metadata
finally:
# If we fail or get a ctrl-c during run be sure to clean up temp dir
if (work_dir_root and os.path.exists(work_dir_root)):
shutil.rmtree(work_dir_root)
def main(argv):
patchlist_file = None
try:
opts, args = getopt.getopt(argv, "hf:", ["help", "patchlist_file="])
for opt, arg in opts:
if opt in ("-h", "--help"):
usage()
sys.exit()
elif opt in ("-f", "--patchlist_file"):
patchlist_file = arg
except getopt.GetoptError:
usage()
sys.exit(2)
if not patchlist_file:
usage()
sys.exit(2)
patches = []
f = open(patchlist_file, 'r')
for line in f.readlines():
patches.append(line)
f.close()
create_partial_patches(patches)
if __name__ == "__main__":
main(sys.argv[1:])
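decode_filename above relies on the MAR naming convention shown in its docstring; a Python 3 rendering of essentially the same regex, shown with that example name (a sketch, not the exact upstream pattern):
import re

MAR_NAME = re.compile(
    r'(?P<product>\w+)-(?P<version>\w+\.\w+(?:\.\w+){0,2})'
    r'\.(?P<locale>.+?)\.(?P<platform>.+?)\.(?P<type>\w+)\.mar')

def decode(name):
    m = MAR_NAME.search(name)
    return m.groupdict() if m else None

# decode("firefox-3.0b3pre.en-US.linux-i686.complete.mar")
# -> {'product': 'firefox', 'version': '3.0b3pre', 'locale': 'en-US',
#     'platform': 'linux-i686', 'type': 'complete'}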

View file

@ -0,0 +1,19 @@
xulrunner/*
pingsender
translators.zip
translators.index
styles.zip
resource/schema/userdata.sql
resource/schema/triggers.sql
resource/schema/system.sql
resource/schema/repotime.txt
resource/schema/renamed-styles.json
resource/schema/engines.json
resource/schema/abbreviations.json
resource/q.js
resource/csl-validator.js
resource/concurrent-caller.js
install.rdf
deleted.txt
test/*
run-zotero.sh

View file

@ -0,0 +1,19 @@
xulrunner/*
pingsender
translators.zip
translators.index
styles.zip
resource/schema/userdata.sql
resource/schema/triggers.sql
resource/schema/system.sql
resource/schema/repotime.txt
resource/schema/renamed-styles.json
resource/schema/engines.json
resource/schema/abbreviations.json
resource/q.js
resource/csl-validator.js
resource/concurrent-caller.js
install.rdf
deleted.txt
test/*
run-zotero.sh

View file

@ -0,0 +1,49 @@
Contents/Frameworks/*
Contents/MacOS/active-update.xml
Contents/MacOS/components/
Contents/MacOS/chrome.manifest
Contents/MacOS/crashreporter.app/*
Contents/MacOS/defaults/*
Contents/MacOS/dependentlibs.list
Contents/MacOS/dictionaries/
Contents/MacOS/gmp-fake/*
Contents/MacOS/Info.plist
Contents/MacOS/js-gdb.py
Contents/MacOS/libfreebl.chk
Contents/MacOS/libnssdbm3.chk
Contents/MacOS/libsoftokn3.chk
Contents/MacOS/libmozsqlite3.dylib
Contents/MacOS/libnspr4.dylib
Contents/MacOS/libnssutil3.dylib
Contents/MacOS/libplc4.dylib
Contents/MacOS/libplds4.dylib
Contents/MacOS/libsmime3.dylib
Contents/MacOS/libssl3.dylib
Contents/MacOS/libxpcom.dylib
Contents/MacOS/LICENSE
Contents/MacOS/omni.ja
Contents/MacOS/pingsender
Contents/MacOS/platform.ini
Contents/MacOS/precomplete
Contents/MacOS/README.xulrunner
Contents/MacOS/res/*
Contents/MacOS/Resources/*
Contents/MacOS/update-settings.ini
Contents/MacOS/updater.app/Contents/MacOS/updater-bin
Contents/MacOS/updates.xml
Contents/MacOS/updates/*
Contents/MacOS/zotero-bin
Contents/Resources/translators.zip
Contents/Resources/translators.index
Contents/Resources/styles.zip
Contents/Resources/install.rdf
Contents/Resources/deleted.txt
Contents/Resources/extensions/pythonext@mozdev.org/*
Contents/Resources/extensions/zoteroMacWordIntegration@zotero.org/components/install.py
Contents/Resources/extensions/zoteroMacWordIntegration@zotero.org/components/install.pyo
Contents/Resources/extensions/zoteroMacWordIntegration@zotero.org/components/zoteroIntegrationApplication.py
Contents/Resources/extensions/zoteroMacWordIntegration@zotero.org/components/zoteroIntegrationApplication.pyo
Contents/Resources/extensions/zoteroMacWordIntegration@zotero.org/pylib/*
Contents/Resources/resource/*
Contents/Resources/chrome/zotero.jar
Contents/Resources/test/*

View file

@ -0,0 +1,46 @@
deleted.txt
gkmedias.dll
install.rdf
mozcrt19.dll
mozutils.dll
msvcp80.dll
msvcr80.dll
translators.zip
translators.index
styles.zip
Microsoft.VC80.CRT.manifest
resource/schema/userdata.sql
resource/schema/triggers.sql
resource/schema/system.sql
resource/schema/repotime.txt
resource/schema/renamed-styles.json
resource/schema/engines.json
resource/schema/abbreviations.json
resource/q.js
resource/csl-validator.js
resource/concurrent-caller.js
resource/
xulrunner/*
extensions/zoteroWinWordIntegration@zotero.org/components/zoteroWinWordIntegration.dll
extensions/zoteroWinWordIntegration@zotero.org/components-5.0/*
extensions/zoteroWinWordIntegration@zotero.org/components-6.0/*
extensions/zoteroWinWordIntegration@zotero.org/components-7.0/*
extensions/zoteroWinWordIntegration@zotero.org/components-8.0/*
extensions/zoteroWinWordIntegration@zotero.org/components-9.0/*
extensions/zoteroWinWordIntegration@zotero.org/components-10.0/*
extensions/zoteroWinWordIntegration@zotero.org/components-12.0/*
extensions/zoteroWinWordIntegration@zotero.org/components-13.0/*
extensions/zoteroWinWordIntegration@zotero.org/components-14.0/*
extensions/zoteroWinWordIntegration@zotero.org/components-17.0/*
extensions/zoteroWinWordIntegration@zotero.org/components-18.0/*
extensions/zoteroWinWordIntegration@zotero.org/components-20.0/*
extensions/zoteroWinWordIntegration@zotero.org/components-22.0/*
extensions/zoteroWinWordIntegration@zotero.org/components-23.0/*
extensions/zoteroWinWordIntegration@zotero.org/components-25.0/*
extensions/zoteroWinWordIntegration@zotero.org/components-26.0/*
extensions/zoteroWinWordIntegration@zotero.org/components-28.0/*
extensions/zoteroWinWordIntegration@zotero.org/components-30.0/*
extensions/zoteroWinWordIntegration@zotero.org/components-32.0/*
extensions/zoteroWinWordIntegration@zotero.org/components-33.0/*
extensions/zoteroWinWordIntegration@zotero.org/components-35.0/*
test/*

View file

BIN
app/win/VersionInfo1.rc Normal file

Binary file not shown.

53
app/win/download-nsis-plugins Executable file
View file

@@ -0,0 +1,53 @@
#!/bin/bash
set -euo pipefail
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
ROOT_DIR="$(dirname "$SCRIPT_DIR")"
. "$ROOT_DIR/config.sh"
#
# Turns out we need older versions from Mozilla, not the official ZIPs
#
#plugins=$(cat <<EOF
#http://nsis.sourceforge.net/mediawiki/images/d/d4/AppAssocReg-0.4.zip
#https://github.com/connectiblutz/NSIS-ApplicationID/releases/download/1.1/NSIS-ApplicationID.zip
#http://nsis.sourceforge.net/mediawiki/images/c/ca/InvokeShellVerb-1.0.zip
#http://nsis.sourceforge.net/mediawiki/images/6/6c/Shelllink.zip
#http://nsis.sourceforge.net/mediawiki/images/8/8f/UAC.zip
#EOF
#)
#
#mkdir -p nsis
#cd nsis
#for plugin in $plugins; do
# wget $plugin
#done
#
#unzip_cmd="unzip -d Plugins -j"
#for zip in AppAssocReg*.zip InvokeShellVerb*.zip; do
# $unzip_cmd $zip 'Plugins/Unicode/*.dll'
#done
#$unzip_cmd Shelllink*.zip Unicode/Plugins/ShellLink.dll
#$unzip_cmd NSIS-ApplicationID*.zip 'ReleaseUnicode/ApplicationID*'
#$unzip_cmd UAC.zip Plugins/x86-unicode/UAC.dll
#
#rm *.zip
#
#echo
#echo
#echo "Files extracted to ./nsis/Plugins -- move to ${NSIS_DIR}Plugins"
#echo
#ls -la Plugins
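# Fetch the prebuilt NSIS plugin DLLs vendored in mozilla-central (pinned to
# revision 052d53200cf8) instead of the upstream SourceForge ZIPs.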
mkdir -p Plugins
cd Plugins
plugins="AccessControl AppAssocReg ApplicationID InvokeShellVerb ShellLink UAC"
for plugin in $plugins; do
curl -fL "https://hg.mozilla.org/mozilla-central/raw-file/052d53200cf8/other-licenses/nsis/Plugins/$plugin.dll" > "$plugin.dll"
done
echo
echo
echo "Files downloaded to ./Plugins -- move to ${NSIS_DIR}Plugins"
echo
ls -la

Binary file not shown.

View file

@@ -0,0 +1,3 @@
// CompressionMethod.cpp
#include "StdAfx.h"

View file

@@ -0,0 +1,64 @@
// 7zCompressionMode.h
#ifndef __7Z_COMPRESSION_MODE_H
#define __7Z_COMPRESSION_MODE_H
#include "../../../Windows/PropVariant.h"
#include "7zMethodID.h"
namespace NArchive {
namespace N7z {
struct CProperty
{
PROPID PropID;
NWindows::NCOM::CPropVariant Value;
};
struct CMethodFull
{
CMethodID MethodID;
UInt32 NumInStreams;
UInt32 NumOutStreams;
bool IsSimpleCoder() const
{ return (NumInStreams == 1) && (NumOutStreams == 1); }
#ifdef EXCLUDE_COM
#else
CLSID EncoderClassID;
CSysString FilePath;
#endif
CObjectVector<CProperty> CoderProperties;
};
struct CBind
{
UInt32 InCoder;
UInt32 InStream;
UInt32 OutCoder;
UInt32 OutStream;
};
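// Describes a complete compression pipeline: the coder chain, the stream
// bindings between coders, and (optionally) thread count and archive password.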
struct CCompressionMethodMode
{
CObjectVector<CMethodFull> Methods;
CRecordVector<CBind> Binds;
#ifdef COMPRESS_MT
UInt32 NumThreads;
#endif
bool PasswordIsDefined;
UString Password;
bool IsEmpty() const { return (Methods.IsEmpty() && !PasswordIsDefined); }
CCompressionMethodMode(): PasswordIsDefined(false)
#ifdef COMPRESS_MT
, NumThreads(1)
#endif
{}
};
}}
#endif

View file

@@ -0,0 +1,443 @@
// 7zDecode.cpp
#include "StdAfx.h"
#include "7zDecode.h"
#include "../../IPassword.h"
#include "../../Common/LockedStream.h"
#include "../../Common/StreamObjects.h"
#include "../../Common/ProgressUtils.h"
#include "../../Common/LimitedStreams.h"
#include "../Common/FilterCoder.h"
#include "7zMethods.h"
#ifdef COMPRESS_LZMA
#include "../../Compress/LZMA/LZMADecoder.h"
static NArchive::N7z::CMethodID k_LZMA = { { 0x3, 0x1, 0x1 }, 3 };
#endif
#ifdef COMPRESS_PPMD
#include "../../Compress/PPMD/PPMDDecoder.h"
static NArchive::N7z::CMethodID k_PPMD = { { 0x3, 0x4, 0x1 }, 3 };
#endif
#ifdef COMPRESS_BCJ_X86
#include "../../Compress/Branch/x86.h"
static NArchive::N7z::CMethodID k_BCJ_X86 = { { 0x3, 0x3, 0x1, 0x3 }, 4 };
#endif
#ifdef COMPRESS_BCJ2
#include "../../Compress/Branch/x86_2.h"
static NArchive::N7z::CMethodID k_BCJ2 = { { 0x3, 0x3, 0x1, 0x1B }, 4 };
#endif
#ifdef COMPRESS_DEFLATE
#ifndef COMPRESS_DEFLATE_DECODER
#define COMPRESS_DEFLATE_DECODER
#endif
#endif
#ifdef COMPRESS_DEFLATE_DECODER
#include "../../Compress/Deflate/DeflateDecoder.h"
static NArchive::N7z::CMethodID k_Deflate = { { 0x4, 0x1, 0x8 }, 3 };
#endif
#ifdef COMPRESS_BZIP2
#ifndef COMPRESS_BZIP2_DECODER
#define COMPRESS_BZIP2_DECODER
#endif
#endif
#ifdef COMPRESS_BZIP2_DECODER
#include "../../Compress/BZip2/BZip2Decoder.h"
static NArchive::N7z::CMethodID k_BZip2 = { { 0x4, 0x2, 0x2 }, 3 };
#endif
#ifdef COMPRESS_COPY
#include "../../Compress/Copy/CopyCoder.h"
static NArchive::N7z::CMethodID k_Copy = { { 0x0 }, 1 };
#endif
#ifdef CRYPTO_7ZAES
#include "../../Crypto/7zAES/7zAES.h"
static NArchive::N7z::CMethodID k_7zAES = { { 0x6, 0xF1, 0x07, 0x01 }, 4 };
#endif
namespace NArchive {
namespace N7z {
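// Translates a folder's coder and bind-pair description from the archive
// database into the CBindInfoEx layout expected by the coder mixer.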
static void ConvertFolderItemInfoToBindInfo(const CFolder &folder,
CBindInfoEx &bindInfo)
{
bindInfo.Clear();
int i;
for (i = 0; i < folder.BindPairs.Size(); i++)
{
NCoderMixer2::CBindPair bindPair;
bindPair.InIndex = (UInt32)folder.BindPairs[i].InIndex;
bindPair.OutIndex = (UInt32)folder.BindPairs[i].OutIndex;
bindInfo.BindPairs.Add(bindPair);
}
UInt32 outStreamIndex = 0;
for (i = 0; i < folder.Coders.Size(); i++)
{
NCoderMixer2::CCoderStreamsInfo coderStreamsInfo;
const CCoderInfo &coderInfo = folder.Coders[i];
coderStreamsInfo.NumInStreams = (UInt32)coderInfo.NumInStreams;
coderStreamsInfo.NumOutStreams = (UInt32)coderInfo.NumOutStreams;
bindInfo.Coders.Add(coderStreamsInfo);
const CAltCoderInfo &altCoderInfo = coderInfo.AltCoders.Front();
bindInfo.CoderMethodIDs.Add(altCoderInfo.MethodID);
for (UInt32 j = 0; j < coderStreamsInfo.NumOutStreams; j++, outStreamIndex++)
if (folder.FindBindPairForOutStream(outStreamIndex) < 0)
bindInfo.OutStreams.Add(outStreamIndex);
}
for (i = 0; i < folder.PackStreams.Size(); i++)
bindInfo.InStreams.Add((UInt32)folder.PackStreams[i]);
}
static bool AreCodersEqual(const NCoderMixer2::CCoderStreamsInfo &a1,
const NCoderMixer2::CCoderStreamsInfo &a2)
{
return (a1.NumInStreams == a2.NumInStreams) &&
(a1.NumOutStreams == a2.NumOutStreams);
}
static bool AreBindPairsEqual(const NCoderMixer2::CBindPair &a1, const NCoderMixer2::CBindPair &a2)
{
return (a1.InIndex == a2.InIndex) &&
(a1.OutIndex == a2.OutIndex);
}
static bool AreBindInfoExEqual(const CBindInfoEx &a1, const CBindInfoEx &a2)
{
if (a1.Coders.Size() != a2.Coders.Size())
return false;
int i;
for (i = 0; i < a1.Coders.Size(); i++)
if (!AreCodersEqual(a1.Coders[i], a2.Coders[i]))
return false;
if (a1.BindPairs.Size() != a2.BindPairs.Size())
return false;
for (i = 0; i < a1.BindPairs.Size(); i++)
if (!AreBindPairsEqual(a1.BindPairs[i], a2.BindPairs[i]))
return false;
for (i = 0; i < a1.CoderMethodIDs.Size(); i++)
if (a1.CoderMethodIDs[i] != a2.CoderMethodIDs[i])
return false;
if (a1.InStreams.Size() != a2.InStreams.Size())
return false;
if (a1.OutStreams.Size() != a2.OutStreams.Size())
return false;
return true;
}
CDecoder::CDecoder(bool multiThread)
{
#ifndef _ST_MODE
multiThread = true;
#endif
_multiThread = multiThread;
_bindInfoExPrevIsDefinded = false;
#ifndef EXCLUDE_COM
LoadMethodMap();
#endif
}
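// Decodes one folder (solid block): wraps the packed streams, rebuilds the coder
// mixer only if the coder chain differs from the previous folder, applies coder
// properties, multithreading settings and password, then runs the mixer.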
HRESULT CDecoder::Decode(IInStream *inStream,
UInt64 startPos,
const UInt64 *packSizes,
const CFolder &folderInfo,
ISequentialOutStream *outStream,
ICompressProgressInfo *compressProgress
#ifndef _NO_CRYPTO
, ICryptoGetTextPassword *getTextPassword
#endif
#ifdef COMPRESS_MT
, bool mtMode, UInt32 numThreads
#endif
)
{
CObjectVector< CMyComPtr<ISequentialInStream> > inStreams;
CLockedInStream lockedInStream;
lockedInStream.Init(inStream);
for (int j = 0; j < folderInfo.PackStreams.Size(); j++)
{
CLockedSequentialInStreamImp *lockedStreamImpSpec = new
CLockedSequentialInStreamImp;
CMyComPtr<ISequentialInStream> lockedStreamImp = lockedStreamImpSpec;
lockedStreamImpSpec->Init(&lockedInStream, startPos);
startPos += packSizes[j];
CLimitedSequentialInStream *streamSpec = new
CLimitedSequentialInStream;
CMyComPtr<ISequentialInStream> inStream = streamSpec;
streamSpec->Init(lockedStreamImp, packSizes[j]);
inStreams.Add(inStream);
}
int numCoders = folderInfo.Coders.Size();
CBindInfoEx bindInfo;
ConvertFolderItemInfoToBindInfo(folderInfo, bindInfo);
bool createNewCoders;
if (!_bindInfoExPrevIsDefinded)
createNewCoders = true;
else
createNewCoders = !AreBindInfoExEqual(bindInfo, _bindInfoExPrev);
if (createNewCoders)
{
int i;
_decoders.Clear();
// _decoders2.Clear();
_mixerCoder.Release();
if (_multiThread)
{
_mixerCoderMTSpec = new NCoderMixer2::CCoderMixer2MT;
_mixerCoder = _mixerCoderMTSpec;
_mixerCoderCommon = _mixerCoderMTSpec;
}
else
{
#ifdef _ST_MODE
_mixerCoderSTSpec = new NCoderMixer2::CCoderMixer2ST;
_mixerCoder = _mixerCoderSTSpec;
_mixerCoderCommon = _mixerCoderSTSpec;
#endif
}
_mixerCoderCommon->SetBindInfo(bindInfo);
for (i = 0; i < numCoders; i++)
{
const CCoderInfo &coderInfo = folderInfo.Coders[i];
const CAltCoderInfo &altCoderInfo = coderInfo.AltCoders.Front();
#ifndef EXCLUDE_COM
CMethodInfo methodInfo;
if (!GetMethodInfo(altCoderInfo.MethodID, methodInfo))
return E_NOTIMPL;
#endif
if (coderInfo.IsSimpleCoder())
{
CMyComPtr<ICompressCoder> decoder;
CMyComPtr<ICompressFilter> filter;
#ifdef COMPRESS_LZMA
if (altCoderInfo.MethodID == k_LZMA)
decoder = new NCompress::NLZMA::CDecoder;
#endif
#ifdef COMPRESS_PPMD
if (altCoderInfo.MethodID == k_PPMD)
decoder = new NCompress::NPPMD::CDecoder;
#endif
#ifdef COMPRESS_BCJ_X86
if (altCoderInfo.MethodID == k_BCJ_X86)
filter = new CBCJ_x86_Decoder;
#endif
#ifdef COMPRESS_DEFLATE_DECODER
if (altCoderInfo.MethodID == k_Deflate)
decoder = new NCompress::NDeflate::NDecoder::CCOMCoder;
#endif
#ifdef COMPRESS_BZIP2_DECODER
if (altCoderInfo.MethodID == k_BZip2)
decoder = new NCompress::NBZip2::CDecoder;
#endif
#ifdef COMPRESS_COPY
if (altCoderInfo.MethodID == k_Copy)
decoder = new NCompress::CCopyCoder;
#endif
#ifdef CRYPTO_7ZAES
if (altCoderInfo.MethodID == k_7zAES)
filter = new NCrypto::NSevenZ::CDecoder;
#endif
if (filter)
{
CFilterCoder *coderSpec = new CFilterCoder;
decoder = coderSpec;
coderSpec->Filter = filter;
}
#ifndef EXCLUDE_COM
if (decoder == 0)
{
RINOK(_libraries.CreateCoderSpec(methodInfo.FilePath,
methodInfo.Decoder, &decoder));
}
#endif
if (decoder == 0)
return E_NOTIMPL;
_decoders.Add((IUnknown *)decoder);
if (_multiThread)
_mixerCoderMTSpec->AddCoder(decoder);
#ifdef _ST_MODE
else
_mixerCoderSTSpec->AddCoder(decoder, false);
#endif
}
else
{
CMyComPtr<ICompressCoder2> decoder;
#ifdef COMPRESS_BCJ2
if (altCoderInfo.MethodID == k_BCJ2)
decoder = new CBCJ2_x86_Decoder;
#endif
#ifndef EXCLUDE_COM
if (decoder == 0)
{
RINOK(_libraries.CreateCoder2(methodInfo.FilePath,
methodInfo.Decoder, &decoder));
}
#endif
if (decoder == 0)
return E_NOTIMPL;
_decoders.Add((IUnknown *)decoder);
if (_multiThread)
_mixerCoderMTSpec->AddCoder2(decoder);
#ifdef _ST_MODE
else
_mixerCoderSTSpec->AddCoder2(decoder, false);
#endif
}
}
_bindInfoExPrev = bindInfo;
_bindInfoExPrevIsDefinded = true;
}
int i;
_mixerCoderCommon->ReInit();
UInt32 packStreamIndex = 0, unPackStreamIndex = 0;
UInt32 coderIndex = 0;
// UInt32 coder2Index = 0;
for (i = 0; i < numCoders; i++)
{
const CCoderInfo &coderInfo = folderInfo.Coders[i];
const CAltCoderInfo &altCoderInfo = coderInfo.AltCoders.Front();
CMyComPtr<IUnknown> &decoder = _decoders[coderIndex];
{
CMyComPtr<ICompressSetDecoderProperties2> setDecoderProperties;
HRESULT result = decoder.QueryInterface(IID_ICompressSetDecoderProperties2, &setDecoderProperties);
if (setDecoderProperties)
{
const CByteBuffer &properties = altCoderInfo.Properties;
size_t size = properties.GetCapacity();
if (size > 0xFFFFFFFF)
return E_NOTIMPL;
if (size > 0)
{
RINOK(setDecoderProperties->SetDecoderProperties2((const Byte *)properties, (UInt32)size));
}
}
}
#ifdef COMPRESS_MT
if (mtMode)
{
CMyComPtr<ICompressSetCoderMt> setCoderMt;
decoder.QueryInterface(IID_ICompressSetCoderMt, &setCoderMt);
if (setCoderMt)
{
RINOK(setCoderMt->SetNumberOfThreads(numThreads));
}
}
#endif
#ifndef _NO_CRYPTO
{
CMyComPtr<ICryptoSetPassword> cryptoSetPassword;
HRESULT result = decoder.QueryInterface(IID_ICryptoSetPassword, &cryptoSetPassword);
if (cryptoSetPassword)
{
if (getTextPassword == 0)
return E_FAIL;
CMyComBSTR password;
RINOK(getTextPassword->CryptoGetTextPassword(&password));
CByteBuffer buffer;
UString unicodePassword(password);
const UInt32 sizeInBytes = unicodePassword.Length() * 2;
buffer.SetCapacity(sizeInBytes);
for (int i = 0; i < unicodePassword.Length(); i++)
{
wchar_t c = unicodePassword[i];
((Byte *)buffer)[i * 2] = (Byte)c;
((Byte *)buffer)[i * 2 + 1] = (Byte)(c >> 8);
}
RINOK(cryptoSetPassword->CryptoSetPassword(
(const Byte *)buffer, sizeInBytes));
}
}
#endif
coderIndex++;
UInt32 numInStreams = (UInt32)coderInfo.NumInStreams;
UInt32 numOutStreams = (UInt32)coderInfo.NumOutStreams;
CRecordVector<const UInt64 *> packSizesPointers;
CRecordVector<const UInt64 *> unPackSizesPointers;
packSizesPointers.Reserve(numInStreams);
unPackSizesPointers.Reserve(numOutStreams);
UInt32 j;
for (j = 0; j < numOutStreams; j++, unPackStreamIndex++)
unPackSizesPointers.Add(&folderInfo.UnPackSizes[unPackStreamIndex]);
for (j = 0; j < numInStreams; j++, packStreamIndex++)
{
int bindPairIndex = folderInfo.FindBindPairForInStream(packStreamIndex);
if (bindPairIndex >= 0)
packSizesPointers.Add(
&folderInfo.UnPackSizes[(UInt32)folderInfo.BindPairs[bindPairIndex].OutIndex]);
else
{
int index = folderInfo.FindPackStreamArrayIndex(packStreamIndex);
if (index < 0)
return E_FAIL;
packSizesPointers.Add(&packSizes[index]);
}
}
_mixerCoderCommon->SetCoderInfo(i,
&packSizesPointers.Front(),
&unPackSizesPointers.Front());
}
UInt32 mainCoder, temp;
bindInfo.FindOutStream(bindInfo.OutStreams[0], mainCoder, temp);
if (_multiThread)
_mixerCoderMTSpec->SetProgressCoderIndex(mainCoder);
/*
else
_mixerCoderSTSpec->SetProgressCoderIndex(mainCoder);;
*/
if (numCoders == 0)
return 0;
CRecordVector<ISequentialInStream *> inStreamPointers;
inStreamPointers.Reserve(inStreams.Size());
for (i = 0; i < inStreams.Size(); i++)
inStreamPointers.Add(inStreams[i]);
ISequentialOutStream *outStreamPointer = outStream;
return _mixerCoder->Code(&inStreamPointers.Front(), NULL,
inStreams.Size(), &outStreamPointer, NULL, 1, compressProgress);
}
}}

View file

@@ -0,0 +1,71 @@
// 7zDecode.h
#ifndef __7Z_DECODE_H
#define __7Z_DECODE_H
#include "../../IStream.h"
#include "../../IPassword.h"
#include "../Common/CoderMixer2.h"
#include "../Common/CoderMixer2MT.h"
#ifdef _ST_MODE
#include "../Common/CoderMixer2ST.h"
#endif
#ifndef EXCLUDE_COM
#include "../Common/CoderLoader.h"
#endif
#include "7zItem.h"
namespace NArchive {
namespace N7z {
struct CBindInfoEx: public NCoderMixer2::CBindInfo
{
CRecordVector<CMethodID> CoderMethodIDs;
void Clear()
{
CBindInfo::Clear();
CoderMethodIDs.Clear();
}
};
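// Folder decoder that caches the coder mixer between calls so it is only rebuilt
// when the coder chain changes from one folder to the next.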
class CDecoder
{
#ifndef EXCLUDE_COM
CCoderLibraries _libraries;
#endif
bool _bindInfoExPrevIsDefinded;
CBindInfoEx _bindInfoExPrev;
bool _multiThread;
#ifdef _ST_MODE
NCoderMixer2::CCoderMixer2ST *_mixerCoderSTSpec;
#endif
NCoderMixer2::CCoderMixer2MT *_mixerCoderMTSpec;
NCoderMixer2::CCoderMixer2 *_mixerCoderCommon;
CMyComPtr<ICompressCoder2> _mixerCoder;
CObjectVector<CMyComPtr<IUnknown> > _decoders;
// CObjectVector<CMyComPtr<ICompressCoder2> > _decoders2;
public:
CDecoder(bool multiThread);
HRESULT Decode(IInStream *inStream,
UInt64 startPos,
const UInt64 *packSizes,
const CFolder &folder,
ISequentialOutStream *outStream,
ICompressProgressInfo *compressProgress
#ifndef _NO_CRYPTO
, ICryptoGetTextPassword *getTextPasswordSpec
#endif
#ifdef COMPRESS_MT
, bool mtMode, UInt32 numThreads
#endif
);
};
}}
#endif

View file

@@ -0,0 +1,265 @@
// 7zExtract.cpp
#include "StdAfx.h"
#include "7zHandler.h"
#include "7zFolderOutStream.h"
#include "7zMethods.h"
#include "7zDecode.h"
// #include "7z1Decode.h"
#include "../../../Common/ComTry.h"
#include "../../Common/StreamObjects.h"
#include "../../Common/ProgressUtils.h"
#include "../../Common/LimitedStreams.h"
namespace NArchive {
namespace N7z {
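// Per-folder extraction bookkeeping: which files inside the folder were
// requested and the folder's total unpacked size (used for progress reporting).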
struct CExtractFolderInfo
{
#ifdef _7Z_VOL
int VolumeIndex;
#endif
CNum FileIndex;
CNum FolderIndex;
CBoolVector ExtractStatuses;
UInt64 UnPackSize;
CExtractFolderInfo(
#ifdef _7Z_VOL
int volumeIndex,
#endif
CNum fileIndex, CNum folderIndex):
#ifdef _7Z_VOL
VolumeIndex(volumeIndex),
#endif
FileIndex(fileIndex),
FolderIndex(folderIndex),
UnPackSize(0)
{
if (fileIndex != kNumNoIndex)
{
ExtractStatuses.Reserve(1);
ExtractStatuses.Add(true);
}
};
};
STDMETHODIMP CHandler::Extract(const UInt32* indices, UInt32 numItems,
Int32 testModeSpec, IArchiveExtractCallback *extractCallbackSpec)
{
COM_TRY_BEGIN
bool testMode = (testModeSpec != 0);
CMyComPtr<IArchiveExtractCallback> extractCallback = extractCallbackSpec;
UInt64 importantTotalUnPacked = 0;
bool allFilesMode = (numItems == UInt32(-1));
if (allFilesMode)
numItems =
#ifdef _7Z_VOL
_refs.Size();
#else
_database.Files.Size();
#endif
if(numItems == 0)
return S_OK;
/*
if(_volumes.Size() != 1)
return E_FAIL;
const CVolume &volume = _volumes.Front();
const CArchiveDatabaseEx &_database = volume.Database;
IInStream *_inStream = volume.Stream;
*/
CObjectVector<CExtractFolderInfo> extractFolderInfoVector;
for(UInt32 ii = 0; ii < numItems; ii++)
{
// UInt32 fileIndex = allFilesMode ? indexIndex : indices[indexIndex];
UInt32 ref2Index = allFilesMode ? ii : indices[ii];
// const CRef2 &ref2 = _refs[ref2Index];
// for(UInt32 ri = 0; ri < ref2.Refs.Size(); ri++)
{
#ifdef _7Z_VOL
// const CRef &ref = ref2.Refs[ri];
const CRef &ref = _refs[ref2Index];
int volumeIndex = ref.VolumeIndex;
const CVolume &volume = _volumes[volumeIndex];
const CArchiveDatabaseEx &database = volume.Database;
UInt32 fileIndex = ref.ItemIndex;
#else
const CArchiveDatabaseEx &database = _database;
UInt32 fileIndex = ref2Index;
#endif
CNum folderIndex = database.FileIndexToFolderIndexMap[fileIndex];
if (folderIndex == kNumNoIndex)
{
extractFolderInfoVector.Add(CExtractFolderInfo(
#ifdef _7Z_VOL
volumeIndex,
#endif
fileIndex, kNumNoIndex));
continue;
}
if (extractFolderInfoVector.IsEmpty() ||
folderIndex != extractFolderInfoVector.Back().FolderIndex
#ifdef _7Z_VOL
|| volumeIndex != extractFolderInfoVector.Back().VolumeIndex
#endif
)
{
extractFolderInfoVector.Add(CExtractFolderInfo(
#ifdef _7Z_VOL
volumeIndex,
#endif
kNumNoIndex, folderIndex));
const CFolder &folderInfo = database.Folders[folderIndex];
UInt64 unPackSize = folderInfo.GetUnPackSize();
importantTotalUnPacked += unPackSize;
extractFolderInfoVector.Back().UnPackSize = unPackSize;
}
CExtractFolderInfo &efi = extractFolderInfoVector.Back();
// const CFolderInfo &folderInfo = m_dam_Folders[folderIndex];
CNum startIndex = database.FolderStartFileIndex[folderIndex];
for (CNum index = efi.ExtractStatuses.Size();
index <= fileIndex - startIndex; index++)
{
// UInt64 unPackSize = _database.Files[startIndex + index].UnPackSize;
// Count partial_folder_size
// efi.UnPackSize += unPackSize;
// importantTotalUnPacked += unPackSize;
efi.ExtractStatuses.Add(index == fileIndex - startIndex);
}
}
}
extractCallback->SetTotal(importantTotalUnPacked);
CDecoder decoder(
#ifdef _ST_MODE
false
#else
true
#endif
);
// CDecoder1 decoder;
UInt64 currentImportantTotalUnPacked = 0;
UInt64 totalFolderUnPacked;
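// Decode folder by folder; CFolderOutStream splits the decoded data back into
// the individual files and reports per-file results to the extract callback.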
for(int i = 0; i < extractFolderInfoVector.Size(); i++,
currentImportantTotalUnPacked += totalFolderUnPacked)
{
const CExtractFolderInfo &efi = extractFolderInfoVector[i];
totalFolderUnPacked = efi.UnPackSize;
RINOK(extractCallback->SetCompleted(&currentImportantTotalUnPacked));
CFolderOutStream *folderOutStream = new CFolderOutStream;
CMyComPtr<ISequentialOutStream> outStream(folderOutStream);
#ifdef _7Z_VOL
const CVolume &volume = _volumes[efi.VolumeIndex];
const CArchiveDatabaseEx &database = volume.Database;
#else
const CArchiveDatabaseEx &database = _database;
#endif
CNum startIndex;
if (efi.FileIndex != kNumNoIndex)
startIndex = efi.FileIndex;
else
startIndex = database.FolderStartFileIndex[efi.FolderIndex];
HRESULT result = folderOutStream->Init(&database,
#ifdef _7Z_VOL
volume.StartRef2Index,
#else
0,
#endif
startIndex,
&efi.ExtractStatuses, extractCallback, testMode);
RINOK(result);
if (efi.FileIndex != kNumNoIndex)
continue;
CNum folderIndex = efi.FolderIndex;
const CFolder &folderInfo = database.Folders[folderIndex];
CLocalProgress *localProgressSpec = new CLocalProgress;
CMyComPtr<ICompressProgressInfo> progress = localProgressSpec;
localProgressSpec->Init(extractCallback, false);
CLocalCompressProgressInfo *localCompressProgressSpec =
new CLocalCompressProgressInfo;
CMyComPtr<ICompressProgressInfo> compressProgress = localCompressProgressSpec;
localCompressProgressSpec->Init(progress, NULL, &currentImportantTotalUnPacked);
CNum packStreamIndex = database.FolderStartPackStreamIndex[folderIndex];
UInt64 folderStartPackPos = database.GetFolderStreamPos(folderIndex, 0);
#ifndef _NO_CRYPTO
CMyComPtr<ICryptoGetTextPassword> getTextPassword;
if (extractCallback)
extractCallback.QueryInterface(IID_ICryptoGetTextPassword, &getTextPassword);
#endif
try
{
HRESULT result = decoder.Decode(
#ifdef _7Z_VOL
volume.Stream,
#else
_inStream,
#endif
folderStartPackPos,
&database.PackSizes[packStreamIndex],
folderInfo,
outStream,
compressProgress
#ifndef _NO_CRYPTO
, getTextPassword
#endif
#ifdef COMPRESS_MT
, true, _numThreads
#endif
);
if (result == S_FALSE)
{
RINOK(folderOutStream->FlushCorrupted(NArchive::NExtract::NOperationResult::kDataError));
continue;
}
if (result == E_NOTIMPL)
{
RINOK(folderOutStream->FlushCorrupted(NArchive::NExtract::NOperationResult::kUnSupportedMethod));
continue;
}
if (result != S_OK)
return result;
if (folderOutStream->WasWritingFinished() != S_OK)
{
RINOK(folderOutStream->FlushCorrupted(NArchive::NExtract::NOperationResult::kDataError));
continue;
}
}
catch(...)
{
RINOK(folderOutStream->FlushCorrupted(NArchive::NExtract::NOperationResult::kDataError));
continue;
}
}
return S_OK;
COM_TRY_END
}
}}

View file

@@ -0,0 +1,161 @@
// 7zFolderOutStream.cpp
#include "StdAfx.h"
#include "7zFolderOutStream.h"
namespace NArchive {
namespace N7z {
CFolderOutStream::CFolderOutStream()
{
_outStreamWithHashSpec = new COutStreamWithCRC;
_outStreamWithHash = _outStreamWithHashSpec;
}
HRESULT CFolderOutStream::Init(
const CArchiveDatabaseEx *archiveDatabase,
UInt32 ref2Offset,
UInt32 startIndex,
const CBoolVector *extractStatuses,
IArchiveExtractCallback *extractCallback,
bool testMode)
{
_archiveDatabase = archiveDatabase;
_ref2Offset = ref2Offset;
_startIndex = startIndex;
_extractStatuses = extractStatuses;
_extractCallback = extractCallback;
_testMode = testMode;
_currentIndex = 0;
_fileIsOpen = false;
return WriteEmptyFiles();
}
HRESULT CFolderOutStream::OpenFile()
{
Int32 askMode;
if((*_extractStatuses)[_currentIndex])
askMode = _testMode ?
NArchive::NExtract::NAskMode::kTest :
NArchive::NExtract::NAskMode::kExtract;
else
askMode = NArchive::NExtract::NAskMode::kSkip;
CMyComPtr<ISequentialOutStream> realOutStream;
UInt32 index = _startIndex + _currentIndex;
RINOK(_extractCallback->GetStream(_ref2Offset + index, &realOutStream, askMode));
_outStreamWithHashSpec->Init(realOutStream);
if (askMode == NArchive::NExtract::NAskMode::kExtract &&
(!realOutStream))
{
const CFileItem &fileInfo = _archiveDatabase->Files[index];
if (!fileInfo.IsAnti && !fileInfo.IsDirectory)
askMode = NArchive::NExtract::NAskMode::kSkip;
}
return _extractCallback->PrepareOperation(askMode);
}
HRESULT CFolderOutStream::WriteEmptyFiles()
{
for(;_currentIndex < _extractStatuses->Size(); _currentIndex++)
{
UInt32 index = _startIndex + _currentIndex;
const CFileItem &fileInfo = _archiveDatabase->Files[index];
if (!fileInfo.IsAnti && !fileInfo.IsDirectory && fileInfo.UnPackSize != 0)
return S_OK;
RINOK(OpenFile());
RINOK(_extractCallback->SetOperationResult(
NArchive::NExtract::NOperationResult::kOK));
_outStreamWithHashSpec->ReleaseStream();
}
return S_OK;
}
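// Receives the decoded folder data and routes it into the current file's output
// stream; when a file boundary is reached, verifies its CRC (if defined),
// reports the result, and moves on, flushing any empty files that follow.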
STDMETHODIMP CFolderOutStream::Write(const void *data,
UInt32 size, UInt32 *processedSize)
{
UInt32 realProcessedSize = 0;
while(_currentIndex < _extractStatuses->Size())
{
if (_fileIsOpen)
{
UInt32 index = _startIndex + _currentIndex;
const CFileItem &fileInfo = _archiveDatabase->Files[index];
UInt64 fileSize = fileInfo.UnPackSize;
UInt32 numBytesToWrite = (UInt32)MyMin(fileSize - _filePos,
UInt64(size - realProcessedSize));
UInt32 processedSizeLocal;
RINOK(_outStreamWithHash->Write((const Byte *)data + realProcessedSize,
numBytesToWrite, &processedSizeLocal));
_filePos += processedSizeLocal;
realProcessedSize += processedSizeLocal;
if (_filePos == fileSize)
{
bool digestsAreEqual;
if (fileInfo.IsFileCRCDefined)
digestsAreEqual = fileInfo.FileCRC == _outStreamWithHashSpec->GetCRC();
else
digestsAreEqual = true;
RINOK(_extractCallback->SetOperationResult(
digestsAreEqual ?
NArchive::NExtract::NOperationResult::kOK :
NArchive::NExtract::NOperationResult::kCRCError));
_outStreamWithHashSpec->ReleaseStream();
_fileIsOpen = false;
_currentIndex++;
}
if (realProcessedSize == size)
{
if (processedSize != NULL)
*processedSize = realProcessedSize;
return WriteEmptyFiles();
}
}
else
{
RINOK(OpenFile());
_fileIsOpen = true;
_filePos = 0;
}
}
if (processedSize != NULL)
*processedSize = size;
return S_OK;
}
HRESULT CFolderOutStream::FlushCorrupted(Int32 resultEOperationResult)
{
while(_currentIndex < _extractStatuses->Size())
{
if (_fileIsOpen)
{
RINOK(_extractCallback->SetOperationResult(resultEOperationResult));
_outStreamWithHashSpec->ReleaseStream();
_fileIsOpen = false;
_currentIndex++;
}
else
{
RINOK(OpenFile());
_fileIsOpen = true;
}
}
return S_OK;
}
HRESULT CFolderOutStream::WasWritingFinished()
{
if (_currentIndex == _extractStatuses->Size())
return S_OK;
return E_FAIL;
}
}}

View file

@@ -0,0 +1,57 @@
// 7zFolderOutStream.h
#ifndef __7Z_FOLDEROUTSTREAM_H
#define __7Z_FOLDEROUTSTREAM_H
#include "7zIn.h"
#include "../../IStream.h"
#include "../IArchive.h"
#include "../Common/OutStreamWithCRC.h"
namespace NArchive {
namespace N7z {
class CFolderOutStream:
public ISequentialOutStream,
public CMyUnknownImp
{
public:
MY_UNKNOWN_IMP
CFolderOutStream();
STDMETHOD(Write)(const void *data, UInt32 size, UInt32 *processedSize);
private:
COutStreamWithCRC *_outStreamWithHashSpec;
CMyComPtr<ISequentialOutStream> _outStreamWithHash;
const CArchiveDatabaseEx *_archiveDatabase;
const CBoolVector *_extractStatuses;
UInt32 _startIndex;
UInt32 _ref2Offset;
int _currentIndex;
// UInt64 _currentDataPos;
CMyComPtr<IArchiveExtractCallback> _extractCallback;
bool _testMode;
bool _fileIsOpen;
UInt64 _filePos;
HRESULT OpenFile();
HRESULT WriteEmptyFiles();
public:
HRESULT Init(
const CArchiveDatabaseEx *archiveDatabase,
UInt32 ref2Offset,
UInt32 startIndex,
const CBoolVector *extractStatuses,
IArchiveExtractCallback *extractCallback,
bool testMode);
HRESULT FlushCorrupted(Int32 resultEOperationResult);
HRESULT WasWritingFinished();
};
}}
#endif

View file

@@ -0,0 +1,757 @@
// 7zHandler.cpp
#include "StdAfx.h"
#include "7zHandler.h"
#include "7zProperties.h"
#include "../../../Common/IntToString.h"
#include "../../../Common/ComTry.h"
#include "../../../Windows/Defs.h"
#include "../Common/ItemNameUtils.h"
#ifdef _7Z_VOL
#include "../Common/MultiStream.h"
#endif
#ifdef __7Z_SET_PROPERTIES
#ifdef EXTRACT_ONLY
#include "../Common/ParseProperties.h"
#endif
#endif
using namespace NWindows;
namespace NArchive {
namespace N7z {
CHandler::CHandler()
{
#ifdef COMPRESS_MT
_numThreads = NWindows::NSystem::GetNumberOfProcessors();
#endif
#ifndef EXTRACT_ONLY
Init();
#endif
#ifndef EXCLUDE_COM
LoadMethodMap();
#endif
}
STDMETHODIMP CHandler::GetNumberOfItems(UInt32 *numItems)
{
COM_TRY_BEGIN
*numItems =
#ifdef _7Z_VOL
_refs.Size();
#else
*numItems = _database.Files.Size();
#endif
return S_OK;
COM_TRY_END
}
STDMETHODIMP CHandler::GetArchiveProperty(PROPID propID, PROPVARIANT *value)
{
value->vt = VT_EMPTY;
return S_OK;
}
#ifdef _SFX
STDMETHODIMP CHandler::GetNumberOfProperties(UInt32 *numProperties)
{
return E_NOTIMPL;
}
STDMETHODIMP CHandler::GetPropertyInfo(UInt32 index,
BSTR *name, PROPID *propID, VARTYPE *varType)
{
return E_NOTIMPL;
}
#endif
STDMETHODIMP CHandler::GetNumberOfArchiveProperties(UInt32 *numProperties)
{
*numProperties = 0;
return S_OK;
}
STDMETHODIMP CHandler::GetArchivePropertyInfo(UInt32 index,
BSTR *name, PROPID *propID, VARTYPE *varType)
{
return E_NOTIMPL;
}
static void MySetFileTime(bool timeDefined, FILETIME unixTime,
NWindows::NCOM::CPropVariant &propVariant)
{
if (timeDefined)
propVariant = unixTime;
}
/*
inline static wchar_t GetHex(Byte value)
{
return (value < 10) ? ('0' + value) : ('A' + (value - 10));
}
static UString ConvertBytesToHexString(const Byte *data, UInt32 size)
{
UString result;
for (UInt32 i = 0; i < size; i++)
{
Byte b = data[i];
result += GetHex(b >> 4);
result += GetHex(b & 0xF);
}
return result;
}
*/
#ifndef _SFX
static UString ConvertUInt32ToString(UInt32 value)
{
wchar_t buffer[32];
ConvertUInt64ToString(value, buffer);
return buffer;
}
static UString GetStringForSizeValue(UInt32 value)
{
for (int i = 31; i >= 0; i--)
if ((UInt32(1) << i) == value)
return ConvertUInt32ToString(i);
UString result;
if (value % (1 << 20) == 0)
{
result += ConvertUInt32ToString(value >> 20);
result += L"m";
}
else if (value % (1 << 10) == 0)
{
result += ConvertUInt32ToString(value >> 10);
result += L"k";
}
else
{
result += ConvertUInt32ToString(value);
result += L"b";
}
return result;
}
static CMethodID k_Copy = { { 0x0 }, 1 };
static CMethodID k_LZMA = { { 0x3, 0x1, 0x1 }, 3 };
static CMethodID k_BCJ = { { 0x3, 0x3, 0x1, 0x3 }, 4 };
static CMethodID k_BCJ2 = { { 0x3, 0x3, 0x1, 0x1B }, 4 };
static CMethodID k_PPMD = { { 0x3, 0x4, 0x1 }, 3 };
static CMethodID k_Deflate = { { 0x4, 0x1, 0x8 }, 3 };
static CMethodID k_BZip2 = { { 0x4, 0x2, 0x2 }, 3 };
static inline char GetHex(Byte value)
{
return (value < 10) ? ('0' + value) : ('A' + (value - 10));
}
static inline UString GetHex2(Byte value)
{
UString result;
result += GetHex(value >> 4);
result += GetHex(value & 0xF);
return result;
}
#endif
static inline UInt32 GetUInt32FromMemLE(const Byte *p)
{
return p[0] | (((UInt32)p[1]) << 8) | (((UInt32)p[2]) << 16) | (((UInt32)p[3]) << 24);
}
STDMETHODIMP CHandler::GetProperty(UInt32 index, PROPID propID, PROPVARIANT *value)
{
COM_TRY_BEGIN
NWindows::NCOM::CPropVariant propVariant;
/*
const CRef2 &ref2 = _refs[index];
if (ref2.Refs.IsEmpty())
return E_FAIL;
const CRef &ref = ref2.Refs.Front();
*/
#ifdef _7Z_VOL
const CRef &ref = _refs[index];
const CVolume &volume = _volumes[ref.VolumeIndex];
const CArchiveDatabaseEx &_database = volume.Database;
UInt32 index2 = ref.ItemIndex;
const CFileItem &item = _database.Files[index2];
#else
const CFileItem &item = _database.Files[index];
UInt32 index2 = index;
#endif
switch(propID)
{
case kpidPath:
{
if (!item.Name.IsEmpty())
propVariant = NItemName::GetOSName(item.Name);
break;
}
case kpidIsFolder:
propVariant = item.IsDirectory;
break;
case kpidSize:
{
propVariant = item.UnPackSize;
// propVariant = ref2.UnPackSize;
break;
}
case kpidPosition:
{
/*
if (ref2.Refs.Size() > 1)
propVariant = ref2.StartPos;
else
*/
if (item.IsStartPosDefined)
propVariant = item.StartPos;
break;
}
case kpidPackedSize:
{
// propVariant = ref2.PackSize;
{
CNum folderIndex = _database.FileIndexToFolderIndexMap[index2];
if (folderIndex != kNumNoIndex)
{
if (_database.FolderStartFileIndex[folderIndex] == (CNum)index2)
propVariant = _database.GetFolderFullPackSize(folderIndex);
/*
else
propVariant = UInt64(0);
*/
}
else
propVariant = UInt64(0);
}
break;
}
case kpidLastAccessTime:
MySetFileTime(item.IsLastAccessTimeDefined, item.LastAccessTime, propVariant);
break;
case kpidCreationTime:
MySetFileTime(item.IsCreationTimeDefined, item.CreationTime, propVariant);
break;
case kpidLastWriteTime:
MySetFileTime(item.IsLastWriteTimeDefined, item.LastWriteTime, propVariant);
break;
case kpidAttributes:
if (item.AreAttributesDefined)
propVariant = item.Attributes;
break;
case kpidCRC:
if (item.IsFileCRCDefined)
propVariant = item.FileCRC;
break;
#ifndef _SFX
case kpidMethod:
{
CNum folderIndex = _database.FileIndexToFolderIndexMap[index2];
if (folderIndex != kNumNoIndex)
{
const CFolder &folderInfo = _database.Folders[folderIndex];
UString methodsString;
for (int i = folderInfo.Coders.Size() - 1; i >= 0; i--)
{
const CCoderInfo &coderInfo = folderInfo.Coders[i];
if (!methodsString.IsEmpty())
methodsString += L' ';
CMethodInfo methodInfo;
bool methodIsKnown;
for (int j = 0; j < coderInfo.AltCoders.Size(); j++)
{
if (j > 0)
methodsString += L"|";
const CAltCoderInfo &altCoderInfo = coderInfo.AltCoders[j];
UString methodName;
#ifdef NO_REGISTRY
methodIsKnown = true;
if (altCoderInfo.MethodID == k_Copy)
methodName = L"Copy";
else if (altCoderInfo.MethodID == k_LZMA)
methodName = L"LZMA";
else if (altCoderInfo.MethodID == k_BCJ)
methodName = L"BCJ";
else if (altCoderInfo.MethodID == k_BCJ2)
methodName = L"BCJ2";
else if (altCoderInfo.MethodID == k_PPMD)
methodName = L"PPMD";
else if (altCoderInfo.MethodID == k_Deflate)
methodName = L"Deflate";
else if (altCoderInfo.MethodID == k_BZip2)
methodName = L"BZip2";
else
methodIsKnown = false;
#else
methodIsKnown = GetMethodInfo(
altCoderInfo.MethodID, methodInfo);
methodName = methodInfo.Name;
#endif
if (methodIsKnown)
{
methodsString += methodName;
if (altCoderInfo.MethodID == k_LZMA)
{
if (altCoderInfo.Properties.GetCapacity() >= 5)
{
methodsString += L":";
UInt32 dicSize = GetUInt32FromMemLE(
((const Byte *)altCoderInfo.Properties + 1));
methodsString += GetStringForSizeValue(dicSize);
}
}
else if (altCoderInfo.MethodID == k_PPMD)
{
if (altCoderInfo.Properties.GetCapacity() >= 5)
{
Byte order = *(const Byte *)altCoderInfo.Properties;
methodsString += L":o";
methodsString += ConvertUInt32ToString(order);
methodsString += L":mem";
UInt32 dicSize = GetUInt32FromMemLE(
((const Byte *)altCoderInfo.Properties + 1));
methodsString += GetStringForSizeValue(dicSize);
}
}
else
{
if (altCoderInfo.Properties.GetCapacity() > 0)
{
methodsString += L":[";
for (size_t bi = 0; bi < altCoderInfo.Properties.GetCapacity(); bi++)
{
if (bi > 2 && bi + 1 < altCoderInfo.Properties.GetCapacity())
{
methodsString += L"..";
break;
}
else
methodsString += GetHex2(altCoderInfo.Properties[bi]);
}
methodsString += L"]";
}
}
}
else
{
methodsString += altCoderInfo.MethodID.ConvertToString();
}
}
}
propVariant = methodsString;
}
}
break;
case kpidBlock:
{
CNum folderIndex = _database.FileIndexToFolderIndexMap[index2];
if (folderIndex != kNumNoIndex)
propVariant = (UInt32)folderIndex;
}
break;
case kpidPackedSize0:
case kpidPackedSize1:
case kpidPackedSize2:
case kpidPackedSize3:
case kpidPackedSize4:
{
CNum folderIndex = _database.FileIndexToFolderIndexMap[index2];
if (folderIndex != kNumNoIndex)
{
const CFolder &folderInfo = _database.Folders[folderIndex];
if (_database.FolderStartFileIndex[folderIndex] == (CNum)index2 &&
folderInfo.PackStreams.Size() > (int)(propID - kpidPackedSize0))
{
propVariant = _database.GetFolderPackStreamSize(folderIndex, propID - kpidPackedSize0);
}
else
propVariant = UInt64(0);
}
else
propVariant = UInt64(0);
}
break;
#endif
case kpidIsAnti:
propVariant = item.IsAnti;
break;
}
propVariant.Detach(value);
return S_OK;
COM_TRY_END
}
static const wchar_t *kExt = L"7z";
static const wchar_t *kAfterPart = L".7z";
#ifdef _7Z_VOL
class CVolumeName
{
bool _first;
UString _unchangedPart;
UString _changedPart;
UString _afterPart;
public:
bool InitName(const UString &name)
{
_first = true;
int dotPos = name.ReverseFind('.');
UString basePart = name;
if (dotPos >= 0)
{
UString ext = name.Mid(dotPos + 1);
if (ext.CompareNoCase(kExt)==0 ||
ext.CompareNoCase(L"EXE") == 0)
{
_afterPart = kAfterPart;
basePart = name.Left(dotPos);
}
}
int numLetters = 1;
bool splitStyle = false;
if (basePart.Right(numLetters) == L"1")
{
while (numLetters < basePart.Length())
{
if (basePart[basePart.Length() - numLetters - 1] != '0')
break;
numLetters++;
}
}
else
return false;
_unchangedPart = basePart.Left(basePart.Length() - numLetters);
_changedPart = basePart.Right(numLetters);
return true;
}
UString GetNextName()
{
UString newName;
// if (_newStyle || !_first)
{
int i;
int numLetters = _changedPart.Length();
for (i = numLetters - 1; i >= 0; i--)
{
wchar_t c = _changedPart[i];
if (c == L'9')
{
c = L'0';
newName = c + newName;
if (i == 0)
newName = UString(L'1') + newName;
continue;
}
c++;
newName = UString(c) + newName;
i--;
for (; i >= 0; i--)
newName = _changedPart[i] + newName;
break;
}
_changedPart = newName;
}
_first = false;
return _unchangedPart + _changedPart + _afterPart;
}
};
#endif
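// Opens the archive: reads and decodes the header database (prompting for a
// password if needed); in _7Z_VOL builds it keeps requesting successive volumes
// from the open callback until the chain ends.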
STDMETHODIMP CHandler::Open(IInStream *stream,
const UInt64 *maxCheckStartPosition,
IArchiveOpenCallback *openArchiveCallback)
{
COM_TRY_BEGIN
Close();
#ifndef _SFX
_fileInfoPopIDs.Clear();
#endif
try
{
CMyComPtr<IArchiveOpenCallback> openArchiveCallbackTemp = openArchiveCallback;
#ifdef _7Z_VOL
CVolumeName seqName;
CMyComPtr<IArchiveOpenVolumeCallback> openVolumeCallback;
#endif
#ifndef _NO_CRYPTO
CMyComPtr<ICryptoGetTextPassword> getTextPassword;
if (openArchiveCallback)
{
openArchiveCallbackTemp.QueryInterface(
IID_ICryptoGetTextPassword, &getTextPassword);
}
#endif
#ifdef _7Z_VOL
if (openArchiveCallback)
{
openArchiveCallbackTemp.QueryInterface(IID_IArchiveOpenVolumeCallback, &openVolumeCallback);
}
while(true)
{
CMyComPtr<IInStream> inStream;
if (!_volumes.IsEmpty())
{
if (!openVolumeCallback)
break;
if(_volumes.Size() == 1)
{
UString baseName;
{
NCOM::CPropVariant propVariant;
RINOK(openVolumeCallback->GetProperty(kpidName, &propVariant));
if (propVariant.vt != VT_BSTR)
break;
baseName = propVariant.bstrVal;
}
seqName.InitName(baseName);
}
UString fullName = seqName.GetNextName();
HRESULT result = openVolumeCallback->GetStream(fullName, &inStream);
if (result == S_FALSE)
break;
if (result != S_OK)
return result;
if (!stream)
break;
}
else
inStream = stream;
CInArchive archive;
RINOK(archive.Open(inStream, maxCheckStartPosition));
_volumes.Add(CVolume());
CVolume &volume = _volumes.Back();
CArchiveDatabaseEx &database = volume.Database;
volume.Stream = inStream;
volume.StartRef2Index = _refs.Size();
HRESULT result = archive.ReadDatabase(database
#ifndef _NO_CRYPTO
, getTextPassword
#endif
);
if (result != S_OK)
{
_volumes.Clear();
return result;
}
database.Fill();
for(int i = 0; i < database.Files.Size(); i++)
{
CRef refNew;
refNew.VolumeIndex = _volumes.Size() - 1;
refNew.ItemIndex = i;
_refs.Add(refNew);
/*
const CFileItem &file = database.Files[i];
int j;
*/
/*
for (j = _refs.Size() - 1; j >= 0; j--)
{
CRef2 &ref2 = _refs[j];
const CRef &ref = ref2.Refs.Back();
const CVolume &volume2 = _volumes[ref.VolumeIndex];
const CArchiveDatabaseEx &database2 = volume2.Database;
const CFileItem &file2 = database2.Files[ref.ItemIndex];
if (file2.Name.CompareNoCase(file.Name) == 0)
{
if (!file.IsStartPosDefined)
continue;
if (file.StartPos != ref2.StartPos + ref2.UnPackSize)
continue;
ref2.Refs.Add(refNew);
break;
}
}
*/
/*
j = -1;
if (j < 0)
{
CRef2 ref2New;
ref2New.Refs.Add(refNew);
j = _refs.Add(ref2New);
}
CRef2 &ref2 = _refs[j];
ref2.UnPackSize += file.UnPackSize;
ref2.PackSize += database.GetFilePackSize(i);
if (ref2.Refs.Size() == 1 && file.IsStartPosDefined)
ref2.StartPos = file.StartPos;
*/
}
if (database.Files.Size() != 1)
break;
const CFileItem &file = database.Files.Front();
if (!file.IsStartPosDefined)
break;
}
#else
CInArchive archive;
RINOK(archive.Open(stream, maxCheckStartPosition));
HRESULT result = archive.ReadDatabase(_database
#ifndef _NO_CRYPTO
, getTextPassword
#endif
);
RINOK(result);
_database.Fill();
_inStream = stream;
#endif
}
catch(...)
{
Close();
return S_FALSE;
}
// _inStream = stream;
#ifndef _SFX
FillPopIDs();
#endif
return S_OK;
COM_TRY_END
}
STDMETHODIMP CHandler::Close()
{
COM_TRY_BEGIN
#ifdef _7Z_VOL
_volumes.Clear();
_refs.Clear();
#else
_inStream.Release();
_database.Clear();
#endif
return S_OK;
COM_TRY_END
}
#ifdef _7Z_VOL
STDMETHODIMP CHandler::GetStream(UInt32 index, ISequentialInStream **stream)
{
if (index != 0)
return E_INVALIDARG;
*stream = 0;
CMultiStream *streamSpec = new CMultiStream;
CMyComPtr<ISequentialInStream> streamTemp = streamSpec;
UInt64 pos = 0;
const UString *fileName;
for (int i = 0; i < _refs.Size(); i++)
{
const CRef &ref = _refs[i];
const CVolume &volume = _volumes[ref.VolumeIndex];
const CArchiveDatabaseEx &database = volume.Database;
const CFileItem &file = database.Files[ref.ItemIndex];
if (i == 0)
fileName = &file.Name;
else
if (fileName->Compare(file.Name) != 0)
return S_FALSE;
if (!file.IsStartPosDefined)
return S_FALSE;
if (file.StartPos != pos)
return S_FALSE;
CNum folderIndex = database.FileIndexToFolderIndexMap[ref.ItemIndex];
if (folderIndex == kNumNoIndex)
{
if (file.UnPackSize != 0)
return E_FAIL;
continue;
}
if (database.NumUnPackStreamsVector[folderIndex] != 1)
return S_FALSE;
const CFolder &folder = database.Folders[folderIndex];
if (folder.Coders.Size() != 1)
return S_FALSE;
const CCoderInfo &coder = folder.Coders.Front();
if (coder.NumInStreams != 1 || coder.NumOutStreams != 1)
return S_FALSE;
const CAltCoderInfo &altCoder = coder.AltCoders.Front();
if (altCoder.MethodID.IDSize != 1 || altCoder.MethodID.ID[0] != 0)
return S_FALSE;
pos += file.UnPackSize;
CMultiStream::CSubStreamInfo subStreamInfo;
subStreamInfo.Stream = volume.Stream;
subStreamInfo.Pos = database.GetFolderStreamPos(folderIndex, 0);
subStreamInfo.Size = file.UnPackSize;
streamSpec->Streams.Add(subStreamInfo);
}
streamSpec->Init();
*stream = streamTemp.Detach();
return S_OK;
}
#endif
#ifdef __7Z_SET_PROPERTIES
#ifdef EXTRACT_ONLY
STDMETHODIMP CHandler::SetProperties(const wchar_t **names, const PROPVARIANT *values, Int32 numProperties)
{
COM_TRY_BEGIN
const UInt32 numProcessors = NSystem::GetNumberOfProcessors();
_numThreads = numProcessors;
for (int i = 0; i < numProperties; i++)
{
UString name = names[i];
name.MakeUpper();
if (name.IsEmpty())
return E_INVALIDARG;
const PROPVARIANT &value = values[i];
UInt32 number;
int index = ParseStringToUInt32(name, number);
if (index == 0)
{
if(name.Left(2).CompareNoCase(L"MT") == 0)
{
RINOK(ParseMtProp(name.Mid(2), value, numProcessors, _numThreads));
continue;
}
else
return E_INVALIDARG;
}
}
return S_OK;
COM_TRY_END
}
#endif
#endif
}}

View file

@@ -0,0 +1,234 @@
// 7z/Handler.h
#ifndef __7Z_HANDLER_H
#define __7Z_HANDLER_H
#include "../IArchive.h"
#include "7zIn.h"
#include "7zCompressionMode.h"
#ifndef _SFX
#include "7zMethods.h"
#endif
#ifdef COMPRESS_MT
#include "../../../Windows/System.h"
#endif
namespace NArchive {
namespace N7z {
#ifdef _7Z_VOL
struct CRef
{
int VolumeIndex;
int ItemIndex;
};
/*
struct CRef2
{
CRecordVector<CRef> Refs;
UInt64 UnPackSize;
UInt64 PackSize;
UInt64 StartPos;
CRef2(): UnPackSize(0), PackSize(0), StartPos(0) {}
};
*/
struct CVolume
{
int StartRef2Index;
CMyComPtr<IInStream> Stream;
CArchiveDatabaseEx Database;
};
#endif
#ifndef EXTRACT_ONLY
struct COneMethodInfo
{
CObjectVector<CProperty> CoderProperties;
UString MethodName;
};
#endif
// {23170F69-40C1-278A-1000-000110070000}
DEFINE_GUID(CLSID_CFormat7z,
0x23170F69, 0x40C1, 0x278A, 0x10, 0x00, 0x00, 0x01, 0x10, 0x07, 0x00, 0x00);
#ifndef __7Z_SET_PROPERTIES
#ifdef EXTRACT_ONLY
#ifdef COMPRESS_MT
#define __7Z_SET_PROPERTIES
#endif
#else
#define __7Z_SET_PROPERTIES
#endif
#endif
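// The 7z archive handler: implements IInArchive plus, depending on build flags,
// volume stream access, ISetProperties and IOutArchive.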
class CHandler:
public IInArchive,
#ifdef _7Z_VOL
public IInArchiveGetStream,
#endif
#ifdef __7Z_SET_PROPERTIES
public ISetProperties,
#endif
#ifndef EXTRACT_ONLY
public IOutArchive,
#endif
public CMyUnknownImp
{
public:
MY_QUERYINTERFACE_BEGIN
#ifdef _7Z_VOL
MY_QUERYINTERFACE_ENTRY(IInArchiveGetStream)
#endif
#ifdef __7Z_SET_PROPERTIES
MY_QUERYINTERFACE_ENTRY(ISetProperties)
#endif
#ifndef EXTRACT_ONLY
MY_QUERYINTERFACE_ENTRY(IOutArchive)
#endif
MY_QUERYINTERFACE_END
MY_ADDREF_RELEASE
STDMETHOD(Open)(IInStream *stream,
const UInt64 *maxCheckStartPosition,
IArchiveOpenCallback *openArchiveCallback);
STDMETHOD(Close)();
STDMETHOD(GetNumberOfItems)(UInt32 *numItems);
STDMETHOD(GetProperty)(UInt32 index, PROPID propID, PROPVARIANT *value);
STDMETHOD(Extract)(const UInt32* indices, UInt32 numItems,
Int32 testMode, IArchiveExtractCallback *extractCallback);
STDMETHOD(GetArchiveProperty)(PROPID propID, PROPVARIANT *value);
STDMETHOD(GetNumberOfProperties)(UInt32 *numProperties);
STDMETHOD(GetPropertyInfo)(UInt32 index,
BSTR *name, PROPID *propID, VARTYPE *varType);
STDMETHOD(GetNumberOfArchiveProperties)(UInt32 *numProperties);
STDMETHOD(GetArchivePropertyInfo)(UInt32 index,
BSTR *name, PROPID *propID, VARTYPE *varType);
#ifdef _7Z_VOL
STDMETHOD(GetStream)(UInt32 index, ISequentialInStream **stream);
#endif
#ifdef __7Z_SET_PROPERTIES
STDMETHOD(SetProperties)(const wchar_t **names, const PROPVARIANT *values, Int32 numProperties);
#endif
#ifndef EXTRACT_ONLY
// IOutArchiveHandler
STDMETHOD(UpdateItems)(ISequentialOutStream *outStream, UInt32 numItems,
IArchiveUpdateCallback *updateCallback);
STDMETHOD(GetFileTimeType)(UInt32 *type);
// ISetProperties
HRESULT SetSolidSettings(const UString &s);
HRESULT SetSolidSettings(const PROPVARIANT &value);
#endif
CHandler();
private:
#ifdef _7Z_VOL
CObjectVector<CVolume> _volumes;
CObjectVector<CRef> _refs;
#else
CMyComPtr<IInStream> _inStream;
NArchive::N7z::CArchiveDatabaseEx _database;
#endif
#ifdef COMPRESS_MT
UInt32 _numThreads;
#endif
#ifndef EXTRACT_ONLY
CObjectVector<COneMethodInfo> _methods;
CRecordVector<CBind> _binds;
bool _removeSfxBlock;
UInt64 _numSolidFiles;
UInt64 _numSolidBytes;
bool _numSolidBytesDefined;
bool _solidExtension;
bool _compressHeaders;
bool _compressHeadersFull;
bool _encryptHeaders;
bool _autoFilter;
UInt32 _level;
bool _volumeMode;
HRESULT SetParam(COneMethodInfo &oneMethodInfo, const UString &name, const UString &value);
HRESULT SetParams(COneMethodInfo &oneMethodInfo, const UString &srcString);
HRESULT SetPassword(CCompressionMethodMode &methodMode,
IArchiveUpdateCallback *updateCallback);
HRESULT SetCompressionMethod(CCompressionMethodMode &method,
CObjectVector<COneMethodInfo> &methodsInfo
#ifdef COMPRESS_MT
, UInt32 numThreads
#endif
);
HRESULT SetCompressionMethod(
CCompressionMethodMode &method,
CCompressionMethodMode &headerMethod);
#endif
#ifndef _SFX
CRecordVector<UInt64> _fileInfoPopIDs;
void FillPopIDs();
#endif
#ifndef EXTRACT_ONLY
void InitSolidFiles() { _numSolidFiles = UInt64(Int64(-1)); }
void InitSolidSize() { _numSolidBytes = UInt64(Int64(-1)); }
void InitSolid()
{
InitSolidFiles();
InitSolidSize();
_solidExtension = false;
_numSolidBytesDefined = false;
}
void Init()
{
_removeSfxBlock = false;
_compressHeaders = true;
_compressHeadersFull = true;
_encryptHeaders = false;
#ifdef COMPRESS_MT
_numThreads = NWindows::NSystem::GetNumberOfProcessors();
#endif
_level = 5;
_autoFilter = true;
_volumeMode = false;
InitSolid();
}
#endif
};
}}
#endif

View file

@@ -0,0 +1,19 @@
// 7z/Header.cpp
#include "StdAfx.h"
#include "7zHeader.h"
namespace NArchive {
namespace N7z {
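// The signatures are stored with the first byte offset by one and corrected at
// static-initialization time, presumably so the raw 7z signature bytes never
// appear verbatim in the compiled binary.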
Byte kSignature[kSignatureSize] = {'7' + 1, 'z', 0xBC, 0xAF, 0x27, 0x1C};
Byte kFinishSignature[kSignatureSize] = {'7' + 1, 'z', 0xBC, 0xAF, 0x27, 0x1C + 1};
class SignatureInitializer
{
public:
SignatureInitializer() { kSignature[0]--; kFinishSignature[0]--;};
} g_SignatureInitializer;
}}

View file

@@ -0,0 +1,96 @@
// 7z/7zHeader.h
#ifndef __7Z_HEADER_H
#define __7Z_HEADER_H
#include "7zMethodID.h"
namespace NArchive {
namespace N7z {
const int kSignatureSize = 6;
extern Byte kSignature[kSignatureSize];
// #define _7Z_VOL
// 7z-MultiVolume is not finished yet.
// It can work already, but I still do not like some
// things of that new multivolume format.
// So please keep it commented.
#ifdef _7Z_VOL
extern Byte kFinishSignature[kSignatureSize];
#endif
struct CArchiveVersion
{
Byte Major;
Byte Minor;
};
const Byte kMajorVersion = 0;
struct CStartHeader
{
UInt64 NextHeaderOffset;
UInt64 NextHeaderSize;
UInt32 NextHeaderCRC;
};
const UInt32 kStartHeaderSize = 20;
#ifdef _7Z_VOL
struct CFinishHeader: public CStartHeader
{
UInt64 ArchiveStartOffset; // data offset from the end of that struct
UInt64 AdditionalStartBlockSize; // start signature & start header size
};
const UInt32 kFinishHeaderSize = kStartHeaderSize + 16;
#endif
namespace NID
{
enum EEnum
{
kEnd,
kHeader,
kArchiveProperties,
kAdditionalStreamsInfo,
kMainStreamsInfo,
kFilesInfo,
kPackInfo,
kUnPackInfo,
kSubStreamsInfo,
kSize,
kCRC,
kFolder,
kCodersUnPackSize,
kNumUnPackStream,
kEmptyStream,
kEmptyFile,
kAnti,
kName,
kCreationTime,
kLastAccessTime,
kLastWriteTime,
kWinAttributes,
kComment,
kEncodedHeader,
kStartPos
};
}
}}
#endif

File diff suppressed because it is too large

View file

@@ -0,0 +1,288 @@
// 7zIn.h
#ifndef __7Z_IN_H
#define __7Z_IN_H
#include "../../IStream.h"
#include "../../IPassword.h"
#include "../../../Common/MyCom.h"
#include "../../Common/InBuffer.h"
#include "7zHeader.h"
#include "7zItem.h"
namespace NArchive {
namespace N7z {
class CInArchiveException
{
public:
enum CCauseType
{
kUnsupportedVersion = 0,
kUnexpectedEndOfArchive = 0,
kIncorrectHeader,
} Cause;
CInArchiveException(CCauseType cause);
};
struct CInArchiveInfo
{
CArchiveVersion Version;
UInt64 StartPosition;
UInt64 StartPositionAfterHeader;
UInt64 DataStartPosition;
UInt64 DataStartPosition2;
CRecordVector<UInt64> FileInfoPopIDs;
void Clear()
{
FileInfoPopIDs.Clear();
}
};
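// CArchiveDatabase plus derived lookup tables (built by Fill()) that map files
// to folders and folders to their pack-stream start positions.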
struct CArchiveDatabaseEx: public CArchiveDatabase
{
CInArchiveInfo ArchiveInfo;
CRecordVector<UInt64> PackStreamStartPositions;
CRecordVector<CNum> FolderStartPackStreamIndex;
CRecordVector<CNum> FolderStartFileIndex;
CRecordVector<CNum> FileIndexToFolderIndexMap;
void Clear()
{
CArchiveDatabase::Clear();
ArchiveInfo.Clear();
PackStreamStartPositions.Clear();
FolderStartPackStreamIndex.Clear();
FolderStartFileIndex.Clear();
FileIndexToFolderIndexMap.Clear();
}
void FillFolderStartPackStream();
void FillStartPos();
void FillFolderStartFileIndex();
void Fill()
{
FillFolderStartPackStream();
FillStartPos();
FillFolderStartFileIndex();
}
UInt64 GetFolderStreamPos(int folderIndex, int indexInFolder) const
{
return ArchiveInfo.DataStartPosition +
PackStreamStartPositions[FolderStartPackStreamIndex[folderIndex] +
indexInFolder];
}
UInt64 GetFolderFullPackSize(int folderIndex) const
{
CNum packStreamIndex = FolderStartPackStreamIndex[folderIndex];
const CFolder &folder = Folders[folderIndex];
UInt64 size = 0;
for (int i = 0; i < folder.PackStreams.Size(); i++)
size += PackSizes[packStreamIndex + i];
return size;
}
UInt64 GetFolderPackStreamSize(int folderIndex, int streamIndex) const
{
return PackSizes[FolderStartPackStreamIndex[folderIndex] + streamIndex];
}
UInt64 GetFilePackSize(CNum fileIndex) const
{
CNum folderIndex = FileIndexToFolderIndexMap[fileIndex];
if (folderIndex >= 0)
{
if (FolderStartFileIndex[folderIndex] == fileIndex)
return GetFolderFullPackSize(folderIndex);
}
return 0;
}
};
class CInByte2
{
const Byte *_buffer;
size_t _size;
size_t _pos;
public:
void Init(const Byte *buffer, size_t size)
{
_buffer = buffer;
_size = size;
_pos = 0;
}
bool ReadByte(Byte &b)
{
if(_pos >= _size)
return false;
b = _buffer[_pos++];
return true;
}
void ReadBytes(void *data, size_t size, size_t &processedSize)
{
for(processedSize = 0; processedSize < size && _pos < _size; processedSize++)
((Byte *)data)[processedSize] = _buffer[_pos++];
}
bool ReadBytes(void *data, size_t size)
{
size_t processedSize;
ReadBytes(data, size, processedSize);
return (processedSize == size);
}
size_t GetProcessedSize() const { return _pos; }
};
class CStreamSwitch;
class CInArchive
{
friend class CStreamSwitch;
CMyComPtr<IInStream> _stream;
#ifdef _7Z_VOL
bool _finishSignature;
#endif
CObjectVector<CInByte2> _inByteVector;
CInByte2 *_inByteBack;
UInt64 _arhiveBeginStreamPosition;
UInt64 _position;
void AddByteStream(const Byte *buffer, size_t size)
{
_inByteVector.Add(CInByte2());
_inByteBack = &_inByteVector.Back();
_inByteBack->Init(buffer, size);
}
void DeleteByteStream()
{
_inByteVector.DeleteBack();
if (!_inByteVector.IsEmpty())
_inByteBack = &_inByteVector.Back();
}
private:
HRESULT FindAndReadSignature(IInStream *stream, const UInt64 *searchHeaderSizeLimit); // S_FALSE means is not archive
#ifdef _7Z_VOL
HRESULT FindFinishSignature(IInStream *stream, const UInt64 *searchHeaderSizeLimit); // S_FALSE means is not archive
#endif
HRESULT ReadFileNames(CObjectVector<CFileItem> &files);
HRESULT ReadDirect(IInStream *stream, void *data, UInt32 size,
UInt32 *processedSize);
HRESULT ReadDirect(void *data, UInt32 size, UInt32 *processedSize);
HRESULT SafeReadDirect(void *data, UInt32 size);
HRESULT SafeReadDirectByte(Byte &b);
HRESULT SafeReadDirectUInt32(UInt32 &value);
HRESULT SafeReadDirectUInt64(UInt64 &value);
HRESULT ReadBytes(void *data, size_t size)
{
if (!_inByteBack->ReadBytes(data, size))
return E_FAIL;
return S_OK;
}
HRESULT ReadByte(Byte &b)
{
if (!_inByteBack->ReadByte(b))
return E_FAIL;
return S_OK;
}
HRESULT ReadWideCharLE(wchar_t &c)
{
Byte b1;
if (!_inByteBack->ReadByte(b1))
return E_FAIL;
Byte b2;
if (!_inByteBack->ReadByte(b2))
return E_FAIL;
c = (wchar_t(b2) << 8) + b1;
return S_OK;
}
HRESULT ReadNumber(UInt64 &value);
HRESULT ReadNum(CNum &value);
HRESULT ReadID(UInt64 &value) { return ReadNumber(value); }
HRESULT ReadUInt32(UInt32 &value);
HRESULT ReadUInt64(UInt64 &value);
HRESULT SkeepData(UInt64 size);
HRESULT SkeepData();
HRESULT WaitAttribute(UInt64 attribute);
HRESULT ReadArchiveProperties(CInArchiveInfo &archiveInfo);
HRESULT GetNextFolderItem(CFolder &itemInfo);
HRESULT ReadHashDigests(int numItems,
CRecordVector<bool> &digestsDefined, CRecordVector<UInt32> &digests);
HRESULT ReadPackInfo(
UInt64 &dataOffset,
CRecordVector<UInt64> &packSizes,
CRecordVector<bool> &packCRCsDefined,
CRecordVector<UInt32> &packCRCs);
HRESULT ReadUnPackInfo(
const CObjectVector<CByteBuffer> *dataVector,
CObjectVector<CFolder> &folders);
HRESULT ReadSubStreamsInfo(
const CObjectVector<CFolder> &folders,
CRecordVector<CNum> &numUnPackStreamsInFolders,
CRecordVector<UInt64> &unPackSizes,
CRecordVector<bool> &digestsDefined,
CRecordVector<UInt32> &digests);
HRESULT ReadStreamsInfo(
const CObjectVector<CByteBuffer> *dataVector,
UInt64 &dataOffset,
CRecordVector<UInt64> &packSizes,
CRecordVector<bool> &packCRCsDefined,
CRecordVector<UInt32> &packCRCs,
CObjectVector<CFolder> &folders,
CRecordVector<CNum> &numUnPackStreamsInFolders,
CRecordVector<UInt64> &unPackSizes,
CRecordVector<bool> &digestsDefined,
CRecordVector<UInt32> &digests);
HRESULT GetNextFileItem(CFileItem &itemInfo);
HRESULT ReadBoolVector(int numItems, CBoolVector &v);
HRESULT ReadBoolVector2(int numItems, CBoolVector &v);
HRESULT ReadTime(const CObjectVector<CByteBuffer> &dataVector,
CObjectVector<CFileItem> &files, UInt64 type);
HRESULT ReadAndDecodePackedStreams(UInt64 baseOffset, UInt64 &dataOffset,
CObjectVector<CByteBuffer> &dataVector
#ifndef _NO_CRYPTO
, ICryptoGetTextPassword *getTextPassword
#endif
);
HRESULT ReadHeader(CArchiveDatabaseEx &database
#ifndef _NO_CRYPTO
,ICryptoGetTextPassword *getTextPassword
#endif
);
public:
HRESULT Open(IInStream *stream, const UInt64 *searchHeaderSizeLimit); // S_FALSE means is not archive
void Close();
HRESULT ReadDatabase(CArchiveDatabaseEx &database
#ifndef _NO_CRYPTO
,ICryptoGetTextPassword *getTextPassword
#endif
);
};
}}
#endif

View file

@@ -0,0 +1,181 @@
// 7zItem.h
#ifndef __7Z_ITEM_H
#define __7Z_ITEM_H
#include "../../../Common/Buffer.h"
#include "7zMethodID.h"
#include "7zHeader.h"
namespace NArchive {
namespace N7z {
struct CAltCoderInfo
{
CMethodID MethodID;
CByteBuffer Properties;
};
typedef UInt32 CNum;
const CNum kNumMax = 0x7FFFFFFF;
const CNum kNumNoIndex = 0xFFFFFFFF;
struct CCoderInfo
{
CNum NumInStreams;
CNum NumOutStreams;
CObjectVector<CAltCoderInfo> AltCoders;
bool IsSimpleCoder() const { return (NumInStreams == 1) && (NumOutStreams == 1); }
};
struct CBindPair
{
CNum InIndex;
CNum OutIndex;
};
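// A "folder" in 7z terminology is one solid block: a chain of coders, the bind
// pairs connecting their streams, its pack streams and unpacked sizes.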
struct CFolder
{
CObjectVector<CCoderInfo> Coders;
CRecordVector<CBindPair> BindPairs;
CRecordVector<CNum> PackStreams;
CRecordVector<UInt64> UnPackSizes;
UInt32 UnPackCRC;
bool UnPackCRCDefined;
CFolder(): UnPackCRCDefined(false) {}
UInt64 GetUnPackSize() const // test it
{
if (UnPackSizes.IsEmpty())
return 0;
for (int i = UnPackSizes.Size() - 1; i >= 0; i--)
if (FindBindPairForOutStream(i) < 0)
return UnPackSizes[i];
throw 1;
}
CNum GetNumOutStreams() const
{
CNum result = 0;
for (int i = 0; i < Coders.Size(); i++)
result += Coders[i].NumOutStreams;
return result;
}
int FindBindPairForInStream(CNum inStreamIndex) const
{
for(int i = 0; i < BindPairs.Size(); i++)
if (BindPairs[i].InIndex == inStreamIndex)
return i;
return -1;
}
int FindBindPairForOutStream(CNum outStreamIndex) const
{
for(int i = 0; i < BindPairs.Size(); i++)
if (BindPairs[i].OutIndex == outStreamIndex)
return i;
return -1;
}
int FindPackStreamArrayIndex(CNum inStreamIndex) const
{
for(int i = 0; i < PackStreams.Size(); i++)
if (PackStreams[i] == inStreamIndex)
return i;
return -1;
}
};
typedef FILETIME CArchiveFileTime;
class CFileItem
{
public:
CArchiveFileTime CreationTime;
CArchiveFileTime LastWriteTime;
CArchiveFileTime LastAccessTime;
UInt64 UnPackSize;
UInt64 StartPos;
UInt32 Attributes;
UInt32 FileCRC;
UString Name;
bool HasStream; // Test it!!! It means that there is a
// stream in some folder. It can be an empty stream.
bool IsDirectory;
bool IsAnti;
bool IsFileCRCDefined;
bool AreAttributesDefined;
bool IsCreationTimeDefined;
bool IsLastWriteTimeDefined;
bool IsLastAccessTimeDefined;
bool IsStartPosDefined;
/*
const bool HasStream() const {
return !IsDirectory && !IsAnti && UnPackSize != 0; }
*/
CFileItem():
HasStream(true),
IsDirectory(false),
IsAnti(false),
IsFileCRCDefined(false),
AreAttributesDefined(false),
IsCreationTimeDefined(false),
IsLastWriteTimeDefined(false),
IsLastAccessTimeDefined(false),
IsStartPosDefined(false)
{}
void SetAttributes(UInt32 attributes)
{
AreAttributesDefined = true;
Attributes = attributes;
}
void SetCreationTime(const CArchiveFileTime &creationTime)
{
IsCreationTimeDefined = true;
CreationTime = creationTime;
}
void SetLastWriteTime(const CArchiveFileTime &lastWriteTime)
{
IsLastWriteTimeDefined = true;
LastWriteTime = lastWriteTime;
}
void SetLastAccessTime(const CArchiveFileTime &lastAccessTime)
{
IsLastAccessTimeDefined = true;
LastAccessTime = lastAccessTime;
}
};
struct CArchiveDatabase
{
CRecordVector<UInt64> PackSizes;
CRecordVector<bool> PackCRCsDefined;
CRecordVector<UInt32> PackCRCs;
CObjectVector<CFolder> Folders;
CRecordVector<CNum> NumUnPackStreamsVector;
CObjectVector<CFileItem> Files;
void Clear()
{
PackSizes.Clear();
PackCRCsDefined.Clear();
PackCRCs.Clear();
Folders.Clear();
NumUnPackStreamsVector.Clear();
Files.Clear();
}
bool IsEmpty() const
{
return (PackSizes.IsEmpty() &&
PackCRCsDefined.IsEmpty() &&
PackCRCs.IsEmpty() &&
Folders.IsEmpty() &&
NumUnPackStreamsVector.IsEmpty() &&
Files.IsEmpty());
}
};
}}
#endif
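
For orientation only (this sketch is not part of the committed file): CFolder::GetUnPackSize above walks the output streams from the end and returns the size of the first one that no bind pair consumes. A minimal standalone mirror of that lookup, using std::vector and a stand-in BindPair struct instead of the CRecordVector/CBindPair types defined above:

// Illustrative mirror only; names with the "Sketch" suffix are hypothetical.
#include <cstdint>
#include <cstdio>
#include <vector>

struct BindPair { uint32_t InIndex; uint32_t OutIndex; };

static int FindBindPairForOutStreamSketch(const std::vector<BindPair> &bindPairs, uint32_t outIndex)
{
    for (size_t i = 0; i < bindPairs.size(); i++)
        if (bindPairs[i].OutIndex == outIndex)
            return (int)i;
    return -1;
}

static uint64_t GetFolderUnPackSizeSketch(const std::vector<uint64_t> &unPackSizes,
                                          const std::vector<BindPair> &bindPairs)
{
    // Walk output streams from the end; the first one no bind pair consumes is the
    // folder's final output. (The original throws if none is found; this sketch returns 0.)
    for (int i = (int)unPackSizes.size() - 1; i >= 0; i--)
        if (FindBindPairForOutStreamSketch(bindPairs, (uint32_t)i) < 0)
            return unPackSizes[i];
    return 0;
}

int main()
{
    // Two coders chained together: output stream 0 is bound to an input stream,
    // so only output stream 1 is unbound and its size is the folder size.
    std::vector<uint64_t> sizes = { 1024, 4096 };
    std::vector<BindPair> binds = { { 0, 0 } };
    std::printf("%llu\n", (unsigned long long)GetFolderUnPackSizeSketch(sizes, binds)); // prints 4096
    return 0;
}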

View file

@ -0,0 +1,76 @@
// 7zMethodID.cpp
#include "StdAfx.h"
#include "7zMethodID.h"
namespace NArchive {
namespace N7z {
static wchar_t GetHex(Byte value)
{
return (value < 10) ? ('0' + value) : ('A' + (value - 10));
}
static bool HexCharToInt(wchar_t value, Byte &result)
{
if (value >= '0' && value <= '9')
result = value - '0';
else if (value >= 'a' && value <= 'f')
result = 10 + value - 'a';
else if (value >= 'A' && value <= 'F')
result = 10 + value - 'A';
else
return false;
return true;
}
static bool TwoHexCharsToInt(wchar_t valueHigh, wchar_t valueLow, Byte &result)
{
Byte resultHigh, resultLow;
if (!HexCharToInt(valueHigh, resultHigh))
return false;
if (!HexCharToInt(valueLow, resultLow))
return false;
result = (resultHigh << 4) + resultLow;
return true;
}
UString CMethodID::ConvertToString() const
{
UString result;
for (int i = 0; i < IDSize; i++)
{
Byte b = ID[i];
result += GetHex(b >> 4);
result += GetHex(b & 0xF);
}
return result;
}
bool CMethodID::ConvertFromString(const UString &srcString)
{
int length = srcString.Length();
if ((length & 1) != 0 || (length >> 1) > kMethodIDSize)
return false;
IDSize = length / 2;
UInt32 i;
for(i = 0; i < IDSize; i++)
if (!TwoHexCharsToInt(srcString[i * 2], srcString[i * 2 + 1], ID[i]))
return false;
for(; i < kMethodIDSize; i++)
ID[i] = 0;
return true;
}
bool operator==(const CMethodID &a1, const CMethodID &a2)
{
if (a1.IDSize != a2.IDSize)
return false;
for (UInt32 i = 0; i < a1.IDSize; i++)
if (a1.ID[i] != a2.ID[i])
return false;
return true;
}
}}
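
As a quick sanity check of the hex round-trip above (illustrative only, not part of the commit): a method ID of bytes {0x03, 0x01, 0x01} converts to the string "030101", and odd-length input is rejected. A standalone mirror of the two helpers, assuming plain char/std::string in place of UString:

// Hypothetical standalone mirror of GetHex/HexCharToInt for illustration.
#include <cstdio>
#include <string>

static char GetHexChar(unsigned char value)
{
    return (value < 10) ? ('0' + value) : ('A' + (value - 10));
}

static bool HexCharToNibble(char c, unsigned char &out)
{
    if (c >= '0' && c <= '9') out = c - '0';
    else if (c >= 'a' && c <= 'f') out = 10 + (c - 'a');
    else if (c >= 'A' && c <= 'F') out = 10 + (c - 'A');
    else return false;
    return true;
}

int main()
{
    const unsigned char id[] = { 0x03, 0x01, 0x01 }; // e.g. the LZMA method ID
    std::string s;
    for (unsigned char b : id)
    {
        s += GetHexChar(b >> 4);
        s += GetHexChar(b & 0xF);
    }
    std::printf("%s\n", s.c_str()); // prints 030101

    unsigned char nibble;
    std::printf("%d\n", HexCharToNibble('F', nibble) && nibble == 15); // prints 1
    return 0;
}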

View file

@ -0,0 +1,29 @@
// 7zMethodID.h
#ifndef __7Z_METHOD_ID_H
#define __7Z_METHOD_ID_H
#include "../../../Common/String.h"
#include "../../../Common/Types.h"
namespace NArchive {
namespace N7z {
const int kMethodIDSize = 15;
struct CMethodID
{
Byte ID[kMethodIDSize];
Byte IDSize;
UString ConvertToString() const;
bool ConvertFromString(const UString &srcString);
};
bool operator==(const CMethodID &a1, const CMethodID &a2);
inline bool operator!=(const CMethodID &a1, const CMethodID &a2)
{ return !(a1 == a2); }
}}
#endif

View file

@ -0,0 +1,174 @@
// 7zMethods.cpp
#include "StdAfx.h"
#include "7zMethods.h"
#include "../../../Windows/FileFind.h"
#include "../../../Windows/DLL.h"
#include "../../../Windows/PropVariant.h"
#include "../../../Windows/Synchronization.h"
#include "../../ICoder.h"
#include "../Common/CodecsPath.h"
using namespace NWindows;
namespace NArchive {
namespace N7z {
static CObjectVector<CMethodInfo2> g_Methods;
static bool g_Loaded = false;
typedef UInt32 (WINAPI *GetNumberOfMethodsFunc)(UInt32 *numMethods);
typedef UInt32 (WINAPI *GetMethodPropertyFunc)(
UInt32 index, PROPID propID, PROPVARIANT *value);
static void Load(const CSysString &folderPrefix)
{
NFile::NFind::CEnumerator enumerator(folderPrefix + CSysString(TEXT("*")));
NFile::NFind::CFileInfo fileInfo;
while (enumerator.Next(fileInfo))
{
if (fileInfo.IsDirectory())
continue;
CSysString filePath = folderPrefix + fileInfo.Name;
{
NDLL::CLibrary library;
if (!library.LoadEx(filePath, LOAD_LIBRARY_AS_DATAFILE))
continue;
}
NDLL::CLibrary library;
if (!library.Load(filePath))
continue;
GetMethodPropertyFunc getMethodProperty = (GetMethodPropertyFunc)
library.GetProcAddress("GetMethodProperty");
if (getMethodProperty == NULL)
continue;
UInt32 numMethods = 1;
GetNumberOfMethodsFunc getNumberOfMethodsFunc = (GetNumberOfMethodsFunc)
library.GetProcAddress("GetNumberOfMethods");
if (getNumberOfMethodsFunc != NULL)
if (getNumberOfMethodsFunc(&numMethods) != S_OK)
continue;
for(UInt32 i = 0; i < numMethods; i++)
{
CMethodInfo2 info;
info.FilePath = filePath;
NWindows::NCOM::CPropVariant propVariant;
if (getMethodProperty(i, NMethodPropID::kID, &propVariant) != S_OK)
continue;
if (propVariant.vt != VT_BSTR)
continue;
info.MethodID.IDSize = SysStringByteLen(propVariant.bstrVal);
memmove(info.MethodID.ID, propVariant.bstrVal, info.MethodID.IDSize);
propVariant.Clear();
if (getMethodProperty(i, NMethodPropID::kName, &propVariant) != S_OK)
continue;
if (propVariant.vt == VT_EMPTY)
{
}
else if (propVariant.vt == VT_BSTR)
info.Name = propVariant.bstrVal;
else
continue;
propVariant.Clear();
if (getMethodProperty(i, NMethodPropID::kEncoder, &propVariant) != S_OK)
continue;
if (propVariant.vt == VT_EMPTY)
info.EncoderIsAssigned = false;
else if (propVariant.vt == VT_BSTR)
{
info.EncoderIsAssigned = true;
info.Encoder = *(const GUID *)propVariant.bstrVal;
}
else
continue;
propVariant.Clear();
if (getMethodProperty(i, NMethodPropID::kDecoder, &propVariant) != S_OK)
continue;
if (propVariant.vt == VT_EMPTY)
info.DecoderIsAssigned = false;
else if (propVariant.vt == VT_BSTR)
{
info.DecoderIsAssigned = true;
info.Decoder = *(const GUID *)propVariant.bstrVal;
}
else
continue;
propVariant.Clear();
if (getMethodProperty(i, NMethodPropID::kInStreams, &propVariant) != S_OK)
continue;
if (propVariant.vt == VT_EMPTY)
info.NumInStreams = 1;
else if (propVariant.vt == VT_UI4)
info.NumInStreams = propVariant.ulVal;
else
continue;
propVariant.Clear();
if (getMethodProperty(i, NMethodPropID::kOutStreams, &propVariant) != S_OK)
continue;
if (propVariant.vt == VT_EMPTY)
info.NumOutStreams = 1;
else if (propVariant.vt == VT_UI4)
info.NumOutStreams = propVariant.ulVal;
else
continue;
propVariant.Clear();
g_Methods.Add(info);
}
}
}
static NSynchronization::CCriticalSection g_CriticalSection;
void LoadMethodMap()
{
NSynchronization::CCriticalSectionLock lock(g_CriticalSection);
if (g_Loaded)
return;
g_Loaded = true;
Load(GetCodecsFolderPrefix());
}
bool GetMethodInfo(const CMethodID &methodID, CMethodInfo &methodInfo)
{
for(int i = 0; i < g_Methods.Size(); i++)
{
const CMethodInfo2 &method = g_Methods[i];
if (method.MethodID == methodID)
{
methodInfo = (CMethodInfo)method;
return true;
}
}
return false;
}
bool GetMethodInfo(const UString &name, CMethodInfo2 &methodInfo)
{
for(int i = 0; i < g_Methods.Size(); i++)
{
const CMethodInfo2 &method = g_Methods[i];
if (method.Name.CompareNoCase(name) == 0)
{
methodInfo = method;
return true;
}
}
return false;
}
}}
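
LoadMethodMap above loads the codec list at most once, guarding the g_Loaded flag with a critical section so concurrent callers do not race. A minimal sketch (not part of the commit) of the same initialize-once pattern in portable C++, with std::mutex standing in for the NSynchronization wrappers and a placeholder in place of the real directory scan:

// Illustrative only: portable mirror of the LoadMethodMap() guard.
#include <cstdio>
#include <mutex>
#include <vector>

struct MethodInfoStub { int id; };          // placeholder for CMethodInfo2

static std::vector<MethodInfoStub> g_methods;
static bool g_loaded = false;
static std::mutex g_mutex;

static void LoadStub()                      // placeholder for Load(GetCodecsFolderPrefix())
{
    g_methods.push_back({ 1 });
}

void LoadMethodMapSketch()
{
    std::lock_guard<std::mutex> lock(g_mutex);  // same role as CCriticalSectionLock
    if (g_loaded)
        return;
    g_loaded = true;                            // set before loading, as in the original
    LoadStub();
}

int main()
{
    LoadMethodMapSketch();
    LoadMethodMapSketch();                      // second call is a no-op
    std::printf("%zu\n", g_methods.size());     // prints 1
    return 0;
}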

View file

@ -0,0 +1,36 @@
// 7zMethods.h
#ifndef __7Z_METHODS_H
#define __7Z_METHODS_H
#include "7zMethodID.h"
namespace NArchive {
namespace N7z {
struct CMethodInfo
{
UString Name;
bool EncoderIsAssigned;
bool DecoderIsAssigned;
UInt32 NumInStreams;
UInt32 NumOutStreams;
CLSID Encoder;
CLSID Decoder;
// UString Description;
CSysString FilePath;
};
struct CMethodInfo2: public CMethodInfo
{
CMethodID MethodID;
};
void LoadMethodMap();
bool GetMethodInfo(const CMethodID &methodID, CMethodInfo &methodInfo);
bool GetMethodInfo(const UString &name, CMethodInfo2 &methodInfo);
}}
#endif

View file

@ -0,0 +1,166 @@
// 7zProperties.cpp
#include "StdAfx.h"
#include "7zProperties.h"
#include "7zHeader.h"
#include "7zHandler.h"
// #define _MULTI_PACK
namespace NArchive {
namespace N7z {
struct CPropMap
{
UInt64 FilePropID;
STATPROPSTG StatPROPSTG;
};
CPropMap kPropMap[] =
{
{ NID::kName, NULL, kpidPath, VT_BSTR},
{ NID::kSize, NULL, kpidSize, VT_UI8},
{ NID::kPackInfo, NULL, kpidPackedSize, VT_UI8},
#ifdef _MULTI_PACK
{ 100, L"Pack0", kpidPackedSize0, VT_UI8},
{ 101, L"Pack1", kpidPackedSize1, VT_UI8},
{ 102, L"Pack2", kpidPackedSize2, VT_UI8},
{ 103, L"Pack3", kpidPackedSize3, VT_UI8},
{ 104, L"Pack4", kpidPackedSize4, VT_UI8},
#endif
{ NID::kCreationTime, NULL, kpidCreationTime, VT_FILETIME},
{ NID::kLastWriteTime, NULL, kpidLastWriteTime, VT_FILETIME},
{ NID::kLastAccessTime, NULL, kpidLastAccessTime, VT_FILETIME},
{ NID::kWinAttributes, NULL, kpidAttributes, VT_UI4},
{ NID::kStartPos, NULL, kpidPosition, VT_UI4},
{ NID::kCRC, NULL, kpidCRC, VT_UI4},
{ NID::kAnti, L"Anti", kpidIsAnti, VT_BOOL},
// { 97, NULL, kpidSolid, VT_BOOL},
#ifndef _SFX
{ 98, NULL, kpidMethod, VT_BSTR},
{ 99, L"Block", kpidBlock, VT_UI4}
#endif
// { L"ID", kpidID, VT_BSTR},
// { L"UnPack Version", kpidUnPackVersion, VT_UI1},
// { L"Host OS", kpidHostOS, VT_BSTR}
};
static const int kPropMapSize = sizeof(kPropMap) / sizeof(kPropMap[0]);
static int FindPropInMap(UInt64 filePropID)
{
for (int i = 0; i < kPropMapSize; i++)
if (kPropMap[i].FilePropID == filePropID)
return i;
return -1;
}
static void CopyOneItem(CRecordVector<UInt64> &src,
CRecordVector<UInt64> &dest, UInt32 item)
{
for (int i = 0; i < src.Size(); i++)
if (src[i] == item)
{
dest.Add(item);
src.Delete(i);
return;
}
}
static void RemoveOneItem(CRecordVector<UInt64> &src, UInt32 item)
{
for (int i = 0; i < src.Size(); i++)
if (src[i] == item)
{
src.Delete(i);
return;
}
}
static void InsertToHead(CRecordVector<UInt64> &dest, UInt32 item)
{
for (int i = 0; i < dest.Size(); i++)
if (dest[i] == item)
{
dest.Delete(i);
break;
}
dest.Insert(0, item);
}
void CHandler::FillPopIDs()
{
_fileInfoPopIDs.Clear();
#ifdef _7Z_VOL
if(_volumes.Size() < 1)
return;
const CVolume &volume = _volumes.Front();
const CArchiveDatabaseEx &_database = volume.Database;
#endif
CRecordVector<UInt64> fileInfoPopIDs = _database.ArchiveInfo.FileInfoPopIDs;
RemoveOneItem(fileInfoPopIDs, NID::kEmptyStream);
RemoveOneItem(fileInfoPopIDs, NID::kEmptyFile);
CopyOneItem(fileInfoPopIDs, _fileInfoPopIDs, NID::kName);
CopyOneItem(fileInfoPopIDs, _fileInfoPopIDs, NID::kAnti);
CopyOneItem(fileInfoPopIDs, _fileInfoPopIDs, NID::kSize);
CopyOneItem(fileInfoPopIDs, _fileInfoPopIDs, NID::kPackInfo);
CopyOneItem(fileInfoPopIDs, _fileInfoPopIDs, NID::kCreationTime);
CopyOneItem(fileInfoPopIDs, _fileInfoPopIDs, NID::kLastWriteTime);
CopyOneItem(fileInfoPopIDs, _fileInfoPopIDs, NID::kLastAccessTime);
CopyOneItem(fileInfoPopIDs, _fileInfoPopIDs, NID::kWinAttributes);
CopyOneItem(fileInfoPopIDs, _fileInfoPopIDs, NID::kCRC);
CopyOneItem(fileInfoPopIDs, _fileInfoPopIDs, NID::kComment);
_fileInfoPopIDs += fileInfoPopIDs;
#ifndef _SFX
_fileInfoPopIDs.Add(98);
_fileInfoPopIDs.Add(99);
#endif
#ifdef _MULTI_PACK
_fileInfoPopIDs.Add(100);
_fileInfoPopIDs.Add(101);
_fileInfoPopIDs.Add(102);
_fileInfoPopIDs.Add(103);
_fileInfoPopIDs.Add(104);
#endif
#ifndef _SFX
InsertToHead(_fileInfoPopIDs, NID::kLastWriteTime);
InsertToHead(_fileInfoPopIDs, NID::kPackInfo);
InsertToHead(_fileInfoPopIDs, NID::kSize);
InsertToHead(_fileInfoPopIDs, NID::kName);
#endif
}
STDMETHODIMP CHandler::GetNumberOfProperties(UInt32 *numProperties)
{
*numProperties = _fileInfoPopIDs.Size();
return S_OK;
}
STDMETHODIMP CHandler::GetPropertyInfo(UInt32 index,
BSTR *name, PROPID *propID, VARTYPE *varType)
{
if((int)index >= _fileInfoPopIDs.Size())
return E_INVALIDARG;
int indexInMap = FindPropInMap(_fileInfoPopIDs[index]);
if (indexInMap == -1)
return E_INVALIDARG;
const STATPROPSTG &srcItem = kPropMap[indexInMap].StatPROPSTG;
*propID = srcItem.propid;
*varType = srcItem.vt;
*name = 0;
return S_OK;
}
}}
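
The property table above is consulted twice: FillPopIDs decides which property IDs are exposed and in what order, and GetPropertyInfo then maps each exposed ID back through kPropMap to a PROPID/VARTYPE pair. A reduced standalone mirror of that lookup (the table values and names below are stubs for illustration, not the real NID/kpid/VT constants):

// Illustrative only: reduced mirror of FindPropInMap / GetPropertyInfo.
#include <cstdint>
#include <cstdio>
#include <vector>

struct PropMapEntry { uint64_t filePropID; int propID; int varType; };

// Stub table; the numeric values merely stand in for NID::k* / kpid* / VT_*.
static const PropMapEntry kMap[] = {
    { 3 /* "name" */, 1 /* "path" */, 8  /* "bstr" */ },
    { 9 /* "size" */, 2 /* "size" */, 21 /* "ui8"  */ },
};

static int FindPropInMapSketch(uint64_t filePropID)
{
    for (size_t i = 0; i < sizeof(kMap) / sizeof(kMap[0]); i++)
        if (kMap[i].filePropID == filePropID)
            return (int)i;
    return -1;
}

int main()
{
    std::vector<uint64_t> exposed = { 3, 9 };   // stands in for _fileInfoPopIDs
    for (uint64_t id : exposed)
    {
        int idx = FindPropInMapSketch(id);
        if (idx < 0)
            continue;                           // GetPropertyInfo returns E_INVALIDARG here
        std::printf("propID=%d varType=%d\n", kMap[idx].propID, kMap[idx].varType);
    }
    return 0;
}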

View file

@ -0,0 +1,22 @@
// 7zProperties.h
#ifndef __7Z_PROPERTIES_H
#define __7Z_PROPERTIES_H
#include "../../PropID.h"
namespace NArchive {
namespace N7z {
enum // PropID
{
kpidPackedSize0 = kpidUserDefined,
kpidPackedSize1,
kpidPackedSize2,
kpidPackedSize3,
kpidPackedSize4,
};
}}
#endif

View file

@ -0,0 +1,3 @@
EXPORTS
CreateObject PRIVATE
GetHandlerProperty PRIVATE

Some files were not shown because too many files have changed in this diff.