Backups: use new locatorInfo & mediaName (#10627)

trevor-signal 2025-06-18 13:16:29 -04:00 committed by GitHub
parent 099f94a809
commit fb0c414702
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
78 changed files with 2271 additions and 2842 deletions


@ -234,7 +234,7 @@ jobs:
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
repository: 'signalapp/Signal-Message-Backup-Tests'
ref: '29023d042cafb07db3b37777440ab123f2ef40c0'
ref: '57807422c347f53cfa8d5a1423c382992a8fc199'
path: 'backup-integration-tests'
- run: xvfb-run --auto-servernum pnpm run test-electron


@ -5294,7 +5294,7 @@ Signal Desktop makes use of the following open source projects.
libsignal makes use of the following open source projects.
## partial-default-derive 0.1.0, partial-default 0.1.0
## spqr 0.1.0, partial-default-derive 0.1.0, partial-default 0.1.0
```
GNU AFFERO GENERAL PUBLIC LICENSE
@ -5961,7 +5961,7 @@ For more information on this, and how to apply and follow the GNU AGPL, see
```
## libsignal-account-keys, attest, libsignal-ffi, libsignal-jni, libsignal-jni-impl, libsignal-jni-testing, libsignal-node, signal-neon-futures, signal-neon-futures-tests, libsignal-bridge, libsignal-bridge-macros, libsignal-bridge-testing, libsignal-bridge-types, libsignal-core, signal-crypto, device-transfer, libsignal-keytrans, signal-media, libsignal-message-backup, libsignal-message-backup-macros, libsignal-net, libsignal-net-infra, poksho, libsignal-protocol, usernames, zkcredential, zkgroup
## libsignal-account-keys, attest, libsignal-ffi, libsignal-jni, libsignal-jni-impl, libsignal-jni-testing, libsignal-node, signal-neon-futures, signal-neon-futures-tests, libsignal-bridge, libsignal-bridge-macros, libsignal-bridge-testing, libsignal-bridge-types, libsignal-cli-utils, libsignal-core, signal-crypto, device-transfer, libsignal-keytrans, signal-media, libsignal-message-backup, libsignal-message-backup-macros, libsignal-net, libsignal-net-chat, libsignal-net-infra, poksho, libsignal-protocol, usernames, zkcredential, zkgroup
```
GNU AFFERO GENERAL PUBLIC LICENSE
@ -6688,7 +6688,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
```
## hax-lib-macros 0.2.0, hax-lib 0.2.0
## hax-lib-macros 0.2.0, hax-lib-macros 0.3.1, hax-lib 0.2.0, hax-lib 0.3.1
```
@ -7092,6 +7092,213 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
```
## sorted-vec 0.8.6
```
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2018 Shane Pearman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
```
## libcrux-intrinsics 0.0.2, libcrux-ml-kem 0.0.2, libcrux-platform 0.0.2, libcrux-sha3 0.0.2
```
@ -7676,6 +7883,85 @@ limitations under the License.
```
## libcrux-hacl-rs 0.0.2, libcrux-hkdf 0.0.2, libcrux-hmac 0.0.2, libcrux-macros 0.0.2, libcrux-sha2 0.0.2, libcrux-traits 0.0.2
```
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
(a) You must give any other recipients of the Work or Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.
You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
```
## boring 4.15.0
```
@ -8833,7 +9119,7 @@ DEALINGS IN THE SOFTWARE.
```
## neon-macros 1.0.0, neon 1.0.0
## neon-macros 1.1.1, neon 1.1.1
```
Copyright (c) 2015 David Herman
@ -11548,7 +11834,7 @@ SOFTWARE.
```
## anstream 0.6.18, anstyle-query 1.1.2, clap 4.5.35, colorchoice 1.0.3, env_filter 0.1.3, env_logger 0.11.8, is_terminal_polyfill 1.70.1
## anstream 0.6.18, anstyle-query 1.1.2, clap 4.5.35, colorchoice 1.0.3, env_filter 0.1.3, env_logger 0.11.8, is_terminal_polyfill 1.70.1, toml_datetime 0.6.8, toml_edit 0.22.24
```
Copyright (c) Individual contributors
@ -12521,7 +12807,7 @@ SOFTWARE.
```
## curve25519-dalek-derive 0.1.1, adler2 2.0.0, anyhow 1.0.97, async-trait 0.1.88, atomic-waker 1.1.2, auto_enums 0.8.7, derive_utils 0.15.0, displaydoc 0.2.5, dyn-clone 1.0.19, fastrand 2.3.0, home 0.5.9, itoa 1.0.15, linkme-impl 0.3.32, linkme 0.3.32, linux-raw-sys 0.4.15, linux-raw-sys 0.9.3, minimal-lexical 0.2.1, once_cell 1.21.3, paste 1.0.15, pin-project-internal 1.1.10, pin-project-lite 0.2.16, pin-project 1.1.10, prettyplease 0.2.32, proc-macro2 1.0.94, quote 1.0.40, rustc-hash 1.1.0, rustix 0.38.44, rustix 1.0.5, rustversion 1.0.20, semver 1.0.26, send_wrapper 0.6.0, serde 1.0.219, serde_derive 1.0.219, serde_json 1.0.140, syn-mid 0.6.0, syn 2.0.100, thiserror-impl 1.0.69, thiserror-impl 2.0.12, thiserror 1.0.69, thiserror 2.0.12, unicode-ident 1.0.18, utf-8 0.7.6
## curve25519-dalek-derive 0.1.1, adler2 2.0.0, anyhow 1.0.97, async-trait 0.1.88, atomic-waker 1.1.2, auto_enums 0.8.7, derive_utils 0.15.0, displaydoc 0.2.5, dyn-clone 1.0.19, fastrand 2.3.0, home 0.5.9, itoa 1.0.15, linkme-impl 0.3.33, linkme 0.3.33, linux-raw-sys 0.4.15, linux-raw-sys 0.9.3, minimal-lexical 0.2.1, num_enum 0.7.3, num_enum_derive 0.7.3, once_cell 1.21.3, paste 1.0.15, pin-project-internal 1.1.10, pin-project-lite 0.2.16, pin-project 1.1.10, prettyplease 0.2.32, proc-macro-crate 3.3.0, proc-macro2 1.0.94, quote 1.0.40, rustc-hash 1.1.0, rustix 0.38.44, rustix 1.0.5, rustversion 1.0.20, semver 1.0.26, send_wrapper 0.6.0, serde 1.0.219, serde_derive 1.0.219, serde_json 1.0.140, syn-mid 0.6.0, syn 2.0.100, thiserror-impl 1.0.69, thiserror-impl 2.0.12, thiserror 1.0.69, thiserror 2.0.12, unicode-ident 1.0.18, utf-8 0.7.6
```
Permission is hereby granted, free of charge, to any
@ -12550,6 +12836,30 @@ DEALINGS IN THE SOFTWARE.
```
## winnow 0.7.4
```
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
```
## android_system_properties 0.1.5
```


@ -19,7 +19,10 @@ import z from 'zod';
import GrowingFile from 'growing-file';
import { isNumber } from 'lodash';
import { decryptAttachmentV2ToSink } from '../ts/AttachmentCrypto';
import {
type DecryptAttachmentToSinkOptionsType,
decryptAttachmentV2ToSink,
} from '../ts/AttachmentCrypto';
import * as Bytes from '../ts/Bytes';
import type { MessageAttachmentsCursorType } from '../ts/sql/Interface';
import type { MainSQL } from '../ts/sql/main';
@ -137,15 +140,18 @@ async function safeDecryptToSink(
});
file.pipe(ciphertextStream);
const options = {
const options: DecryptAttachmentToSinkOptionsType = {
ciphertextStream,
idForLogging: 'attachment_channel/incremental',
keysBase64: ctx.keysBase64,
size: ctx.size,
theirChunkSize: ctx.chunkSize,
theirDigest: ctx.digest,
theirIncrementalMac: ctx.incrementalMac,
type: 'standard' as const,
type: 'standard',
integrityCheck: {
type: 'encrypted',
digest: ctx.digest,
},
};
const controller = new AbortController();
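The `type: 'encrypted'` integrity check above carries the sender-supplied digest. For contrast, a minimal sketch of the other variant of the union (`IntegrityCheckType`, defined in ts/AttachmentCrypto.ts later in this commit); `knownPlaintextHash` is a hypothetical value, not a field of this file's `ctx`:

```ts
// Sketch only: used when the file was previously downloaded and its
// plaintext hash is already known and verified locally.
const plaintextIntegrityCheck = {
  type: 'plaintext',
  plaintextHash: knownPlaintextHash, // hypothetical Uint8Array
} as const;
```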


@ -120,7 +120,7 @@
"@react-aria/utils": "3.25.3",
"@react-spring/web": "9.7.5",
"@react-types/shared": "3.27.0",
"@signalapp/libsignal-client": "0.72.1",
"@signalapp/libsignal-client": "0.74.1",
"@signalapp/quill-cjs": "2.1.2",
"@signalapp/ringrtc": "2.53.0",
"@signalapp/sqlcipher": "2.0.3",
@ -222,7 +222,7 @@
"@indutny/parallel-prettier": "3.0.0",
"@indutny/rezip-electron": "2.0.1",
"@napi-rs/canvas": "0.1.61",
"@signalapp/mock-server": "11.3.1",
"@signalapp/mock-server": "12.0.0",
"@storybook/addon-a11y": "8.4.4",
"@storybook/addon-actions": "8.4.4",
"@storybook/addon-controls": "8.4.4",
@ -611,7 +611,7 @@
"!node_modules/spellchecker/vendor/hunspell/**/*",
"!node_modules/@formatjs/intl-displaynames/**/*",
"!node_modules/@formatjs/intl-listformat/**/*",
"!**/node_modules/*/{CHANGELOG.md,README.md,README,readme.md,readme,test,__tests__,tests,powered-test,example,examples,*.d.ts,*d.ts.map,*.js.map,*.gypi,.snyk-*.flag,benchmark}",
"!**/node_modules/*/{CHANGELOG.md,README.md,README,readme.md,readme,test,__tests__,tests,powered-test,example,examples,*.d.ts,*.d.ts.map,*.js.map,*.gypi,.snyk-*.flag,benchmark}",
"!**/node_modules/.bin",
"!**/node_modules/**/build/**",
"node_modules/intl-tel-input/build/**",

pnpm-lock.yaml generated

@ -129,8 +129,8 @@ importers:
specifier: 3.27.0
version: 3.27.0(react@18.3.1)
'@signalapp/libsignal-client':
specifier: 0.72.1
version: 0.72.1
specifier: 0.74.1
version: 0.74.1
'@signalapp/quill-cjs':
specifier: 2.1.2
version: 2.1.2
@ -430,8 +430,8 @@ importers:
specifier: 0.1.61
version: 0.1.61
'@signalapp/mock-server':
specifier: 11.3.1
version: 11.3.1(bufferutil@4.0.9)(utf-8-validate@5.0.10)
specifier: 12.0.0
version: 12.0.0(bufferutil@4.0.9)(utf-8-validate@5.0.10)
'@storybook/addon-a11y':
specifier: 8.4.4
version: 8.4.4(storybook@8.4.4(bufferutil@4.0.9)(prettier@3.3.3)(utf-8-validate@5.0.10))
@ -2767,11 +2767,11 @@ packages:
'@signalapp/libsignal-client@0.60.2':
resolution: {integrity: sha512-tU4kNP/yCwkFntb2ahXOSQJtzdy+YifAB2yv5hw0qyKSidRHLn6bYiz4Zo2tjxLDRoBLAUxCRsQramStiqNZdA==}
'@signalapp/libsignal-client@0.72.1':
resolution: {integrity: sha512-YFIxrp8KVo0+q3lZBw/CEFbnPNbUsmfD3JRLu1cWH2nDLbAGwVzmbOGAzhXDzui5jO2PyEr8wXKqRpVbNKy1pw==}
'@signalapp/libsignal-client@0.74.1':
resolution: {integrity: sha512-PEJou0yrBvxaAGg7JjONlRNM/t3PCBuY96wu7W6+57e38/7Mibo9kAMfE5B8DgVv+DUNMW9AgJhx5McCoIXYew==}
'@signalapp/mock-server@11.3.1':
resolution: {integrity: sha512-nh/ynnz/3728M0Eh4BAa0kVcpSml6nyF9Vb2AV3WhlZ1VYJe5+IlshXfs57XwNghg4bHhOr9hVxIKuw/QK6ptg==}
'@signalapp/mock-server@12.0.0':
resolution: {integrity: sha512-5Ebu2c3/BViNsZ4yId8zfHyazMGUmsSfjMXXXFwNn7IYw0M0l/u+FFiR8SJdFnLoBbcxHG+KC3P+QqPdn91FIQ==}
'@signalapp/parchment-cjs@3.0.1':
resolution: {integrity: sha512-hSBMQ1M7wE4GcC8ZeNtvpJF+DAJg3eIRRf1SiHS3I3Algav/sgJJNm6HIYm6muHuK7IJmuEjkL3ILSXgmu0RfQ==}
@ -12460,13 +12460,13 @@ snapshots:
type-fest: 4.26.1
uuid: 8.3.2
'@signalapp/libsignal-client@0.72.1':
'@signalapp/libsignal-client@0.74.1':
dependencies:
node-gyp-build: 4.8.4
type-fest: 4.26.1
uuid: 11.0.2
'@signalapp/mock-server@11.3.1(bufferutil@4.0.9)(utf-8-validate@5.0.10)':
'@signalapp/mock-server@12.0.0(bufferutil@4.0.9)(utf-8-validate@5.0.10)':
dependencies:
'@indutny/parallel-prettier': 3.0.0(prettier@3.3.3)
'@signalapp/libsignal-client': 0.60.2


@ -88,6 +88,9 @@ message AccountData {
PhoneNumberSharingMode phoneNumberSharingMode = 17;
ChatStyle defaultChatStyle = 18;
repeated ChatStyle.CustomChatColor customChatColors = 19;
bool optimizeOnDeviceStorage = 20;
// See zkgroup for the particular integer values. Unset if backups are not enabled.
optional uint64 backupTier = 21;
}
message SubscriberData {
@ -690,6 +693,7 @@ message MessageAttachment {
message FilePointer {
// References attachments in the backup (media) storage tier.
// DEPRECATED; use LocatorInfo instead if available.
message BackupLocator {
string mediaName = 1;
// If present, the cdn number of the successful upload.
@ -699,6 +703,7 @@ message FilePointer {
bytes key = 3;
bytes digest = 4;
uint32 size = 5;
// Fallback in case backup tier upload failed.
optional string transitCdnKey = 6;
optional uint32 transitCdnNumber = 7;
@ -708,6 +713,7 @@ message FilePointer {
// May be downloaded or not when the backup is generated;
// primarily for free-tier users who cannot copy the
// attachments to the backup (media) storage tier.
// DEPRECATED; use LocatorInfo instead if available.
message AttachmentLocator {
string cdnKey = 1;
uint32 cdnNumber = 2;
@ -722,12 +728,14 @@ message FilePointer {
// CDN keys or anything else that makes download attempts impossible.
// This serves as a 'tombstone' so that the UX can show that an attachment
// did exist, but for whatever reason it's not retrievable.
// DEPRECATED; use LocatorInfo instead if available.
message InvalidAttachmentLocator {
}
// References attachments in a local encrypted backup.
// Importers should first attempt to read the file from the local backup,
// and on failure fallback to backup and transit cdn if possible.
// DEPRECATED; use LocatorInfo instead if available.
message LocalLocator {
string mediaName = 1;
// Separate key used to encrypt this file for the local backup.
@ -743,7 +751,57 @@ message FilePointer {
optional uint32 transitCdnNumber = 8;
}
message LocatorInfo {
// Must be non-empty if transitCdnKey or plaintextHash are set/nonempty.
// Otherwise must be empty.
bytes key = 1;
// From the sender of the attachment (incl. ourselves)
// Will be reserved once all clients start reading integrityCheck
bytes legacyDigest = 2;
oneof integrityCheck {
// Set if file was at one point downloaded and its plaintextHash was calculated
bytes plaintextHash = 10;
// Set if file has not been downloaded so its integrity has not been verified
// From the sender of the attachment
bytes encryptedDigest = 11;
}
// NB: This is the plaintext size, and empty content attachments are legal, so this
// may be zero even if transitCdnKey or mediaName are set/nonempty.
uint32 size = 3;
// Either both transit cdn key and number are set or neither should be set.
// Upload timestamp is optional but should only be set if key/number are set.
optional string transitCdnKey = 4;
optional uint32 transitCdnNumber = 5;
optional uint64 transitTierUploadTimestamp = 6;
// If present, the cdn number of the successful upload to media tier.
// If unset, may still have been uploaded, and clients
// can discover the cdn number via the list endpoint.
// Exporting clients should set this as long as their subscription
// has not rotated since the last upload, even if currently on the free tier.
optional uint32 mediaTierCdnNumber = 7;
// Nonempty any time the attachment was downloaded and its
// digest validated, whether free tier or paid subscription.
// Will be reserved once all clients start reading integrityCheck,
// when mediaName will be derived from the plaintextHash and encryption key
string legacyMediaName = 8;
// Separate key used to encrypt this file for the local backup.
// Generally required for local backups.
// Missing field indicates attachment was not available locally
// when the backup was generated, but remote backup or transit
// info was available.
optional bytes localKey = 9;
}
// If unset, importers should consider it to be an InvalidAttachmentLocator without throwing an error.
// DEPRECATED; use locatorInfo instead.
oneof locator {
BackupLocator backupLocator = 1;
AttachmentLocator attachmentLocator = 2;
@ -759,6 +817,7 @@ message FilePointer {
optional uint32 height = 9;
optional string caption = 10;
optional string blurHash = 11;
LocatorInfo locatorInfo = 13;
}
message Quote {
@ -1333,4 +1392,4 @@ message ChatFolder {
repeated uint64 includedRecipientIds = 7; // generated recipient id of groups, contacts, and/or note to self
repeated uint64 excludedRecipientIds = 8; // generated recipient id of groups, contacts, and/or note to self
bytes id = 9; // should be 16 bytes
}
}
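A hedged sketch of how an importer might reduce the new `LocatorInfo` fields to a single integrity check, per the comments above: prefer `plaintextHash` when the file was ever downloaded, otherwise fall back to the sender's digest, then to the deprecated `legacyDigest`. The decoded-message shape and helper are illustrative, not Signal's actual import code:

```ts
type IntegrityCheck =
  | { type: 'plaintext'; plaintextHash: Uint8Array }
  | { type: 'encrypted'; digest: Uint8Array };

// `info` stands in for a decoded FilePointer.LocatorInfo message.
function integrityCheckFromLocatorInfo(info: {
  plaintextHash?: Uint8Array;
  encryptedDigest?: Uint8Array;
  legacyDigest?: Uint8Array;
}): IntegrityCheck | undefined {
  if (info.plaintextHash?.byteLength) {
    return { type: 'plaintext', plaintextHash: info.plaintextHash };
  }
  const digest = info.encryptedDigest ?? info.legacyDigest;
  if (digest?.byteLength) {
    return { type: 'encrypted', digest };
  }
  // Neither hash nor digest present; integrity cannot be verified.
  return undefined;
}
```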


@ -26,8 +26,11 @@ import {
HashType,
CipherType,
IV_LENGTH,
KEY_LENGTH,
MAC_LENGTH,
KEY_SET_LENGTH,
PLAINTEXT_HASH_LENGTH,
DIGEST_LENGTH,
ATTACHMENT_MAC_LENGTH,
AES_KEY_LENGTH,
} from './types/Crypto';
import { constantTimeEqual } from './Crypto';
import { createName, getRelativePath } from './util/attachmentPath';
@ -35,7 +38,6 @@ import { appendPaddingStream, logPadSize } from './util/logPadding';
import { prependStream } from './util/prependStream';
import { appendMacStream } from './util/appendMacStream';
import { finalStream } from './util/finalStream';
import { getIvAndDecipher } from './util/getIvAndDecipher';
import { getMacAndUpdateHmac } from './util/getMacAndUpdateHmac';
import { trimPadding } from './util/trimPadding';
import { assertDev, strictAssert } from './util/assert';
@ -43,22 +45,14 @@ import * as Errors from './types/errors';
import { isNotNil } from './util/isNotNil';
import { missingCaseError } from './util/missingCaseError';
import { getEnvironment, Environment } from './environment';
import { toBase64 } from './Bytes';
import { isNotEmpty, toBase64, toHex } from './Bytes';
import { decipherWithAesKey } from './util/decipherWithAesKey';
const log = createLogger('AttachmentCrypto');
// This file was split from ts/Crypto.ts because it pulls things in from node, and
// too many things pull in Crypto.ts, so it broke storybook.
const DIGEST_LENGTH = MAC_LENGTH;
const HEX_DIGEST_LENGTH = DIGEST_LENGTH * 2;
const ATTACHMENT_MAC_LENGTH = MAC_LENGTH;
export class ReencryptedDigestMismatchError extends Error {}
/** @private */
export const KEY_SET_LENGTH = KEY_LENGTH + MAC_LENGTH;
export function _generateAttachmentIv(): Uint8Array {
return randomBytes(IV_LENGTH);
}
@ -78,25 +72,17 @@ export type EncryptedAttachmentV2 = {
export type ReencryptedAttachmentV2 = {
path: string;
iv: string;
plaintextHash: string;
digest: string;
localKey: string;
isReencryptableToSameDigest: boolean;
version: 2;
size: number;
};
export type ReencryptionInfo = {
iv: string;
key: string;
digest: string;
};
export type DecryptedAttachmentV2 = {
path: string;
iv: Uint8Array;
digest: string;
plaintextHash: string;
isReencryptableToSameDigest: boolean;
};
export type PlaintextSourceType =
@ -104,20 +90,9 @@ export type PlaintextSourceType =
| { stream: Readable; size?: number }
| { absolutePath: string };
export type HardcodedIVForEncryptionType =
| {
reason: 'test';
iv: Uint8Array;
}
| {
reason: 'reencrypting-for-backup';
iv: Uint8Array;
digestToMatch: Uint8Array;
};
type EncryptAttachmentV2OptionsType = Readonly<{
dangerousIv?: HardcodedIVForEncryptionType;
dangerousTestOnlySkipPadding?: boolean;
_testOnlyDangerousIv?: Uint8Array;
_testOnlyDangerousSkipPadding?: boolean;
keys: Readonly<Uint8Array>;
needIncrementalMac: boolean;
plaintext: PlaintextSourceType;
@ -156,8 +131,8 @@ export async function encryptAttachmentV2ToDisk(
};
}
export async function encryptAttachmentV2({
dangerousIv,
dangerousTestOnlySkipPadding,
_testOnlyDangerousIv,
_testOnlyDangerousSkipPadding,
keys,
needIncrementalMac,
plaintext,
@ -169,33 +144,17 @@ export async function encryptAttachmentV2({
const { aesKey, macKey } = splitKeys(keys);
if (dangerousIv) {
if (dangerousIv.reason === 'test') {
if (getEnvironment() !== Environment.Test) {
throw new Error(
`${logId}: Used dangerousIv with reason test outside tests!`
);
}
} else if (dangerousIv.reason === 'reencrypting-for-backup') {
strictAssert(
dangerousIv.digestToMatch.byteLength === DIGEST_LENGTH,
`${logId}: Must provide valid digest to match if providing iv for re-encryption`
if (_testOnlyDangerousIv != null || _testOnlyDangerousSkipPadding != null) {
if (getEnvironment() !== Environment.Test) {
throw new Error(
`${logId}: Used _testOnlyDangerousIv or _testOnlyDangerousSkipPadding outside tests!`
);
log.info(
`${logId}: using hardcoded iv because we are re-encrypting for backup`
);
} else {
throw missingCaseError(dangerousIv);
}
}
if (dangerousTestOnlySkipPadding && getEnvironment() !== Environment.Test) {
throw new Error(
`${logId}: Used dangerousTestOnlySkipPadding outside tests!`
);
}
const iv = dangerousIv?.iv || _generateAttachmentIv();
const iv = isNotEmpty(_testOnlyDangerousIv)
? _testOnlyDangerousIv
: _generateAttachmentIv();
const plaintextHash = createHash(HashType.size256);
const digest = createHash(HashType.size256);
@ -241,7 +200,9 @@ export async function encryptAttachmentV2({
[
source,
peekAndUpdateHash(plaintextHash),
dangerousTestOnlySkipPadding ? undefined : appendPaddingStream(),
_testOnlyDangerousSkipPadding === true
? undefined
: appendPaddingStream(),
createCipheriv(CipherType.AES256CBC, aesKey, iv),
prependIv(iv),
appendMacStream(macKey, macValue => {
@ -265,11 +226,11 @@ export async function encryptAttachmentV2({
throw error;
}
const ourPlaintextHash = plaintextHash.digest('hex');
const ourPlaintextHash = plaintextHash.digest();
const ourDigest = digest.digest();
strictAssert(
ourPlaintextHash.length === HEX_DIGEST_LENGTH,
ourPlaintextHash.byteLength === PLAINTEXT_HASH_LENGTH,
`${logId}: Failed to generate plaintext hash!`
);
@ -281,14 +242,6 @@ export async function encryptAttachmentV2({
strictAssert(ciphertextSize != null, 'Failed to measure ciphertext size!');
strictAssert(mac != null, 'Failed to compute mac!');
if (dangerousIv?.reason === 'reencrypting-for-backup') {
if (!constantTimeEqual(ourDigest, dangerousIv.digestToMatch)) {
throw new ReencryptedDigestMismatchError(
`${logId}: iv was hardcoded for backup re-encryption, but digest does not match`
);
}
}
const incrementalMac = incrementalDigestCreator?.getFinalDigest();
return {
@ -300,11 +253,15 @@ export async function encryptAttachmentV2({
digest: ourDigest,
incrementalMac,
iv,
plaintextHash: ourPlaintextHash,
plaintextHash: toHex(ourPlaintextHash),
};
}
type DecryptAttachmentToSinkOptionsType = Readonly<
export type IntegrityCheckType =
| { type: 'plaintext'; plaintextHash: Readonly<Uint8Array> }
| { type: 'encrypted'; digest: Readonly<Uint8Array> };
export type DecryptAttachmentToSinkOptionsType = Readonly<
{
idForLogging: string;
size: number;
@ -323,15 +280,14 @@ type DecryptAttachmentToSinkOptionsType = Readonly<
(
| {
type: 'standard';
theirDigest: Readonly<Uint8Array>;
theirIncrementalMac: Readonly<Uint8Array> | undefined;
theirChunkSize: number | undefined;
integrityCheck: IntegrityCheckType;
}
| {
// No need to check integrity for locally reencrypted attachments, or for backup
// thumbnails (since we created them)
type: 'local' | 'backupThumbnail';
theirDigest?: undefined;
}
) &
(
@ -436,7 +392,7 @@ export async function decryptAttachmentV2ToSink(
: undefined;
const maybeOuterEncryptionGetIvAndDecipher = outerEncryption
? getIvAndDecipher(outerEncryption.aesKey)
? decipherWithAesKey(outerEncryption.aesKey)
: undefined;
const maybeOuterEncryptionGetMacAndUpdateMac = outerHmac
@ -445,9 +401,9 @@ export async function decryptAttachmentV2ToSink(
})
: undefined;
let isPaddingAllZeros = false;
let readFd: FileHandle | undefined;
let iv: Uint8Array | undefined;
let ourPlaintextHash: Uint8Array | undefined;
let ourDigest: Uint8Array | undefined;
let ciphertextStream: Readable;
try {
@ -474,16 +430,13 @@ export async function decryptAttachmentV2ToSink(
getMacAndUpdateHmac(hmac, theirMacValue => {
theirMac = theirMacValue;
}),
getIvAndDecipher(aesKey, theirIv => {
iv = theirIv;
}),
trimPadding(options.size, paddingAnalysis => {
isPaddingAllZeros = paddingAnalysis.isPaddingAllZeros;
}),
decipherWithAesKey(aesKey),
trimPadding(options.size),
peekAndUpdateHash(plaintextHash),
finalStream(() => {
const ourMac = hmac.digest();
const ourDigest = digest.digest();
ourDigest = digest.digest();
ourPlaintextHash = plaintextHash.digest();
strictAssert(
ourMac.byteLength === ATTACHMENT_MAC_LENGTH,
@ -497,6 +450,10 @@ export async function decryptAttachmentV2ToSink(
ourDigest.byteLength === DIGEST_LENGTH,
`${logId}: Failed to generate ourDigest!`
);
strictAssert(
ourPlaintextHash.byteLength === DIGEST_LENGTH,
`${logId}: Failed to generate ourPlaintextHash!`
);
if (!constantTimeEqual(ourMac, theirMac)) {
throw new Error(`${logId}: Bad MAC`);
@ -506,12 +463,15 @@ export async function decryptAttachmentV2ToSink(
switch (type) {
case 'local':
case 'backupThumbnail':
// Skip digest check
// No integrity check needed, these are generated by us
break;
case 'standard':
if (!constantTimeEqual(ourDigest, options.theirDigest)) {
throw new Error(`${logId}: Bad digest`);
}
checkIntegrity({
locallyCalculatedDigest: ourDigest,
locallyCalculatedPlaintextHash: ourPlaintextHash,
integrityCheck: options.integrityCheck,
logId,
});
break;
default:
throw missingCaseError(type);
@ -561,25 +521,19 @@ export async function decryptAttachmentV2ToSink(
await readFd?.close();
}
const ourPlaintextHash = plaintextHash.digest('hex');
strictAssert(
ourPlaintextHash.length === HEX_DIGEST_LENGTH,
`${logId}: Failed to generate file hash!`
ourPlaintextHash != null && ourPlaintextHash.byteLength === DIGEST_LENGTH,
`${logId}: Failed to generate plaintext hash!`
);
strictAssert(
iv != null && iv.byteLength === IV_LENGTH,
`${logId}: failed to find their iv`
ourDigest != null && ourDigest.byteLength === DIGEST_LENGTH,
`${logId}: Failed to generate digest!`
);
if (!isPaddingAllZeros) {
log.warn(`${logId}: Attachment had non-zero padding`);
}
return {
iv,
isReencryptableToSameDigest: isPaddingAllZeros,
plaintextHash: ourPlaintextHash,
plaintextHash: toHex(ourPlaintextHash),
digest: toBase64(ourDigest),
};
}
@ -620,10 +574,9 @@ export async function decryptAndReencryptLocally(
return {
localKey: toBase64(keys),
iv: toBase64(result.iv),
path: relativeTargetPath,
plaintextHash: result.plaintextHash,
isReencryptableToSameDigest: result.isReencryptableToSameDigest,
digest: result.digest,
version: 2,
size,
};
@ -652,8 +605,8 @@ export function splitKeys(keys: Uint8Array): AttachmentEncryptionKeysType {
keys.byteLength === KEY_SET_LENGTH,
`attachment keys must be ${KEY_SET_LENGTH} bytes, got ${keys.byteLength}`
);
const aesKey = keys.subarray(0, KEY_LENGTH);
const macKey = keys.subarray(KEY_LENGTH, KEY_SET_LENGTH);
const aesKey = keys.subarray(0, AES_KEY_LENGTH);
const macKey = keys.subarray(AES_KEY_LENGTH, KEY_SET_LENGTH);
return { aesKey, macKey };
}
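For reference, a small sketch of the key layout `splitKeys` assumes, with the constant values taken to be a 32-byte AES-256 key followed by a 32-byte HMAC-SHA256 key (64 bytes total, matching the old `KEY_SET_LENGTH = KEY_LENGTH + MAC_LENGTH` removed above — verify against ts/types/Crypto):

```ts
// Assumed values mirroring ts/types/Crypto:
const AES_KEY_LENGTH = 32; // AES-256 key
const MAC_LENGTH = 32; // HMAC-SHA256 key
const KEY_SET_LENGTH = AES_KEY_LENGTH + MAC_LENGTH; // 64-byte key set

const keys = new Uint8Array(KEY_SET_LENGTH); // e.g. from randomBytes(64)
const aesKey = keys.subarray(0, AES_KEY_LENGTH); // bytes 0..31
const macKey = keys.subarray(AES_KEY_LENGTH, KEY_SET_LENGTH); // bytes 32..63
```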
@ -718,6 +671,39 @@ export function getAesCbcCiphertextLength(plaintextLength: number): number {
);
}
function checkIntegrity({
locallyCalculatedDigest,
locallyCalculatedPlaintextHash,
integrityCheck,
logId,
}: {
locallyCalculatedDigest: Uint8Array;
locallyCalculatedPlaintextHash: Uint8Array;
integrityCheck: IntegrityCheckType;
logId: string;
}): void {
const { type } = integrityCheck;
switch (type) {
case 'encrypted':
if (!constantTimeEqual(locallyCalculatedDigest, integrityCheck.digest)) {
throw new Error(`${logId}: Bad digest`);
}
break;
case 'plaintext':
if (
!constantTimeEqual(
locallyCalculatedPlaintextHash,
integrityCheck.plaintextHash
)
) {
throw new Error(`${logId}: Bad plaintextHash`);
}
break;
default:
throw missingCaseError(type);
}
}
/**
* Prepends the iv to the stream.
*/
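A plausible call shape for the file-private `checkIntegrity` helper above, shown only as a usage sketch; the local variables stand in for the values computed inside `decryptAttachmentV2ToSink`:

```ts
// Hypothetical caller: pick whichever integrity source the job carried.
const integrityCheck: IntegrityCheckType = wasPreviouslyDownloaded
  ? { type: 'plaintext', plaintextHash: ourPlaintextHash }
  : { type: 'encrypted', digest: theirDigest };

checkIntegrity({
  locallyCalculatedDigest: ourDigest,
  locallyCalculatedPlaintextHash: ourPlaintextHash,
  integrityCheck,
  logId: 'example',
}); // throws "Bad digest" or "Bad plaintextHash" on mismatch
```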


@ -81,6 +81,7 @@ const defaultGroups: Array<GroupListItemConversationType> = [
];
const backupMediaDownloadProgress = {
isBackupMediaEnabled: true,
downloadedBytes: 1024,
totalBytes: 4098,
downloadBannerDismissed: false,
@ -146,6 +147,7 @@ const useProps = (overrideProps: OverridePropsType = {}): PropsType => {
markedUnread: false,
},
backupMediaDownloadProgress: {
isBackupMediaEnabled: true,
downloadBannerDismissed: false,
isIdle: false,
isPaused: false,


@ -65,6 +65,7 @@ import { EditState } from './ProfileEditor';
export type PropsType = {
backupMediaDownloadProgress: {
isBackupMediaEnabled: boolean;
totalBytes: number;
downloadedBytes: number;
isIdle: boolean;
@ -714,7 +715,9 @@ export function LeftPane({
modeSpecificProps.mode === LeftPaneMode.SetGroupMetadata;
const showBackupMediaDownloadProgress =
!hideHeader && !backupMediaDownloadProgress.downloadBannerDismissed;
!hideHeader &&
backupMediaDownloadProgress.isBackupMediaEnabled &&
!backupMediaDownloadProgress.downloadBannerDismissed;
const hasDialogs = dialogs.length ? !hideHeader : false;


@ -191,7 +191,6 @@ export function FullyFilledOutTransientError(): JSX.Element {
avatar: {
avatar: fakeAttachment({
error: true,
iv: 'something',
key: 'something',
digest: 'something',
cdnKey: 'something',


@ -2308,7 +2308,6 @@ EmbeddedContactAvatarTransientError.args = {
...fullContact,
avatar: {
avatar: fakeAttachment({
iv: 'something',
key: 'something',
digest: 'something',
cdnKey: 'something',


@ -48,11 +48,13 @@ export const parseEnvironment = makeEnumParser(
export const isTestEnvironment = (env: Environment): boolean =>
env === Environment.Test;
export const isTestOrMockEnvironment = (): boolean => {
const isMockEnvironment = (): boolean => {
if (isMockTestEnvironment == null) {
log.error('Mock test environment not set');
}
return (
isTestEnvironment(getEnvironment()) || (isMockTestEnvironment ?? false)
);
return isMockTestEnvironment === true;
};
export const isTestOrMockEnvironment = (): boolean => {
return isTestEnvironment(getEnvironment()) || isMockEnvironment();
};


@ -23,7 +23,6 @@ import {
getAttachmentCiphertextLength,
getAesCbcCiphertextLength,
decryptAttachmentV2ToSink,
ReencryptedDigestMismatchError,
} from '../AttachmentCrypto';
import {
getBackupMediaRootKey,
@ -46,6 +45,7 @@ import { fromBase64, toBase64 } from '../Bytes';
import type { WebAPIType } from '../textsecure/WebAPI';
import {
type AttachmentType,
canAttachmentHaveThumbnail,
mightStillBeOnTransitTier,
} from '../types/Attachment';
import {
@ -54,7 +54,6 @@ import {
makeVideoScreenshot,
} from '../types/VisualAttachment';
import { missingCaseError } from '../util/missingCaseError';
import { canAttachmentHaveThumbnail } from './AttachmentDownloadManager';
import {
isImageTypeSupported,
isVideoTypeSupported,
@ -117,7 +116,7 @@ export class AttachmentBackupManager extends JobManager<CoreAttachmentBackupJobT
): Promise<void> {
await this.addJob(job);
if (job.type === 'standard') {
if (canAttachmentHaveThumbnail(job.data.contentType)) {
if (canAttachmentHaveThumbnail({ contentType: job.data.contentType })) {
await this.addJob({
type: 'thumbnail',
mediaName: getMediaNameForAttachmentThumbnail(job.mediaName),
@ -221,13 +220,6 @@ export async function runAttachmentBackupJob(
return { status: 'finished' };
}
if (error instanceof ReencryptedDigestMismatchError) {
log.error(
`${logId}: Unable to reencrypt to match same digest; content must have changed`
);
return { status: 'finished' };
}
if (
error instanceof Error &&
'code' in error &&
@ -281,17 +273,8 @@ async function backupStandardAttachment(
) {
const jobIdForLogging = getJobIdForLogging(job);
const logId = `AttachmentBackupManager.backupStandardAttachment(${jobIdForLogging})`;
const {
contentType,
digest,
iv,
keys,
localKey,
path,
size,
transitCdnInfo,
version,
} = job.data;
const { contentType, keys, localKey, path, size, transitCdnInfo, version } =
job.data;
const mediaId = getMediaIdFromMediaName(job.mediaName);
const backupKeyMaterial = deriveBackupMediaKeyMaterial(
@ -349,8 +332,6 @@ async function backupStandardAttachment(
absolutePath,
contentType,
dependencies,
digest,
iv,
keys,
localKey,
logPrefix: logId,
@ -386,7 +367,7 @@ async function backupThumbnailAttachment(
const { fullsizePath, fullsizeSize, contentType, version, localKey } =
job.data;
if (!canAttachmentHaveThumbnail(contentType)) {
if (!canAttachmentHaveThumbnail({ contentType })) {
log.error(
`${logId}: cannot generate thumbnail for contentType: ${contentType}`
);
@ -468,8 +449,6 @@ type UploadToTransitTierArgsType = {
decryptAttachmentV2ToSink: typeof decryptAttachmentV2ToSink;
encryptAndUploadAttachment: typeof encryptAndUploadAttachment;
};
digest: string;
iv: string;
keys: string;
localKey?: string;
logPrefix: string;
@ -486,8 +465,6 @@ async function uploadToTransitTier({
absolutePath,
contentType,
dependencies,
digest,
iv,
keys,
localKey,
logPrefix,
@ -519,11 +496,6 @@ async function uploadToTransitTier({
sink
),
dependencies.encryptAndUploadAttachment({
dangerousIv: {
reason: 'reencrypting-for-backup',
iv: fromBase64(iv),
digestToMatch: fromBase64(digest),
},
keys: fromBase64(keys),
needIncrementalMac,
plaintext: { stream: sink, size },
@ -536,11 +508,6 @@ async function uploadToTransitTier({
// Legacy attachments
return dependencies.encryptAndUploadAttachment({
dangerousIv: {
reason: 'reencrypting-for-backup',
iv: fromBase64(iv),
digestToMatch: fromBase64(digest),
},
keys: fromBase64(keys),
needIncrementalMac,
plaintext: { absolutePath },


@ -23,8 +23,11 @@ import {
type AttachmentType,
AttachmentVariant,
AttachmentPermanentlyUndownloadableError,
mightBeOnBackupTier,
mightBeInLocalBackup,
hasRequiredInformationForBackup,
wasImportedFromLocalBackup,
canAttachmentHaveThumbnail,
shouldAttachmentEndUpInRemoteBackup,
getUndownloadedAttachmentSignature,
} from '../types/Attachment';
import { type ReadonlyMessageAttributesType } from '../model-types.d';
import { getMessageById } from '../messages/getMessageById';
@ -42,11 +45,7 @@ import {
type JobManagerJobResultType,
type JobManagerJobType,
} from './JobManager';
import {
isImageTypeSupported,
isVideoTypeSupported,
} from '../util/GoogleChrome';
import { IMAGE_JPEG, type MIMEType } from '../types/MIME';
import { IMAGE_JPEG } from '../types/MIME';
import { AttachmentDownloadSource } from '../sql/Interface';
import { drop } from '../util/drop';
import {
@ -126,14 +125,14 @@ type AttachmentDownloadManagerParamsType = Omit<
};
function getJobId(job: CoreAttachmentDownloadJobType): string {
const { messageId, attachmentType, digest } = job;
return `${messageId}.${attachmentType}.${digest}`;
const { messageId, attachmentType, attachmentSignature } = job;
return `${messageId}.${attachmentType}.${attachmentSignature}`;
}
function getJobIdForLogging(job: CoreAttachmentDownloadJobType): string {
const { sentAt, attachmentType, digest } = job;
const redactedDigest = redactGenericText(digest);
return `${sentAt}.${attachmentType}.${redactedDigest}`;
const { sentAt, attachmentType, attachmentSignature } = job;
const redactedAttachmentSignature = redactGenericText(attachmentSignature);
return `${sentAt}.${attachmentType}.${redactedAttachmentSignature}`;
}
export class AttachmentDownloadManager extends JobManager<CoreAttachmentDownloadJobType> {
@ -185,7 +184,10 @@ export class AttachmentDownloadManager extends JobManager<CoreAttachmentDownload
getJobId,
getJobIdForLogging,
getRetryConfig: job =>
job.attachment.backupLocator?.mediaName
shouldAttachmentEndUpInRemoteBackup({
attachment: job.attachment,
hasMediaBackups: window.Signal.Services.backups.hasMediaBackups(),
})
? BACKUP_RETRY_CONFIG
: DEFAULT_RETRY_CONFIG,
maxConcurrentJobs: MAX_CONCURRENT_JOBS,
@ -278,21 +280,21 @@ export class AttachmentDownloadManager extends JobManager<CoreAttachmentDownload
attachmentType,
ciphertextSize: getAttachmentCiphertextLength(attachment.size),
contentType: attachment.contentType,
digest: attachment.digest,
attachmentSignature: getUndownloadedAttachmentSignature(attachment),
isManualDownload,
messageId,
receivedAt,
sentAt,
size: attachment.size,
// If the attachment does not have a backupLocator, we don't want to store it as a
// If the attachment cannot exist on the backup tier, we don't want to store it as a
// "backup import" attachment, since it's really just a normal attachment that we'll
// try to download from the transit tier (or it's an invalid attachment, etc.). We
// may need to extend the attachment_downloads table in the future to better
// differentiate source vs. location.
source:
mightBeOnBackupTier(attachment) || mightBeInLocalBackup(attachment)
? source
: AttachmentDownloadSource.STANDARD,
// differentiate source vs. location
// TODO: DESKTOP-8879
source: hasRequiredInformationForBackup(attachment)
? source
: AttachmentDownloadSource.STANDARD,
});
if (!parseResult.success) {
@ -473,10 +475,8 @@ async function runDownloadAttachmentJob({
};
}
if (
mightBeOnBackupTier(job.attachment) ||
mightBeInLocalBackup(job.attachment)
) {
// TODO: DESKTOP-8879
if (job.source === AttachmentDownloadSource.BACKUP_IMPORT) {
const currentDownloadedSize =
window.storage.get('backupMediaDownloadCompletedBytes') ?? 0;
drop(
@ -624,13 +624,25 @@ export async function runDownloadAttachmentJobInner({
`${logId}: Text attachment was ${sizeInKib}kib, max is ${maxTextAttachmentSizeInKib}kib`
);
}
const hasMediaBackups = window.Signal.Services.backups.hasMediaBackups();
const mightBeInRemoteBackup = shouldAttachmentEndUpInRemoteBackup({
attachment,
hasMediaBackups,
});
const wasAttachmentImportedFromLocalBackup =
wasImportedFromLocalBackup(attachment);
const alreadyDownloadedBackupThumbnail = Boolean(
job.attachment.thumbnailFromBackup
);
const mightHaveBackupThumbnailToDownload =
!alreadyDownloadedBackupThumbnail &&
mightBeInRemoteBackup &&
canAttachmentHaveThumbnail(attachment) &&
!wasAttachmentImportedFromLocalBackup;
const preferBackupThumbnail =
isForCurrentlyVisibleMessage &&
mightHaveThumbnailOnBackupTier(job.attachment) &&
// TODO (DESKTOP-7204): check if thumbnail exists on attachment, not on job
!job.attachment.thumbnailFromBackup &&
!mightBeInLocalBackup(attachment);
isForCurrentlyVisibleMessage && mightHaveBackupThumbnailToDownload;
if (preferBackupThumbnail) {
logId += '.preferringBackupThumbnail';
@ -705,6 +717,7 @@ export async function runDownloadAttachmentJobInner({
variant: AttachmentVariant.Default,
onSizeUpdate: throttle(onSizeUpdate, 200),
abortSignal,
hasMediaBackups,
},
});
@ -757,11 +770,7 @@ export async function runDownloadAttachmentJobInner({
);
return { downloadedVariant: AttachmentVariant.Default };
} catch (error) {
if (
!job.attachment.thumbnailFromBackup &&
mightHaveThumbnailOnBackupTier(attachment) &&
!preferBackupThumbnail
) {
if (mightHaveBackupThumbnailToDownload && !preferBackupThumbnail) {
log.error(
`${logId}: failed to download fullsize attachment, falling back to backup thumbnail`,
Errors.toLogFormat(error)
@ -839,6 +848,7 @@ async function downloadBackupThumbnail({
onSizeUpdate: noop,
variant: AttachmentVariant.ThumbnailFromBackup,
abortSignal,
hasMediaBackups: true,
},
});
@ -871,17 +881,3 @@ function _markAttachmentAsTransientlyErrored(
): AttachmentType {
return { ...attachment, pending: false, error: true };
}
function mightHaveThumbnailOnBackupTier(
attachment: Pick<AttachmentType, 'backupLocator' | 'contentType'>
): boolean {
if (!attachment.backupLocator?.mediaName) {
return false;
}
return canAttachmentHaveThumbnail(attachment.contentType);
}
export function canAttachmentHaveThumbnail(contentType: MIMEType): boolean {
return isVideoTypeSupported(contentType) || isImageTypeSupported(contentType);
}

View file

@ -6,7 +6,10 @@ import * as Bytes from '../Bytes';
import type { AttachmentDownloadJobTypeType } from '../types/AttachmentDownload';
import type { AttachmentType } from '../types/Attachment';
import { getAttachmentSignatureSafe, isDownloaded } from '../types/Attachment';
import {
doAttachmentsOnSameMessageMatch,
isDownloaded,
} from '../types/Attachment';
import { getMessageById } from '../messages/getMessageById';
import { trimMessageWhitespace } from '../types/BodyRange';
@ -73,11 +76,6 @@ export async function addAttachmentToMessage(
return;
}
const attachmentSignature = getAttachmentSignatureSafe(attachment);
if (!attachmentSignature) {
log.error(`${logPrefix}: Attachment did not have valid signature (digest)`);
}
if (type === 'long-message') {
let handledAnywhere = false;
let attachmentData: Uint8Array | undefined;
@ -100,8 +98,7 @@ export async function addAttachmentToMessage(
}
// This attachment isn't destined for this edit
if (
getAttachmentSignatureSafe(edit.bodyAttachment) !==
attachmentSignature
!doAttachmentsOnSameMessageMatch(edit.bodyAttachment, attachment)
) {
return edit;
}
@ -138,8 +135,7 @@ export async function addAttachmentToMessage(
return;
}
if (
getAttachmentSignatureSafe(existingBodyAttachment) !==
attachmentSignature
!doAttachmentsOnSameMessageMatch(existingBodyAttachment, attachment)
) {
return;
}
@ -179,7 +175,7 @@ export async function addAttachmentToMessage(
return existing;
}
if (attachmentSignature !== getAttachmentSignatureSafe(existing)) {
if (!doAttachmentsOnSameMessageMatch(existing, attachment)) {
return existing;
}
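
The edits above swap raw signature comparison for doAttachmentsOnSameMessageMatch. A hedged sketch of what such a predicate could look like, assuming two attachments on one message match when either stable identifier lines up (the real implementation in ts/types/Attachment.ts may differ):

```
type Matchable = { plaintextHash?: string; digest?: string };

// Sketch only: match on plaintextHash when both sides have one, else digest.
function doAttachmentsMatchSketch(a: Matchable, b: Matchable): boolean {
  if (a.plaintextHash && b.plaintextHash) {
    return a.plaintextHash === b.plaintextHash;
  }
  if (a.digest && b.digest) {
    return a.digest === b.digest;
  }
  return false;
}
```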

View file

@ -125,12 +125,7 @@ import {
isGIF,
isDownloaded,
} from '../../types/Attachment';
import {
getFilePointerForAttachment,
getLocalBackupFilePointerForAttachment,
maybeGetBackupJobForAttachmentAndFilePointer,
maybeGetLocalBackupJobForAttachmentAndFilePointer,
} from './util/filePointers';
import { getFilePointerForAttachment } from './util/filePointers';
import { getBackupMediaRootKey } from './crypto';
import type {
CoreAttachmentBackupJobType,
@ -151,7 +146,11 @@ import { SeenStatus } from '../../MessageSeenStatus';
import { migrateAllMessages } from '../../messages/migrateMessageData';
import { trimBody } from '../../util/longAttachment';
import { generateBackupsSubscriberData } from '../../util/backupSubscriptionData';
import { getEnvironment, isTestEnvironment } from '../../environment';
import {
getEnvironment,
isTestEnvironment,
isTestOrMockEnvironment,
} from '../../environment';
import { calculateLightness } from '../../util/getHSL';
import { isSignalServiceId } from '../../util/isSignalConversation';
import { isValidE164 } from '../../util/isValidE164';
@ -161,7 +160,8 @@ import { getTypingIndicatorSetting } from '../../types/Util';
const log = createLogger('export');
const MAX_CONCURRENCY = 10;
// Temporarily limited to preserve the received_at order
const MAX_CONCURRENCY = 1;
// We want a very generous timeout to make sure that we always resume write
// access to the database.
@ -846,6 +846,7 @@ export class BackupExportStream extends Readable {
const currencyCode = storage.get('subscriberCurrencyCode');
const backupsSubscriberData = generateBackupsSubscriberData();
const backupTier = storage.get('backupTier');
return {
profileKey: storage.get('profileKey'),
@ -903,6 +904,11 @@ export class BackupExportStream extends Readable {
// it builds `customColorIdByUuid`
customChatColors: this.#toCustomChatColors(),
defaultChatStyle: this.#toDefaultChatStyle(),
backupTier: backupTier != null ? Long.fromNumber(backupTier) : null,
// Test only values
...(isTestOrMockEnvironment()
? { optimizeOnDeviceStorage: storage.get('optimizeOnDeviceStorage') }
: {}),
},
};
}
@ -2578,20 +2584,15 @@ export class BackupExportStream extends Readable {
isLocalBackup: boolean;
messageReceivedAt: number;
}): Promise<Backups.FilePointer> {
// We need to always get updatedAttachment in case the attachment wasn't reencryptable
// to the original digest. In that case mediaName will be based on updatedAttachment.
const { filePointer, updatedAttachment } = isLocalBackup
? await getLocalBackupFilePointerForAttachment({
attachment,
backupLevel,
getBackupCdnInfo,
})
: await getFilePointerForAttachment({
attachment,
backupLevel,
getBackupCdnInfo,
});
const { filePointer, backupJob } = await getFilePointerForAttachment({
attachment,
isLocalBackup,
backupLevel,
messageReceivedAt,
getBackupCdnInfo,
});
// TODO: DESKTOP-8887
if (isLocalBackup && filePointer.localLocator) {
// Duplicate attachment check. Local backups can only contain 1 file per mediaName,
// so if we see a duplicate mediaName then we must reuse the previous FilePointer.
@ -2616,36 +2617,8 @@ export class BackupExportStream extends Readable {
this.#mediaNamesToFilePointers.set(mediaName, filePointer);
}
if (updatedAttachment) {
// TODO (DESKTOP-6688): ensure that we update the message/attachment in DB with the
// new keys so that we don't try to re-upload it again on the next export
}
// We don't download attachments during integration tests, so we have no
// "iv" for the attachment and can't create a job
if (this.backupType !== BackupType.TestOnlyPlaintext) {
let backupJob:
| CoreAttachmentBackupJobType
| PartialAttachmentLocalBackupJobType
| null;
if (isLocalBackup) {
backupJob = await maybeGetLocalBackupJobForAttachmentAndFilePointer({
attachment: updatedAttachment ?? attachment,
filePointer,
});
} else {
backupJob = await maybeGetBackupJobForAttachmentAndFilePointer({
attachment: updatedAttachment ?? attachment,
filePointer,
getBackupCdnInfo,
messageReceivedAt,
});
}
if (backupJob) {
this.#attachmentBackupJobs.push(backupJob);
}
return filePointer;
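
Local backups can only contain one file per mediaName (see the duplicate-attachment comment above), so the export stream memoizes the first FilePointer it emits for each name. A self-contained sketch of that reuse, assuming nothing beyond a plain Map:

```
// Sketch only: reuse the first FilePointer seen for a given mediaName.
function dedupeByMediaName<T>(
  seen: Map<string, T>,
  mediaName: string,
  fresh: T
): T {
  const existing = seen.get(mediaName);
  if (existing != null) {
    return existing;
  }
  seen.set(mediaName, fresh);
  return fresh;
}
```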

View file

@ -124,7 +124,11 @@ import {
resetBackupMediaDownloadProgress,
startBackupMediaDownload,
} from '../../util/backupMediaDownload';
import { getEnvironment, isTestEnvironment } from '../../environment';
import {
getEnvironment,
isTestEnvironment,
isTestOrMockEnvironment,
} from '../../environment';
import { hasAttachmentDownloads } from '../../util/hasAttachmentDownloads';
import { isAdhoc, isNightly } from '../../util/version';
import { ToastType } from '../../types/Toast';
@ -820,6 +824,16 @@ export class BackupImportStream extends Writable {
await storage.put('svrPin', svrPin);
}
if (isTestOrMockEnvironment()) {
// Only relevant for tests
await storage.put(
'optimizeOnDeviceStorage',
accountSettings?.optimizeOnDeviceStorage === true
);
}
await storage.put('backupTier', accountSettings?.backupTier?.toNumber());
const { PhoneNumberSharingMode: BackupMode } = Backups.AccountData;
switch (accountSettings?.phoneNumberSharingMode) {
case BackupMode.EVERYBODY:

View file

@ -25,7 +25,6 @@ import { DelimitedStream } from '../../util/DelimitedStream';
import { appendPaddingStream } from '../../util/logPadding';
import { prependStream } from '../../util/prependStream';
import { appendMacStream } from '../../util/appendMacStream';
import { getIvAndDecipher } from '../../util/getIvAndDecipher';
import { getMacAndUpdateHmac } from '../../util/getMacAndUpdateHmac';
import { missingCaseError } from '../../util/missingCaseError';
import { DAY, HOUR, MINUTE } from '../../util/durations';
@ -84,6 +83,7 @@ import {
validateLocalBackupStructure,
} from './util/localBackup';
import { AttachmentLocalBackupManager } from '../../jobs/AttachmentLocalBackupManager';
import { decipherWithAesKey } from '../../util/decipherWithAesKey';
const log = createLogger('index');
@ -630,7 +630,7 @@ export class BackupsService {
createBackupStream(),
getMacAndUpdateHmac(hmac, noop),
progressReporter,
getIvAndDecipher(aesKey),
decipherWithAesKey(aesKey),
createGunzip(),
new DelimitedStream(),
importStream,
@ -1114,6 +1114,10 @@ export class BackupsService {
return result;
}
hasMediaBackups(): boolean {
return window.storage.get('backupTier') === BackupLevel.Paid;
}
getCachedCloudBackupStatus(): BackupStatusType | undefined {
return window.storage.get('cloudBackupStatus');
}
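
hasMediaBackups() reduces the media-tier question to a single storage read. An illustrative usage, mirroring the retry-config branch earlier in this diff; the service object here is a stand-in, not the real BackupsService:

```
const backups = {
  // Stand-in for window.storage.get('backupTier') === BackupLevel.Paid
  hasMediaBackups(): boolean {
    return true;
  },
};

// Downloads that may eventually land on the media tier retry on the more
// generous schedule (BACKUP_RETRY_CONFIG vs DEFAULT_RETRY_CONFIG above).
const useBackupRetrySchedule = backups.hasMediaBackups();
```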

View file

@ -4,6 +4,12 @@
import type { AciString, PniString } from '../../types/ServiceId';
import type { ConversationColorType } from '../../types/Colors';
// Duplicated here to allow loading it in a non-node environment
export enum BackupLevel {
Free = 200,
Paid = 201,
}
export type AboutMe = {
aci: AciString;
pni?: PniString;

View file

@ -1,9 +1,7 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import Long from 'long';
import { BackupLevel } from '@signalapp/libsignal-client/zkgroup';
import { omit } from 'lodash';
import { existsSync } from 'node:fs';
import { BackupLevel } from '@signalapp/libsignal-client/zkgroup';
import {
APPLICATION_OCTET_STREAM,
@ -12,19 +10,15 @@ import {
import { createLogger } from '../../../logging/log';
import {
type AttachmentType,
isDownloadableFromTransitTier,
isDownloadableFromBackupTier,
isAttachmentLocallySaved,
type AttachmentDownloadableFromTransitTier,
type AttachmentDownloadableFromBackupTier,
isDecryptable,
isReencryptableToSameDigest,
isReencryptableWithNewEncryptionInfo,
type ReencryptableAttachment,
hasRequiredInformationForBackup,
hasRequiredInformationToDownloadFromTransitTier,
} from '../../../types/Attachment';
import { Backups, SignalService } from '../../../protobuf';
import * as Bytes from '../../../Bytes';
import { getTimestampFromLong } from '../../../util/timestampLongUtils';
import {
getSafeLongFromTimestamp,
getTimestampFromLong,
} from '../../../util/timestampLongUtils';
import { strictAssert } from '../../../util/assert';
import type {
CoreAttachmentBackupJobType,
@ -33,18 +27,17 @@ import type {
import {
type GetBackupCdnInfoType,
getMediaIdFromMediaName,
getMediaNameForAttachment,
getMediaNameFromDigest,
type BackupCdnInfoType,
getMediaName,
} from './mediaId';
import { redactGenericText } from '../../../util/privacy';
import { missingCaseError } from '../../../util/missingCaseError';
import { toLogFormat } from '../../../types/errors';
import { bytesToUuid } from '../../../util/uuidToBytes';
import { createName } from '../../../util/attachmentPath';
import { ensureAttachmentIsReencryptable } from '../../../util/ensureAttachmentIsReencryptable';
import type { ReencryptionInfo } from '../../../AttachmentCrypto';
import { generateAttachmentKeys } from '../../../AttachmentCrypto';
import { getAttachmentLocalBackupPathFromSnapshotDir } from './localBackup';
import {
isValidAttachmentKey,
isValidPlaintextHash,
} from '../../../types/Crypto';
const log = createLogger('filePointers');
@ -67,14 +60,12 @@ export function convertFilePointerToAttachment(
blurHash,
incrementalMac,
incrementalMacChunkSize,
attachmentLocator,
backupLocator,
invalidAttachmentLocator,
localLocator,
locatorInfo,
} = filePointer;
const doCreateName = options._createName ?? createName;
const commonProps: Omit<AttachmentType, 'size'> = {
const commonProps: AttachmentType = {
size: 0,
contentType: contentType
? stringToMIMEType(contentType)
: APPLICATION_OCTET_STREAM,
@ -88,16 +79,125 @@ export function convertFilePointerToAttachment(
downloadPath: doCreateName(),
};
if (incrementalMac?.length && incrementalMacChunkSize) {
if (Bytes.isNotEmpty(incrementalMac) && incrementalMacChunkSize) {
commonProps.incrementalMac = Bytes.toBase64(incrementalMac);
commonProps.chunkSize = incrementalMacChunkSize;
}
if (locatorInfo) {
const {
key,
localKey,
legacyDigest,
legacyMediaName,
plaintextHash,
encryptedDigest,
size,
transitCdnKey,
transitCdnNumber,
transitTierUploadTimestamp,
mediaTierCdnNumber,
} = locatorInfo;
if (!Bytes.isNotEmpty(key)) {
return {
...commonProps,
error: true,
size: 0,
downloadPath: undefined,
};
}
const digest = Bytes.isNotEmpty(encryptedDigest)
? encryptedDigest
: legacyDigest;
let mediaName: string | undefined;
if (Bytes.isNotEmpty(plaintextHash) && Bytes.isNotEmpty(key)) {
mediaName =
getMediaName({
key,
plaintextHash,
}) ?? undefined;
} else if (legacyMediaName) {
mediaName = legacyMediaName;
}
let localBackupPath: string | undefined;
if (Bytes.isNotEmpty(localKey)) {
const { localBackupSnapshotDir } = options;
strictAssert(
localBackupSnapshotDir,
'localBackupSnapshotDir is required for filePointer.localLocator'
);
if (mediaName) {
localBackupPath = getAttachmentLocalBackupPathFromSnapshotDir(
mediaName,
localBackupSnapshotDir
);
} else {
log.error(
'convertFilePointerToAttachment: localKey but no plaintextHash'
);
}
}
return {
...commonProps,
key: Bytes.toBase64(key),
digest: Bytes.isNotEmpty(digest) ? Bytes.toBase64(digest) : undefined,
size: size ?? 0,
cdnKey: transitCdnKey ?? undefined,
cdnNumber: transitCdnNumber ?? undefined,
uploadTimestamp: transitTierUploadTimestamp
? getTimestampFromLong(transitTierUploadTimestamp)
: undefined,
plaintextHash: Bytes.isNotEmpty(plaintextHash)
? Bytes.toHex(plaintextHash)
: undefined,
localBackupPath,
// TODO: DESKTOP-8883
localKey: Bytes.isNotEmpty(localKey)
? Bytes.toBase64(localKey)
: undefined,
...(mediaName && mediaTierCdnNumber != null
? {
backupCdnNumber: mediaTierCdnNumber,
}
: {}),
};
}
return {
...commonProps,
...getAttachmentLocatorInfoFromLegacyLocators(filePointer, options),
};
}
function getAttachmentLocatorInfoFromLegacyLocators(
filePointer: Backups.FilePointer,
options: Partial<ConvertFilePointerToAttachmentOptions>
) {
const {
attachmentLocator,
backupLocator,
localLocator,
invalidAttachmentLocator,
} = filePointer;
if (invalidAttachmentLocator) {
return {
error: true,
downloadPath: undefined,
};
}
if (attachmentLocator) {
const { cdnKey, cdnNumber, key, digest, uploadTimestamp, size } =
attachmentLocator;
return {
...commonProps,
size: size ?? 0,
cdnKey: cdnKey ?? undefined,
cdnNumber: cdnNumber ?? undefined,
@ -109,6 +209,7 @@ export function convertFilePointerToAttachment(
};
}
// These are legacy locators so the mediaName would not be correct
if (backupLocator) {
const {
mediaName,
@ -121,18 +222,16 @@ export function convertFilePointerToAttachment(
} = backupLocator;
return {
...commonProps,
cdnKey: transitCdnKey ?? undefined,
cdnNumber: transitCdnNumber ?? undefined,
key: key?.length ? Bytes.toBase64(key) : undefined,
digest: digest?.length ? Bytes.toBase64(digest) : undefined,
size: size ?? 0,
backupLocator: mediaName
...(mediaName && cdnNumber != null
? {
mediaName,
cdnNumber: cdnNumber ?? undefined,
backupCdnNumber: cdnNumber,
}
: undefined,
: {}),
};
}
@ -140,7 +239,6 @@ export function convertFilePointerToAttachment(
const {
mediaName,
localKey,
backupCdnNumber,
remoteKey: key,
remoteDigest: digest,
size,
@ -159,9 +257,8 @@ export function convertFilePointerToAttachment(
'convertFilePointerToAttachment: filePointer.localLocator missing mediaName!'
);
return {
...omit(commonProps, 'downloadPath'),
error: true,
size: 0,
downloadPath: undefined,
};
}
const localBackupPath = getAttachmentLocalBackupPathFromSnapshotDir(
@ -170,7 +267,6 @@ export function convertFilePointerToAttachment(
);
return {
...commonProps,
cdnKey: transitCdnKey ?? undefined,
cdnNumber: transitCdnNumber ?? undefined,
key: key?.length ? Bytes.toBase64(key) : undefined,
@ -178,23 +274,11 @@ export function convertFilePointerToAttachment(
size: size ?? 0,
localBackupPath,
localKey: localKey?.length ? Bytes.toBase64(localKey) : undefined,
backupLocator: backupCdnNumber
? {
mediaName,
cdnNumber: backupCdnNumber,
}
: undefined,
};
}
if (!invalidAttachmentLocator) {
log.error('convertFilePointerToAttachment: filePointer had no locator');
}
return {
...omit(commonProps, 'downloadPath'),
error: true,
size: 0,
downloadPath: undefined,
};
}
@ -236,17 +320,21 @@ export function convertBackupMessageAttachmentToAttachment(
export async function getFilePointerForAttachment({
attachment,
backupLevel,
getBackupCdnInfo,
backupLevel,
messageReceivedAt,
isLocalBackup = false,
}: {
attachment: Readonly<AttachmentType>;
backupLevel: BackupLevel;
getBackupCdnInfo: GetBackupCdnInfoType;
backupLevel: BackupLevel;
messageReceivedAt: number;
isLocalBackup?: boolean;
}): Promise<{
filePointer: Backups.FilePointer;
updatedAttachment?: AttachmentType;
backupJob?: CoreAttachmentBackupJobType | PartialAttachmentLocalBackupJobType;
}> {
const filePointerRootProps = new Backups.FilePointer({
const filePointer = new Backups.FilePointer({
contentType: attachment.contentType,
fileName: attachment.fileName,
width: attachment.width,
@ -265,431 +353,174 @@ export async function getFilePointerForAttachment({
incrementalMacChunkSize: undefined,
}),
});
const logId = `getFilePointerForAttachment(${redactGenericText(
attachment.digest ?? ''
)})`;
if (attachment.size == null) {
log.warn(`${logId}: attachment had nullish size, dropping`);
return {
filePointer: new Backups.FilePointer({
...filePointerRootProps,
invalidAttachmentLocator: getInvalidAttachmentLocator(),
}),
};
}
if (!isAttachmentLocallySaved(attachment)) {
// 1. If the attachment is undownloaded, we cannot trust its digest / mediaName. Thus,
// we only include a BackupLocator if this attachment already had one (e.g. we
// restored it from a backup and it had a BackupLocator then, which means we
// verified the digest at some point in the past).
if (
isDownloadableFromBackupTier(attachment) &&
backupLevel === BackupLevel.Paid
) {
return {
filePointer: new Backups.FilePointer({
...filePointerRootProps,
backupLocator: getBackupLocator(attachment),
}),
};
}
// 2. Otherwise, we only return the transit CDN info via AttachmentLocator
if (isDownloadableFromTransitTier(attachment)) {
return {
filePointer: new Backups.FilePointer({
...filePointerRootProps,
attachmentLocator: getAttachmentLocator(attachment),
}),
};
}
// 3. Otherwise, we don't have the attachment, and we don't have info to download it
return {
filePointer: new Backups.FilePointer({
...filePointerRootProps,
invalidAttachmentLocator: getInvalidAttachmentLocator(),
}),
};
}
// The attachment is locally saved
if (backupLevel !== BackupLevel.Paid) {
// 1. If we have information to download the file from the transit tier, great, let's
// just create an attachmentLocator so the restorer can try to download from the
// transit tier
if (isDownloadableFromTransitTier(attachment)) {
return {
filePointer: new Backups.FilePointer({
...filePointerRootProps,
attachmentLocator: getAttachmentLocator(attachment),
}),
};
}
// 2. Otherwise, we have the attachment locally, but we don't have information to put
// in the backup proto to allow the restorer to download it. (This shouldn't
// happen!)
log.warn(
`${logId}: Attachment is downloaded but we lack information to decrypt it`
);
return {
filePointer: new Backups.FilePointer({
...filePointerRootProps,
invalidAttachmentLocator: getInvalidAttachmentLocator(),
}),
};
}
// From here on, this attachment is headed to (or already on) the backup tier!
const mediaNameForCurrentVersionOfAttachment = attachment.digest
? getMediaNameForAttachment(attachment)
: undefined;
const backupCdnInfo: BackupCdnInfoType =
mediaNameForCurrentVersionOfAttachment
? await getBackupCdnInfo(
getMediaIdFromMediaName(mediaNameForCurrentVersionOfAttachment).string
)
: { isInBackupTier: false };
// If we have key & digest for this attachment and it's already on backup tier, we can
// reference it
if (isDecryptable(attachment) && backupCdnInfo.isInBackupTier) {
strictAssert(mediaNameForCurrentVersionOfAttachment, 'must exist');
return {
filePointer: new Backups.FilePointer({
...filePointerRootProps,
backupLocator: getBackupLocator({
...attachment,
backupLocator: {
mediaName: mediaNameForCurrentVersionOfAttachment,
cdnNumber: backupCdnInfo.isInBackupTier
? backupCdnInfo.cdnNumber
: undefined,
},
}),
}),
};
}
let reencryptableAttachment: ReencryptableAttachment;
try {
reencryptableAttachment = await ensureAttachmentIsReencryptable(attachment);
} catch (e) {
log.warn('Unable to ensure attachment is reencryptable', toLogFormat(e));
return {
filePointer: new Backups.FilePointer({
...filePointerRootProps,
invalidAttachmentLocator: getInvalidAttachmentLocator(),
}),
};
}
// If we've confirmed that we can re-encrypt this attachment to the same digest, we can
// generate a backupLocator (and upload the file)
if (isReencryptableToSameDigest(reencryptableAttachment)) {
return {
filePointer: new Backups.FilePointer({
...filePointerRootProps,
backupLocator: getBackupLocator({
...reencryptableAttachment,
backupLocator: {
mediaName: getMediaNameFromDigest(reencryptableAttachment.digest),
cdnNumber: backupCdnInfo.isInBackupTier
? backupCdnInfo.cdnNumber
: undefined,
},
}),
}),
updatedAttachment: reencryptableAttachment,
};
}
strictAssert(
reencryptableAttachment.reencryptionInfo,
'Reencryption info must exist if not reencryptable to original digest'
);
const mediaNameForNewEncryptionInfo = getMediaNameFromDigest(
reencryptableAttachment.reencryptionInfo.digest
);
const backupCdnInfoForNewEncryptionInfo = await getBackupCdnInfo(
getMediaIdFromMediaName(mediaNameForNewEncryptionInfo).string
);
return {
filePointer: new Backups.FilePointer({
...filePointerRootProps,
backupLocator: getBackupLocator({
size: reencryptableAttachment.size,
...reencryptableAttachment.reencryptionInfo,
backupLocator: {
mediaName: mediaNameForNewEncryptionInfo,
cdnNumber: backupCdnInfoForNewEncryptionInfo.isInBackupTier
? backupCdnInfoForNewEncryptionInfo.cdnNumber
: undefined,
},
}),
}),
updatedAttachment: reencryptableAttachment,
};
}
// Given a remote backup FilePointer, return a FilePointer referencing a local backup
export async function getLocalBackupFilePointerForAttachment({
attachment,
backupLevel,
getBackupCdnInfo,
}: {
attachment: Readonly<AttachmentType>;
backupLevel: BackupLevel;
getBackupCdnInfo: GetBackupCdnInfoType;
}): Promise<{
filePointer: Backups.FilePointer;
updatedAttachment?: AttachmentType;
}> {
const { filePointer: remoteFilePointer, updatedAttachment } =
await getFilePointerForAttachment({
attachment,
backupLevel,
getBackupCdnInfo,
});
// If a file disappeared locally (maybe we downloaded it and it disappeared)
// or localKey is missing, then we can't export to a local backup.
// Fall back to the filePointer that would have been generated for a remote backup.
const isAttachmentMissingLocally =
attachment.path == null ||
!existsSync(
window.Signal.Migrations.getAbsoluteAttachmentPath(attachment.path)
);
if (isAttachmentMissingLocally || attachment.localKey == null) {
return { filePointer: remoteFilePointer, updatedAttachment };
}
if (remoteFilePointer.backupLocator) {
const { backupLocator } = remoteFilePointer;
const { mediaName } = backupLocator;
strictAssert(
mediaName,
'getLocalBackupFilePointerForAttachment: BackupLocator must have mediaName'
);
const localLocator = new Backups.FilePointer.LocalLocator({
mediaName,
localKey: Bytes.fromBase64(attachment.localKey),
remoteKey: backupLocator.key,
remoteDigest: backupLocator.digest,
size: backupLocator.size,
backupCdnNumber: backupLocator.cdnNumber,
transitCdnKey: backupLocator.transitCdnKey,
transitCdnNumber: backupLocator.transitCdnNumber,
});
return {
filePointer: {
...omit(remoteFilePointer, 'backupLocator'),
localLocator,
},
updatedAttachment,
};
}
if (remoteFilePointer.attachmentLocator) {
const { attachmentLocator } = remoteFilePointer;
const { digest } = attachmentLocator;
strictAssert(
digest,
'getLocalBackupFilePointerForAttachment: AttachmentLocator must have digest'
);
const mediaName = getMediaNameFromDigest(Bytes.toBase64(digest));
strictAssert(
mediaName,
'getLocalBackupFilePointerForAttachment: mediaName must be derivable from AttachmentLocator'
);
const localLocator = new Backups.FilePointer.LocalLocator({
mediaName,
localKey: Bytes.fromBase64(attachment.localKey),
remoteKey: attachmentLocator.key,
remoteDigest: attachmentLocator.digest,
size: attachmentLocator.size,
backupCdnNumber: undefined,
transitCdnKey: attachmentLocator.cdnKey,
transitCdnNumber: attachmentLocator.cdnNumber,
});
return {
filePointer: {
...omit(remoteFilePointer, 'attachmentLocator'),
localLocator,
},
updatedAttachment,
};
}
return { filePointer: remoteFilePointer, updatedAttachment };
}
function getAttachmentLocator(
attachment: AttachmentDownloadableFromTransitTier
) {
return new Backups.FilePointer.AttachmentLocator({
cdnKey: attachment.cdnKey,
cdnNumber: attachment.cdnNumber,
uploadTimestamp: attachment.uploadTimestamp
? Long.fromNumber(attachment.uploadTimestamp)
: null,
digest: Bytes.fromBase64(attachment.digest),
key: Bytes.fromBase64(attachment.key),
size: attachment.size,
const locatorInfo = getLocatorInfoForAttachment({
attachment,
isLocalBackup,
});
}
function getBackupLocator(
attachment: Pick<
AttachmentDownloadableFromBackupTier,
'backupLocator' | 'digest' | 'key' | 'size' | 'cdnKey' | 'cdnNumber'
>
) {
return new Backups.FilePointer.BackupLocator({
mediaName: attachment.backupLocator.mediaName,
cdnNumber: attachment.backupLocator.cdnNumber,
digest: Bytes.fromBase64(attachment.digest),
key: Bytes.fromBase64(attachment.key),
size: attachment.size,
transitCdnKey: attachment.cdnKey,
transitCdnNumber: attachment.cdnNumber,
});
}
function getInvalidAttachmentLocator() {
return new Backups.FilePointer.InvalidAttachmentLocator();
}
export async function maybeGetBackupJobForAttachmentAndFilePointer({
attachment,
filePointer,
getBackupCdnInfo,
messageReceivedAt,
}: {
attachment: AttachmentType;
filePointer: Backups.FilePointer;
getBackupCdnInfo: GetBackupCdnInfoType;
messageReceivedAt: number;
}): Promise<CoreAttachmentBackupJobType | null> {
if (!filePointer.backupLocator) {
return null;
if (locatorInfo) {
filePointer.locatorInfo = locatorInfo;
}
const { mediaName } = filePointer.backupLocator;
strictAssert(mediaName, 'mediaName must exist');
let backupJob:
| CoreAttachmentBackupJobType
| PartialAttachmentLocalBackupJobType
| undefined;
const { isInBackupTier } = await getBackupCdnInfo(
if (backupLevel !== BackupLevel.Paid && !isLocalBackup) {
return { filePointer, backupJob: undefined };
}
if (!Bytes.isNotEmpty(locatorInfo.plaintextHash)) {
return { filePointer, backupJob: undefined };
}
const mediaName = getMediaName({
plaintextHash: locatorInfo.plaintextHash,
key: locatorInfo.key,
});
const backupInfo = await getBackupCdnInfo(
getMediaIdFromMediaName(mediaName).string
);
if (isInBackupTier) {
return null;
if (backupInfo.isInBackupTier) {
if (locatorInfo.mediaTierCdnNumber !== backupInfo.cdnNumber) {
log.warn(
'backupCdnNumber on attachment differs from cdnNumber from list endpoint'
);
// Prefer the one from the list endpoint
locatorInfo.mediaTierCdnNumber = backupInfo.cdnNumber;
}
return { filePointer, backupJob: undefined };
}
strictAssert(
isAttachmentLocallySaved(attachment),
'Attachment must be saved locally for it to be backed up'
);
const { path, localKey, version, size } = attachment;
let encryptionInfo: ReencryptionInfo | undefined;
if (!path || !isValidAttachmentKey(localKey)) {
return { filePointer, backupJob: undefined };
}
if (isReencryptableToSameDigest(attachment)) {
encryptionInfo = {
iv: attachment.iv,
key: attachment.key,
digest: attachment.digest,
if (isLocalBackup) {
backupJob = {
mediaName,
type: 'local',
data: {
path,
size,
localKey,
},
};
} else {
strictAssert(
isReencryptableWithNewEncryptionInfo(attachment) === true,
'must have new encryption info'
);
encryptionInfo = attachment.reencryptionInfo;
backupJob = {
mediaName,
receivedAt: messageReceivedAt,
type: 'standard',
data: {
path,
localKey,
version,
contentType: attachment.contentType,
keys: Bytes.toBase64(locatorInfo.key),
size: locatorInfo.size,
transitCdnInfo:
locatorInfo.transitCdnKey && locatorInfo.transitCdnNumber != null
? {
cdnKey: locatorInfo.transitCdnKey,
cdnNumber: locatorInfo.transitCdnNumber,
uploadTimestamp:
locatorInfo.transitTierUploadTimestamp?.toNumber(),
}
: undefined,
},
};
}
strictAssert(
filePointer.backupLocator.digest,
'digest must exist on backupLocator'
);
strictAssert(
encryptionInfo.digest === Bytes.toBase64(filePointer.backupLocator.digest),
'digest on job and backupLocator must match'
);
const { path, contentType, size, uploadTimestamp, version, localKey } =
attachment;
const { transitCdnKey, transitCdnNumber } = filePointer.backupLocator;
return {
mediaName,
receivedAt: messageReceivedAt,
type: 'standard',
data: {
path,
contentType,
keys: encryptionInfo.key,
digest: encryptionInfo.digest,
iv: encryptionInfo.iv,
size,
version,
localKey,
transitCdnInfo:
transitCdnKey != null && transitCdnNumber != null
? {
cdnKey: transitCdnKey,
cdnNumber: transitCdnNumber,
uploadTimestamp,
}
: undefined,
},
};
return { filePointer, backupJob };
}
export async function maybeGetLocalBackupJobForAttachmentAndFilePointer({
attachment,
filePointer,
function getLocatorInfoForAttachment({
attachment: _rawAttachment,
isLocalBackup,
}: {
attachment: AttachmentType;
filePointer: Backups.FilePointer;
}): Promise<PartialAttachmentLocalBackupJobType | null> {
if (!filePointer.localLocator) {
return null;
isLocalBackup: boolean;
}): Backups.FilePointer.LocatorInfo {
const locatorInfo = new Backups.FilePointer.LocatorInfo();
const attachment = { ..._rawAttachment };
if (attachment.error) {
return locatorInfo;
}
strictAssert(
isAttachmentLocallySaved(attachment),
'Attachment must be saved locally for it to be backed up'
);
{
const isBackupable = hasRequiredInformationForBackup(attachment);
const isDownloadableFromTransitTier =
hasRequiredInformationToDownloadFromTransitTier(attachment);
const { path, size } = attachment;
if (!isBackupable && !isDownloadableFromTransitTier) {
// TODO: DESKTOP-8914
if (
isValidPlaintextHash(attachment.plaintextHash) &&
!isValidAttachmentKey(attachment.key)
) {
attachment.key = Bytes.toBase64(generateAttachmentKeys());
// Delete all info dependent on key
delete attachment.cdnKey;
delete attachment.cdnNumber;
delete attachment.uploadTimestamp;
delete attachment.digest;
delete attachment.backupCdnNumber;
// TODO: For local backups we don't want to back up the same file twice, so
// we could check for an existing copy here and return early if found.
strictAssert(
hasRequiredInformationForBackup(attachment),
'should be backupable with new key'
);
}
}
}
const isBackupable = hasRequiredInformationForBackup(attachment);
const isDownloadableFromTransitTier =
hasRequiredInformationToDownloadFromTransitTier(attachment);
const { localLocator } = filePointer;
if (!isBackupable && !isDownloadableFromTransitTier) {
return locatorInfo;
}
const { localKey: localKeyBytes, mediaName } = localLocator;
strictAssert(mediaName, 'mediaName must exist on localLocator');
strictAssert(localKeyBytes, 'localKey must exist');
locatorInfo.size = attachment.size;
locatorInfo.key = Bytes.fromBase64(attachment.key);
return {
type: 'local',
mediaName,
data: {
path,
size,
localKey: Bytes.toBase64(localKeyBytes),
},
};
if (isDownloadableFromTransitTier) {
locatorInfo.transitCdnKey = attachment.cdnKey;
locatorInfo.transitCdnNumber = attachment.cdnNumber;
locatorInfo.transitTierUploadTimestamp = getSafeLongFromTimestamp(
attachment.uploadTimestamp
);
}
if (isBackupable) {
locatorInfo.plaintextHash = Bytes.fromHex(attachment.plaintextHash);
// TODO: DESKTOP-8887
if (attachment.backupCdnNumber != null) {
locatorInfo.mediaTierCdnNumber = attachment.backupCdnNumber;
}
} else {
locatorInfo.encryptedDigest = Bytes.fromBase64(attachment.digest);
}
// TODO: DESKTOP-8904
if (isLocalBackup && isBackupable) {
const attachmentExistsLocally =
attachment.path != null &&
existsSync(
window.Signal.Migrations.getAbsoluteAttachmentPath(attachment.path)
);
if (attachmentExistsLocally && attachment.localKey) {
locatorInfo.localKey = Bytes.fromBase64(attachment.localKey);
}
}
return locatorInfo;
}
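
In summary, getLocatorInfoForAttachment writes key and size unconditionally, transit-CDN fields when transit download info exists, plaintextHash (plus any known media-tier CDN number) when the attachment is backupable, and encryptedDigest only as the non-backupable fallback. A condensed, self-contained sketch of that decision; the real function also handles errors, local backups, and key regeneration (DESKTOP-8914) as shown above:

```
type LocatorInput = {
  size: number;
  key: Uint8Array;
  plaintextHash?: Uint8Array;
  digest?: Uint8Array;
  transitCdnKey?: string;
  transitCdnNumber?: number;
  backupCdnNumber?: number;
};

// Sketch only: which LocatorInfo fields get populated from which inputs.
function locatorFieldsSketch(a: LocatorInput): Record<string, unknown> {
  const out: Record<string, unknown> = { size: a.size, key: a.key };
  if (a.transitCdnKey != null && a.transitCdnNumber != null) {
    out.transitCdnKey = a.transitCdnKey;
    out.transitCdnNumber = a.transitCdnNumber;
  }
  if (a.plaintextHash) {
    out.plaintextHash = a.plaintextHash;
    if (a.backupCdnNumber != null) {
      out.mediaTierCdnNumber = a.backupCdnNumber;
    }
  } else if (a.digest) {
    out.encryptedDigest = a.digest;
  }
  return out;
}
```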

View file

@ -4,8 +4,7 @@
import { DataReader } from '../../../sql/Client';
import * as Bytes from '../../../Bytes';
import { getBackupMediaRootKey } from '../crypto';
import type { AttachmentType } from '../../../types/Attachment';
import { strictAssert } from '../../../util/assert';
import { type BackupableAttachmentType } from '../../../types/Attachment';
export function getMediaIdFromMediaName(mediaName: string): {
string: string;
@ -18,7 +17,7 @@ export function getMediaIdFromMediaName(mediaName: string): {
};
}
export function getMediaIdForAttachment(attachment: AttachmentType): {
export function getMediaIdForAttachment(attachment: BackupableAttachmentType): {
string: string;
bytes: Uint8Array;
} {
@ -26,7 +25,9 @@ export function getMediaIdForAttachment(attachment: AttachmentType): {
return getMediaIdFromMediaName(mediaName);
}
export function getMediaIdForAttachmentThumbnail(attachment: AttachmentType): {
export function getMediaIdForAttachmentThumbnail(
attachment: BackupableAttachmentType
): {
string: string;
bytes: Uint8Array;
} {
@ -36,16 +37,23 @@ export function getMediaIdForAttachmentThumbnail(attachment: AttachmentType): {
return getMediaIdFromMediaName(mediaName);
}
export function getMediaNameForAttachment(attachment: AttachmentType): string {
if (attachment.backupLocator) {
return attachment.backupLocator.mediaName;
}
strictAssert(attachment.digest, 'Digest must be present');
return getMediaNameFromDigest(attachment.digest);
export function getMediaNameForAttachment(
attachment: BackupableAttachmentType
): string {
return getMediaName({
plaintextHash: Bytes.fromHex(attachment.plaintextHash),
key: Bytes.fromBase64(attachment.key),
});
}
export function getMediaNameFromDigest(digest: string): string {
return Bytes.toHex(Bytes.fromBase64(digest));
export function getMediaName({
plaintextHash,
key,
}: {
plaintextHash: Uint8Array;
key: Uint8Array;
}): string {
return Bytes.toHex(Bytes.concatenate([plaintextHash, key]));
}
export function getMediaNameForAttachmentThumbnail(
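
getMediaName makes the new derivation explicit: the media name is the hex encoding of plaintextHash concatenated with the attachment key, independent of the ciphertext digest. A runnable example with illustrative values, assuming the usual 32-byte SHA-256 plaintext hash and 64-byte combined AES+MAC key:

```
import { createHash, randomBytes } from 'node:crypto';

const plaintextHash = createHash('sha256').update('hello world').digest(); // 32 bytes
const key = randomBytes(64); // combined AES + MAC key material

// hex(plaintextHash || key) — deterministic for a given file + key pair
const mediaName = Buffer.concat([plaintextHash, key]).toString('hex');
console.log(mediaName.length); // 192 hex chars: (32 + 64) * 2
```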

View file

@ -25,6 +25,7 @@ import { strictAssert } from '../util/assert';
import type { ReencryptedAttachmentV2 } from '../AttachmentCrypto';
import { SECOND } from '../util/durations';
import { AttachmentVariant } from '../types/Attachment';
import { MediaTier } from '../types/AttachmentDownload';
const log = createLogger('contactSync');
@ -110,7 +111,7 @@ async function downloadAndParseContactAttachment(
const abortController = new AbortController();
downloaded = await downloadAttachment(
window.textsecure.server,
contactAttachment,
{ attachment: contactAttachment, mediaTier: MediaTier.STANDARD },
{
variant: AttachmentVariant.Default,
onSizeUpdate: noop,

View file

@ -80,9 +80,6 @@ type MigrationsModuleType = {
deleteSticker: (path: string) => Promise<void>;
deleteTempFile: (path: string) => Promise<void>;
doesAttachmentExist: (path: string) => Promise<boolean>;
ensureAttachmentIsReencryptable: (
attachment: TypesAttachment.LocallySavedAttachment
) => Promise<TypesAttachment.ReencryptableAttachment>;
getAbsoluteAttachmentPath: (path: string) => string;
getAbsoluteAvatarPath: (src: string) => string;
getAbsoluteBadgeImageFilePath: (path: string) => string;
@ -170,7 +167,6 @@ export function initializeMigrations({
createPlaintextReader,
createWriterForNew,
createDoesExist,
ensureAttachmentIsReencryptable,
getAvatarsPath,
getDraftPath,
getDownloadsPath,
@ -302,7 +298,6 @@ export function initializeMigrations({
deleteSticker,
deleteTempFile,
doesAttachmentExist,
ensureAttachmentIsReencryptable,
getAbsoluteAttachmentPath,
getAbsoluteAvatarPath,
getAbsoluteBadgeImageFilePath,
@ -326,7 +321,6 @@ export function initializeMigrations({
processNewAttachment: (attachment: AttachmentType) =>
MessageType.processNewAttachment(attachment, {
writeNewAttachmentData,
ensureAttachmentIsReencryptable,
makeObjectUrl,
revokeObjectUrl,
getImageDimensions,
@ -356,7 +350,6 @@ export function initializeMigrations({
return MessageType.upgradeSchema(message, {
deleteOnDisk,
doesAttachmentExist,
ensureAttachmentIsReencryptable,
getImageDimensions,
getRegionCode,
makeImageThumbnail,
@ -425,9 +418,6 @@ type AttachmentsModuleType = {
dirName?: string;
}) => Promise<null | { fullPath: string; name: string }>;
ensureAttachmentIsReencryptable: (
attachment: TypesAttachment.LocallySavedAttachment
) => Promise<TypesAttachment.ReencryptableAttachment>;
readAndDecryptDataFromDisk: (options: {
absolutePath: string;
keysBase64: string;

View file

@ -576,20 +576,14 @@ export const MESSAGE_ATTACHMENT_COLUMNS = [
'width',
'digest',
'key',
'iv',
'flags',
'downloadPath',
'transitCdnKey',
'transitCdnNumber',
'transitCdnUploadTimestamp',
'backupMediaName',
'backupCdnNumber',
'incrementalMac',
'incrementalMacChunkSize',
'isReencryptableToSameDigest',
'reencryptionIv',
'reencryptionKey',
'reencryptionDigest',
'thumbnailPath',
'thumbnailSize',
'thumbnailContentType',
@ -635,7 +629,6 @@ export type MessageAttachmentDBType = {
height: number | null;
flags: number | null;
key: string | null;
iv: string | null;
digest: string | null;
fileName: string | null;
incrementalMac: string | null;
@ -645,7 +638,6 @@ export type MessageAttachmentDBType = {
transitCdnKey: string | null;
transitCdnNumber: number | null;
transitCdnUploadTimestamp: number | null;
backupMediaName: string | null;
backupCdnNumber: number | null;
thumbnailPath: string | null;
thumbnailSize: number | null;
@ -662,9 +654,6 @@ export type MessageAttachmentDBType = {
backupThumbnailContentType: string | null;
backupThumbnailLocalKey: string | null;
backupThumbnailVersion: 1 | 2 | null;
reencryptionIv: string | null;
reencryptionKey: string | null;
reencryptionDigest: string | null;
storyTextAttachmentJson: string | null;
localBackupPath: string | null;
isCorrupted: 1 | 0 | null;
@ -672,7 +661,6 @@ export type MessageAttachmentDBType = {
error: 1 | 0 | null;
wasTooBig: 1 | 0 | null;
pending: 1 | 0 | null;
isReencryptableToSameDigest: 1 | 0 | null;
copiedFromQuotedAttachment: 1 | 0 | null;
};
@ -840,7 +828,7 @@ type ReadableInterface = {
_getAttachmentDownloadJob(
job: Pick<
AttachmentDownloadJobType,
'messageId' | 'attachmentType' | 'digest'
'messageId' | 'attachmentType' | 'attachmentSignature'
>
): AttachmentDownloadJobType | undefined;

View file

@ -301,7 +301,7 @@ type StickerPackRow = InstalledStickerPackRow &
type AttachmentDownloadJobRow = Readonly<{
messageId: string;
attachmentType: string;
digest: string;
attachmentSignature: string;
receivedAt: number;
sentAt: number;
contentType: string;
@ -2589,7 +2589,6 @@ function saveMessageAttachment({
width: attachment.width,
digest: attachment.digest,
key: attachment.key,
iv: attachment.iv,
fileName: attachment.fileName,
downloadPath: attachment.downloadPath,
transitCdnKey: attachment.cdnKey ?? attachment.cdnId,
@ -2597,29 +2596,13 @@ function saveMessageAttachment({
transitCdnUploadTimestamp: isNumber(attachment.uploadTimestamp)
? attachment.uploadTimestamp
: null,
backupMediaName: attachment.backupLocator?.mediaName,
backupCdnNumber: attachment.backupLocator?.cdnNumber,
backupCdnNumber: attachment.backupCdnNumber,
incrementalMac:
// resilience to Uint8Array-stored incrementalMac values
typeof attachment.incrementalMac === 'string'
? attachment.incrementalMac
: null,
incrementalMacChunkSize: attachment.chunkSize,
isReencryptableToSameDigest: convertOptionalBooleanToNullableInteger(
attachment.isReencryptableToSameDigest
),
reencryptionIv:
attachment.isReencryptableToSameDigest === false
? attachment.reencryptionInfo?.iv
: null,
reencryptionKey:
attachment.isReencryptableToSameDigest === false
? attachment.reencryptionInfo?.key
: null,
reencryptionDigest:
attachment.isReencryptableToSameDigest === false
? attachment.reencryptionInfo?.digest
: null,
thumbnailPath: attachment.thumbnail?.path,
thumbnailSize: attachment.thumbnail?.size,
thumbnailContentType: attachment.thumbnail?.contentType,
@ -5452,7 +5435,7 @@ function _getAttachmentDownloadJob(
db: ReadableDB,
job: Pick<
AttachmentDownloadJobType,
'messageId' | 'attachmentType' | 'digest'
'messageId' | 'attachmentType' | 'attachmentSignature'
>
): AttachmentDownloadJobType | undefined {
const [query, params] = sql`
@ -5462,7 +5445,7 @@ function _getAttachmentDownloadJob(
AND
attachmentType = ${job.attachmentType}
AND
digest = ${job.digest};
attachmentSignature = ${job.attachmentSignature};
`;
const row = db.prepare(query).get<AttachmentDownloadJobRow>(params);
@ -5620,7 +5603,7 @@ function saveAttachmentDownloadJob(
INSERT OR REPLACE INTO attachment_downloads (
messageId,
attachmentType,
digest,
attachmentSignature,
receivedAt,
sentAt,
contentType,
@ -5635,7 +5618,7 @@ function saveAttachmentDownloadJob(
) VALUES (
${job.messageId},
${job.attachmentType},
${job.digest},
${job.attachmentSignature},
${job.receivedAt},
${job.sentAt},
${job.contentType},
@ -5664,7 +5647,10 @@ function resetAttachmentDownloadActive(db: WritableDB): void {
function removeAttachmentDownloadJob(
db: WritableDB,
job: Pick<AttachmentDownloadJobRow, 'messageId' | 'attachmentType' | 'digest'>
job: Pick<
AttachmentDownloadJobRow,
'messageId' | 'attachmentType' | 'attachmentSignature'
>
): void {
const [query, params] = sql`
DELETE FROM attachment_downloads
@ -5673,7 +5659,7 @@ function removeAttachmentDownloadJob(
AND
attachmentType = ${job.attachmentType}
AND
digest = ${job.digest};
attachmentSignature = ${job.attachmentSignature};
`;
db.prepare(query).run(params);

View file

@ -26,7 +26,6 @@ import {
import type { AttachmentType } from '../types/Attachment';
import { IMAGE_JPEG, stringToMIMEType } from '../types/MIME';
import { strictAssert } from '../util/assert';
import { sqlLogger } from './sqlLogger';
import type { MessageAttributesType } from '../model-types';
export const ROOT_MESSAGE_ATTACHMENT_EDIT_HISTORY_INDEX = -1;
@ -287,7 +286,6 @@ function convertAttachmentDBFieldsToAttachmentType(
height,
width,
digest,
iv,
key,
downloadPath,
flags,
@ -305,7 +303,6 @@ function convertAttachmentDBFieldsToAttachmentType(
backfillError,
storyTextAttachmentJson,
copiedFromQuotedAttachment,
isReencryptableToSameDigest,
localBackupPath,
} = messageAttachment;
@ -321,7 +318,6 @@ function convertAttachmentDBFieldsToAttachmentType(
height,
width,
digest,
iv,
key,
downloadPath,
localBackupPath,
@ -339,20 +335,10 @@ function convertAttachmentDBFieldsToAttachmentType(
copied: convertOptionalIntegerToBoolean(copiedFromQuotedAttachment),
isCorrupted: convertOptionalIntegerToBoolean(isCorrupted),
backfillError: convertOptionalIntegerToBoolean(backfillError),
isReencryptableToSameDigest: convertOptionalIntegerToBoolean(
isReencryptableToSameDigest
),
textAttachment: storyTextAttachmentJson
? jsonToObject(storyTextAttachmentJson)
: undefined,
...(messageAttachment.backupMediaName
? {
backupLocator: {
mediaName: messageAttachment.backupMediaName,
cdnNumber: messageAttachment.backupCdnNumber,
},
}
: {}),
backupCdnNumber: messageAttachment.backupCdnNumber,
...(messageAttachment.thumbnailPath
? {
thumbnail: {
@ -394,22 +380,5 @@ function convertAttachmentDBFieldsToAttachmentType(
: {}),
};
if (result.isReencryptableToSameDigest === false) {
if (
!messageAttachment.reencryptionIv ||
!messageAttachment.reencryptionKey ||
!messageAttachment.reencryptionDigest
) {
sqlLogger.warn(
'Attachment missing reencryption info despite not being reencryptable'
);
return result;
}
result.reencryptionInfo = {
iv: messageAttachment.reencryptionIv,
key: messageAttachment.reencryptionKey,
digest: messageAttachment.reencryptionDigest,
};
}
return result;
}

View file

@ -2,10 +2,11 @@
// SPDX-License-Identifier: AGPL-3.0-only
import type { Database } from '@signalapp/sqlcipher';
import * as z from 'zod';
import type { LoggerType } from '../../types/Logging';
import {
attachmentDownloadJobSchema,
attachmentDownloadTypeSchema,
type AttachmentDownloadJobType,
type AttachmentDownloadJobTypeType,
} from '../../types/AttachmentDownload';
@ -13,10 +14,15 @@ import type { AttachmentType } from '../../types/Attachment';
import { jsonToObject, objectToJSON, sql } from '../util';
import { AttachmentDownloadSource } from '../Interface';
import { parsePartial } from '../../util/schemas';
import { MIMETypeSchema } from '../../types/MIME';
import {
jobManagerJobSchema,
type JobManagerJobType,
} from '../../jobs/JobManager';
export const version = 1040;
export type LegacyAttachmentDownloadJobType = {
export type _AttachmentDownloadJobTypeV1030 = {
attachment: AttachmentType;
attempts: number;
id: string;
@ -27,6 +33,29 @@ export type LegacyAttachmentDownloadJobType = {
type: AttachmentDownloadJobTypeType;
};
const attachmentDownloadJobSchemaV1040 = z
.object({
attachment: z
.object({ size: z.number(), contentType: MIMETypeSchema })
.passthrough(),
attachmentType: attachmentDownloadTypeSchema,
ciphertextSize: z.number(),
contentType: MIMETypeSchema,
digest: z.string(),
isManualDownload: z.boolean().optional(),
messageId: z.string(),
messageIdForLogging: z.string().optional(),
receivedAt: z.number(),
sentAt: z.number(),
size: z.number(),
source: z.nativeEnum(AttachmentDownloadSource),
})
.and(jobManagerJobSchema);
export type _AttachmentDownloadJobTypeV1040 = Omit<
AttachmentDownloadJobType,
'attachmentSignature'
> & { digest: string };
export function updateToSchemaVersion1040(
currentVersion: number,
db: Database,
@ -112,15 +141,17 @@ export function updateToSchemaVersion1040(
`);
// 8. Rewrite old rows to match new schema
const rowsToTransfer: Array<AttachmentDownloadJobType> = [];
const rowsToTransfer: Array<
_AttachmentDownloadJobTypeV1040 & JobManagerJobType
> = [];
for (const existingJob of existingJobs) {
try {
// Type this as partial in case there is missing data
const existingJobData: Partial<LegacyAttachmentDownloadJobType> =
const existingJobData: Partial<_AttachmentDownloadJobTypeV1030> =
jsonToObject(existingJob.json ?? '');
const updatedJob: Partial<AttachmentDownloadJobType> = {
const updatedJob: Partial<_AttachmentDownloadJobTypeV1040> = {
messageId: existingJobData.messageId,
attachmentType: existingJobData.type,
attachment: existingJobData.attachment,
@ -140,9 +171,12 @@ export function updateToSchemaVersion1040(
ciphertextSize: 0,
};
const parsed = parsePartial(attachmentDownloadJobSchema, updatedJob);
const parsed = parsePartial(
attachmentDownloadJobSchemaV1040,
updatedJob
);
rowsToTransfer.push(parsed as AttachmentDownloadJobType);
rowsToTransfer.push(parsed);
} catch {
logger.warn(
`updateToSchemaVersion1040: unable to transfer job ${existingJob.id} to new table; invalid data`

View file

@ -0,0 +1,53 @@
// Copyright 2025 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import type { LoggerType } from '../../types/Logging';
import { type WritableDB } from '../Interface';
export const version = 1390;
export function updateToSchemaVersion1390(
currentVersion: number,
db: WritableDB,
logger: LoggerType
): void {
if (currentVersion >= 1390) {
return;
}
db.transaction(() => {
// TODO: DESKTOP-8879 Digest column is only used for deduplication purposes; here we
// genericize its name to attachmentSignature to allow jobs to be added with
// plaintextHash and no digest
db.exec(`
ALTER TABLE attachment_downloads
RENAME COLUMN digest TO attachmentSignature;
`);
// We no longer need these columns due to the new mediaName derivation
db.exec(`
ALTER TABLE message_attachments
DROP COLUMN iv;
ALTER TABLE message_attachments
DROP COLUMN isReencryptableToSameDigest;
ALTER TABLE message_attachments
DROP COLUMN reencryptionIv;
ALTER TABLE message_attachments
DROP COLUMN reencryptionKey;
ALTER TABLE message_attachments
DROP COLUMN reencryptionDigest;
ALTER TABLE message_attachments
DROP COLUMN backupMediaName;
`);
// Because mediaName has changed, backupCdnNumber is no longer accurate
db.exec(`
UPDATE message_attachments
SET backupCdnNumber = NULL;
`);
db.pragma('user_version = 1390');
})();
logger.info('updateToSchemaVersion1390: success!');
}

View file

@ -113,10 +113,11 @@ import { updateToSchemaVersion1340 } from './1340-recent-gifs';
import { updateToSchemaVersion1350 } from './1350-notification-profiles';
import { updateToSchemaVersion1360 } from './1360-attachments';
import { updateToSchemaVersion1370 } from './1370-message-attachment-indexes';
import { updateToSchemaVersion1380 } from './1380-donation-receipts';
import {
updateToSchemaVersion1380,
updateToSchemaVersion1390,
version as MAX_VERSION,
} from './1380-donation-receipts';
} from './1390-attachment-download-keys';
import { DataWriter } from '../Server';
@ -2108,6 +2109,7 @@ export const SCHEMA_VERSIONS = [
updateToSchemaVersion1360,
updateToSchemaVersion1370,
updateToSchemaVersion1380,
updateToSchemaVersion1390,
];
export class DBVersionFromFutureError extends Error {

View file

@ -19,7 +19,11 @@ import type { StateType as RootStateType } from '../reducer';
import { createLogger } from '../../logging/log';
import { getMessageById } from '../../messages/getMessageById';
import type { ReadonlyMessageAttributesType } from '../../model-types.d';
import { isGIF, isIncremental } from '../../types/Attachment';
import {
getUndownloadedAttachmentSignature,
isGIF,
isIncremental,
} from '../../types/Attachment';
import {
isImageTypeSupported,
isVideoTypeSupported,
@ -287,7 +291,8 @@ function showLightbox(opts: {
if (isIncremental(attachment)) {
// Queue all attachments, but this target attachment should be IMMEDIATE
const wasUpdated = await queueAttachmentDownloads(message, {
attachmentDigestForImmediate: attachment.digest,
attachmentSignatureForImmediate:
getUndownloadedAttachmentSignature(attachment),
isManualDownload: true,
urgency: AttachmentDownloadUrgency.STANDARD,
});

View file

@ -23,6 +23,7 @@ import {
EmojiSkinTone,
isValidEmojiSkinTone,
} from '../../components/fun/data/emojis';
import { BackupLevel } from '../../services/backups/types';
const DEFAULT_PREFERRED_LEFT_PANE_WIDTH = 320;
@ -267,12 +268,14 @@ export const getBackupMediaDownloadProgress = createSelector(
(
state: ItemsStateType
): {
isBackupMediaEnabled: boolean;
totalBytes: number;
downloadedBytes: number;
isPaused: boolean;
downloadBannerDismissed: boolean;
isIdle: boolean;
} => ({
isBackupMediaEnabled: state.backupTier === BackupLevel.Paid,
totalBytes: state.backupMediaDownloadTotalBytes ?? 0,
downloadedBytes: state.backupMediaDownloadCompletedBytes ?? 0,
isPaused: state.backupMediaDownloadPaused ?? false,

View file

@ -4,7 +4,8 @@
import { readFileSync, unlinkSync, writeFileSync } from 'fs';
import { join } from 'path';
import { createCipheriv } from 'crypto';
import { PassThrough } from 'stream';
import { emptyDir } from 'fs-extra';
import { assert } from 'chai';
import { isNumber } from 'lodash';
@ -40,7 +41,6 @@ import {
CipherType,
} from '../Crypto';
import {
type HardcodedIVForEncryptionType,
_generateAttachmentIv,
decryptAttachmentV2,
encryptAttachmentV2ToDisk,
@ -49,10 +49,12 @@ import {
splitKeys,
generateAttachmentKeys,
type DecryptedAttachmentV2,
decryptAttachmentV2ToSink,
} from '../AttachmentCrypto';
import type { AciString, PniString } from '../types/ServiceId';
import { createTempDir, deleteTempDir } from '../updater/common';
import { uuidToBytes, bytesToUuid } from '../util/uuidToBytes';
import { getPath } from '../windows/main/attachments';
const log = createLogger('Crypto_test');
@ -589,7 +591,10 @@ describe('Crypto', () => {
idForLogging: 'test',
...splitKeys(keys),
size: FILE_CONTENTS.byteLength,
theirDigest: encryptedAttachment.digest,
integrityCheck: {
type: 'encrypted',
digest: encryptedAttachment.digest,
},
theirIncrementalMac: undefined,
theirChunkSize: undefined,
getAbsoluteAttachmentPath:
@ -612,13 +617,86 @@ describe('Crypto', () => {
}
});
describe('decryptAttachmentV2ToSink', () => {
afterEach(async () => {
await emptyDir(getPath(window.SignalContext.config.userDataPath));
});
it('throws if digest is wrong', async () => {
const keys = generateAttachmentKeys();
const encryptedAttachment = await encryptAttachmentV2ToDisk({
keys,
plaintext: { data: FILE_CONTENTS },
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
needIncrementalMac: true,
});
await assert.isRejected(
decryptAttachmentV2ToSink(
{
type: 'standard',
ciphertextPath:
window.Signal.Migrations.getAbsoluteAttachmentPath(
encryptedAttachment.path
),
idForLogging: 'test',
...splitKeys(keys),
size: FILE_CONTENTS.byteLength,
integrityCheck: {
type: 'encrypted',
digest: sha256(new Uint8Array([1, 2, 3])),
},
theirIncrementalMac: encryptedAttachment.incrementalMac,
theirChunkSize: encryptedAttachment.chunkSize,
},
new PassThrough().resume()
),
/Bad digest/
);
});
it('throws if plaintextHash is wrong', async () => {
const keys = generateAttachmentKeys();
const encryptedAttachment = await encryptAttachmentV2ToDisk({
keys,
plaintext: { data: FILE_CONTENTS },
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
needIncrementalMac: true,
});
await assert.isRejected(
decryptAttachmentV2ToSink(
{
type: 'standard',
ciphertextPath:
window.Signal.Migrations.getAbsoluteAttachmentPath(
encryptedAttachment.path
),
idForLogging: 'test',
...splitKeys(keys),
size: FILE_CONTENTS.byteLength,
integrityCheck: {
type: 'plaintext',
plaintextHash: sha256(new Uint8Array([1, 2, 3])),
},
theirIncrementalMac: encryptedAttachment.incrementalMac,
theirChunkSize: encryptedAttachment.chunkSize,
},
new PassThrough().resume()
),
/Bad plaintextHash/
);
});
});
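
These tests exercise the new integrityCheck parameter, which replaces the bare theirDigest: the decryptor verifies either the ciphertext digest or the SHA-256 of the decrypted plaintext, never both. A sketch of the union and the check, with field names and error strings mirrored from the call sites above (the canonical type lives alongside decryptAttachmentV2ToSink in AttachmentCrypto):

```
import { timingSafeEqual } from 'node:crypto';

type IntegrityCheck =
  | { type: 'encrypted'; digest: Uint8Array }
  | { type: 'plaintext'; plaintextHash: Uint8Array };

// Sketch only: compare the expected value against what decryption computed.
function verifyIntegritySketch(
  check: IntegrityCheck,
  computed: { digest: Uint8Array; plaintextHash: Uint8Array }
): void {
  const [expected, actual, label] =
    check.type === 'encrypted'
      ? [check.digest, computed.digest, 'Bad digest']
      : [check.plaintextHash, computed.plaintextHash, 'Bad plaintextHash'];
  if (
    expected.length !== actual.length ||
    !timingSafeEqual(Buffer.from(expected), Buffer.from(actual))
  ) {
    throw new Error(label);
  }
}
```
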
describe('v2 roundtrips', () => {
async function testV2RoundTripData({
path,
data,
plaintextHash,
encryptionKeys,
dangerousIv,
modifyIncrementalMac,
overrideSize,
}: {
@ -626,7 +704,6 @@ describe('Crypto', () => {
data: Uint8Array;
plaintextHash?: Uint8Array;
encryptionKeys?: Uint8Array;
dangerousIv?: HardcodedIVForEncryptionType;
modifyIncrementalMac?: boolean;
overrideSize?: number;
}): Promise<DecryptedAttachmentV2> {
@ -638,7 +715,6 @@ describe('Crypto', () => {
const encryptedAttachment = await encryptAttachmentV2ToDisk({
keys,
plaintext: path ? { absolutePath: path } : { data },
dangerousIv,
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
needIncrementalMac: true,
@ -657,38 +733,47 @@ describe('Crypto', () => {
encryptedAttachment.incrementalMac[macLength / 2] += 1;
}
// Decrypt it via plaintextHash first
await decryptAttachmentV2ToSink(
{
type: 'standard',
ciphertextPath,
idForLogging: 'test',
...splitKeys(keys),
size: overrideSize ?? data.byteLength,
integrityCheck: {
type: 'plaintext',
plaintextHash: Bytes.fromHex(encryptedAttachment.plaintextHash),
},
theirIncrementalMac: encryptedAttachment.incrementalMac,
theirChunkSize: encryptedAttachment.chunkSize,
},
new PassThrough().resume()
);
const decryptedAttachment = await decryptAttachmentV2({
type: 'standard',
ciphertextPath,
idForLogging: 'test',
...splitKeys(keys),
size: overrideSize ?? data.byteLength,
theirDigest: encryptedAttachment.digest,
integrityCheck: {
type: 'encrypted',
digest: encryptedAttachment.digest,
},
theirIncrementalMac: encryptedAttachment.incrementalMac,
theirChunkSize: encryptedAttachment.chunkSize,
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
});
plaintextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
decryptedAttachment.path
);
const plaintext = readFileSync(plaintextPath);
assert.deepStrictEqual(
encryptedAttachment.iv,
decryptedAttachment.iv
);
if (dangerousIv) {
assert.deepStrictEqual(encryptedAttachment.iv, dangerousIv.iv);
if (dangerousIv.reason === 'reencrypting-for-backup') {
assert.deepStrictEqual(
encryptedAttachment.digest,
dangerousIv.digestToMatch
);
}
}
assert.strictEqual(
encryptedAttachment.ciphertextSize,
getAttachmentCiphertextLength(data.byteLength)
@ -788,74 +873,6 @@ describe('Crypto', () => {
plaintextHash,
});
});
describe('isPaddingAllZeros', () => {
it('detects all zeros', async () => {
const decryptedResult = await testV2RoundTripData({
data: FILE_CONTENTS,
});
assert.isTrue(decryptedResult.isReencryptableToSameDigest);
});
it('detects non-zero padding', async () => {
const modifiedData = Buffer.concat([FILE_CONTENTS, Buffer.from([1])]);
const decryptedResult = await testV2RoundTripData({
data: modifiedData,
overrideSize: FILE_CONTENTS.byteLength,
// setting the size as one less than the actual file size will cause the last
// byte (`1`) to be considered padding during decryption
});
assert.isFalse(decryptedResult.isReencryptableToSameDigest);
});
});
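The two cases above rest on how padding is detected: attachment plaintext is padded out to a bucketed size before encryption, so any bytes past the declared `size` count as padding, and only all-zero padding leaves the attachment re-encryptable to the same digest. A minimal sketch of that rule, under those assumptions:

```
// Minimal sketch, not the production check: bytes beyond the declared
// plaintext size are padding, and re-encryption to the same digest is
// only possible when that padding is all zeros.
function isPaddingAllZerosSketch(
  plaintext: Uint8Array,
  declaredSize: number
): boolean {
  return plaintext.subarray(declaredSize).every(byte => byte === 0);
}
```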
describe('dangerousIv', () => {
it('uses hardcodedIv in tests', async () => {
await testV2RoundTripData({
data: FILE_CONTENTS,
plaintextHash: FILE_HASH,
dangerousIv: {
reason: 'test',
iv: _generateAttachmentIv(),
},
});
});
it('uses hardcodedIv when re-encrypting for backup', async () => {
const keys = generateAttachmentKeys();
const previouslyEncrypted = await encryptAttachmentV2ToDisk({
keys,
plaintext: { data: FILE_CONTENTS },
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
needIncrementalMac: true,
});
await testV2RoundTripData({
data: FILE_CONTENTS,
plaintextHash: FILE_HASH,
encryptionKeys: keys,
dangerousIv: {
reason: 'reencrypting-for-backup',
iv: previouslyEncrypted.iv,
digestToMatch: previouslyEncrypted.digest,
},
});
// If the digest is wrong, it should throw
await assert.isRejected(
testV2RoundTripData({
data: FILE_CONTENTS,
plaintextHash: FILE_HASH,
encryptionKeys: keys,
dangerousIv: {
reason: 'reencrypting-for-backup',
iv: previouslyEncrypted.iv,
digestToMatch: getRandomBytes(32),
},
}),
'iv was hardcoded for backup re-encryption, but digest does not match'
);
});
});
});
it('v2 -> v1 (disk -> memory)', async () => {
@ -922,7 +939,7 @@ describe('Crypto', () => {
const encryptedAttachmentV2 = await encryptAttachmentV2ToDisk({
keys,
plaintext: { absolutePath: FILE_PATH },
dangerousIv: { iv: dangerousTestOnlyIv, reason: 'test' },
_testOnlyDangerousIv: dangerousTestOnlyIv,
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
needIncrementalMac: false,
@ -973,7 +990,7 @@ describe('Crypto', () => {
keys: outerKeys,
plaintext: { absolutePath: innerCiphertextPath },
// We (and the server!) don't pad the second layer
dangerousTestOnlySkipPadding: true,
_testOnlyDangerousSkipPadding: true,
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
needIncrementalMac: false,
@ -1014,7 +1031,10 @@ describe('Crypto', () => {
idForLogging: 'test',
...splitKeys(innerKeys),
size: FILE_CONTENTS.byteLength,
theirDigest: encryptResult.innerEncryptedAttachment.digest,
integrityCheck: {
type: 'encrypted',
digest: encryptResult.innerEncryptedAttachment.digest,
},
theirIncrementalMac:
encryptResult.innerEncryptedAttachment.incrementalMac,
theirChunkSize: encryptResult.innerEncryptedAttachment.chunkSize,
@ -1073,7 +1093,10 @@ describe('Crypto', () => {
idForLogging: 'test',
...splitKeys(innerKeys),
size: data.byteLength,
theirDigest: encryptResult.innerEncryptedAttachment.digest,
integrityCheck: {
type: 'encrypted',
digest: encryptResult.innerEncryptedAttachment.digest,
},
theirIncrementalMac:
encryptResult.innerEncryptedAttachment.incrementalMac,
theirChunkSize: encryptResult.innerEncryptedAttachment.chunkSize,
@ -1126,7 +1149,10 @@ describe('Crypto', () => {
idForLogging: 'test',
...splitKeys(innerKeys),
size: data.byteLength,
theirDigest: encryptResult.innerEncryptedAttachment.digest,
integrityCheck: {
type: 'encrypted',
digest: encryptResult.innerEncryptedAttachment.digest,
},
theirIncrementalMac:
encryptResult.innerEncryptedAttachment.incrementalMac,
theirChunkSize: encryptResult.innerEncryptedAttachment.chunkSize,

View file

@ -27,26 +27,25 @@ import type {
MessageAttributesType,
QuotedMessageType,
} from '../../model-types';
import { isVoiceMessage, type AttachmentType } from '../../types/Attachment';
import {
hasRequiredInformationForBackup,
isVoiceMessage,
type AttachmentType,
} from '../../types/Attachment';
import { strictAssert } from '../../util/assert';
import { SignalService } from '../../protobuf';
import { getRandomBytes } from '../../Crypto';
import { loadAllAndReinitializeRedux } from '../../services/allLoaders';
import {
generateAttachmentKeys,
generateKeys,
getPlaintextHashForInMemoryAttachment,
} from '../../AttachmentCrypto';
import { isValidAttachmentKey } from '../../types/Crypto';
const CONTACT_A = generateAci();
const NON_ROUNDTRIPPED_FIELDS = [
'path',
'iv',
'thumbnail',
'screenshot',
'isReencryptableToSameDigest',
];
const NON_ROUNDTRIPPED_BACKUP_LOCATOR_FIELDS = [
...NON_ROUNDTRIPPED_FIELDS,
'uploadTimestamp',
];
const NON_ROUNDTRIPPED_FIELDS = ['path', 'thumbnail', 'screenshot', 'localKey'];
describe('backup/attachments', () => {
let sandbox: sinon.SinonSandbox;
@ -90,14 +89,6 @@ describe('backup/attachments', () => {
sandbox.restore();
});
function getBase64(str: string): string {
return Bytes.toBase64(Bytes.fromString(str));
}
function digestToMediaName(digestBase64: string): string {
return Bytes.toHex(Bytes.fromBase64(digestBase64));
}
function composeAttachment(
index: number,
overrides?: Partial<AttachmentType>
@ -106,13 +97,13 @@ describe('backup/attachments', () => {
cdnKey: `cdnKey${index}`,
cdnNumber: 3,
clientUuid: generateGuid(),
key: getBase64(`key${index}`),
digest: getBase64(`digest${index}`),
iv: getBase64(`iv${index}`),
plaintextHash: Bytes.toHex(getRandomBytes(32)),
key: Bytes.toBase64(generateKeys()),
digest: Bytes.toBase64(getRandomBytes(32)),
size: 100,
contentType: IMAGE_JPEG,
path: `/path/to/file${index}.png`,
isReencryptableToSameDigest: true,
localKey: Bytes.toBase64(generateAttachmentKeys()),
uploadTimestamp: index,
thumbnail: {
size: 1024,
@ -147,6 +138,17 @@ describe('backup/attachments', () => {
};
}
function expectedRoundtrippedFields(
attachment: AttachmentType
): AttachmentType {
const base = omit(attachment, NON_ROUNDTRIPPED_FIELDS);
if (hasRequiredInformationForBackup(attachment)) {
delete base.digest;
} else {
delete base.plaintextHash;
}
return base;
}
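On the branch above: judging by the assertion later in this file ('has key and plaintextHash'), `hasRequiredInformationForBackup` decides whether an attachment can be addressed on the media tier, which in turn decides whether `digest` or `plaintextHash` survives the round trip. A hedged sketch of that predicate:

```
// Hedged sketch of hasRequiredInformationForBackup, inferred from the
// assertions in these tests: an attachment qualifies for backup once it
// carries both its encryption key and a plaintextHash.
function hasRequiredInformationForBackupSketch(attachment: {
  key?: string;
  plaintextHash?: string;
}): boolean {
  return attachment.key != null && attachment.plaintextHash != null;
}
```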
describe('long-message attachments', () => {
it('preserves attachment still on message.attachments', async () => {
const longMessageAttachment = composeAttachment(1, {
@ -164,32 +166,33 @@ describe('backup/attachments', () => {
schemaVersion: 12,
}),
],
// path & iv will not be roundtripped
[
composeMessage(1, {
attachments: [
omit(longMessageAttachment, NON_ROUNDTRIPPED_FIELDS),
omit(normalAttachment, NON_ROUNDTRIPPED_FIELDS),
expectedRoundtrippedFields(longMessageAttachment),
expectedRoundtrippedFields(normalAttachment),
],
}),
],
{ backupLevel: BackupLevel.Free }
]
);
});
it('migration creates long-message attachment if there is a long message.body (i.e. schemaVersion < 13)', async () => {
const body = 'a'.repeat(3000);
const bodyBytes = Bytes.fromString(body);
await asymmetricRoundtripHarness(
[
composeMessage(1, {
body: 'a'.repeat(3000),
body,
schemaVersion: 12,
}),
],
[
composeMessage(1, {
body: 'a'.repeat(2048),
body: body.slice(0, 2048),
bodyAttachment: {
contentType: LONG_MESSAGE,
size: 3000,
size: bodyBytes.byteLength,
plaintextHash: getPlaintextHashForInMemoryAttachment(bodyBytes),
},
}),
],
@ -204,21 +207,16 @@ describe('backup/attachments', () => {
assert.deepStrictEqual(
expected.bodyAttachment,
// all encryption info will be generated anew
omit(msgInDB.bodyAttachment, [
'backupLocator',
'digest',
'key',
'downloadPath',
])
omit(msgInDB.bodyAttachment, ['digest', 'key', 'downloadPath'])
);
assert.isNotEmpty(msgInDB.bodyAttachment?.backupLocator);
assert.isNotEmpty(msgInDB.bodyAttachment?.digest);
assert.isNotEmpty(msgInDB.bodyAttachment?.key);
assert.isUndefined(msgInDB.bodyAttachment?.digest);
assert.isTrue(isValidAttachmentKey(msgInDB.bodyAttachment?.key));
},
}
);
});
it('handles existing bodyAttachments', async () => {
const attachment = omit(
composeAttachment(1, {
@ -237,16 +235,10 @@ describe('backup/attachments', () => {
body: 'a'.repeat(3000),
}),
],
// path & iv will not be roundtripped
[
composeMessage(1, {
body: 'a'.repeat(2048),
bodyAttachment: {
...omit(attachment, NON_ROUNDTRIPPED_BACKUP_LOCATOR_FIELDS),
backupLocator: {
mediaName: digestToMediaName(attachment.digest),
},
},
bodyAttachment: expectedRoundtrippedFields(attachment),
}),
],
{
@ -280,12 +272,11 @@ describe('backup/attachments', () => {
attachments: [attachment1, attachment2],
}),
],
// path & iv will not be roundtripped
[
composeMessage(1, {
attachments: [
omit(attachment1, NON_ROUNDTRIPPED_FIELDS),
omit(attachment2, NON_ROUNDTRIPPED_FIELDS),
expectedRoundtrippedFields(attachment1),
expectedRoundtrippedFields(attachment2),
],
}),
],
@ -304,16 +295,7 @@ describe('backup/attachments', () => {
],
[
composeMessage(1, {
// path, iv, and uploadTimestamp will not be roundtripped,
// but there will be a backupLocator
attachments: [
{
...omit(attachment, NON_ROUNDTRIPPED_BACKUP_LOCATOR_FIELDS),
backupLocator: {
mediaName: digestToMediaName(attachment.digest),
},
},
],
attachments: [expectedRoundtrippedFields(attachment)],
}),
],
{ backupLevel: BackupLevel.Paid }
@ -335,14 +317,7 @@ describe('backup/attachments', () => {
],
[
composeMessage(1, {
attachments: [
{
...omit(attachment, NON_ROUNDTRIPPED_BACKUP_LOCATOR_FIELDS),
backupLocator: {
mediaName: digestToMediaName(attachment.digest),
},
},
],
attachments: [expectedRoundtrippedFields(attachment)],
}),
],
{ backupLevel: BackupLevel.Paid }
@ -368,11 +343,8 @@ describe('backup/attachments', () => {
body: 'hello',
attachments: [
{
...omit(attachment, NON_ROUNDTRIPPED_BACKUP_LOCATOR_FIELDS),
...expectedRoundtrippedFields(attachment),
flags: undefined,
backupLocator: {
mediaName: digestToMediaName(attachment.digest),
},
},
],
}),
@ -395,7 +367,6 @@ describe('backup/attachments', () => {
],
}),
],
// path & iv will not be roundtripped
[
composeMessage(1, {
body: 'https://signal.org',
@ -403,7 +374,7 @@ describe('backup/attachments', () => {
{
url: 'https://signal.org',
date: 1,
image: omit(attachment, NON_ROUNDTRIPPED_FIELDS),
image: expectedRoundtrippedFields(attachment),
},
],
}),
@ -439,14 +410,7 @@ describe('backup/attachments', () => {
date: 1,
title: 'title',
description: 'description',
image: {
// path, iv, and uploadTimestamp will not be roundtripped,
// but there will be a backupLocator
...omit(attachment, NON_ROUNDTRIPPED_BACKUP_LOCATOR_FIELDS),
backupLocator: {
mediaName: digestToMediaName(attachment.digest),
},
},
image: expectedRoundtrippedFields(attachment),
},
],
}),
@ -472,7 +436,7 @@ describe('backup/attachments', () => {
contact: [
{
avatar: {
avatar: omit(attachment, NON_ROUNDTRIPPED_FIELDS),
avatar: expectedRoundtrippedFields(attachment),
isProfile: false,
},
},
@ -492,19 +456,12 @@ describe('backup/attachments', () => {
contact: [{ avatar: { avatar: attachment, isProfile: false } }],
}),
],
// path, iv, and uploadTimestamp will not be roundtripped,
// but there will be a backupLocator
[
composeMessage(1, {
contact: [
{
avatar: {
avatar: {
...omit(attachment, NON_ROUNDTRIPPED_BACKUP_LOCATOR_FIELDS),
backupLocator: {
mediaName: digestToMediaName(attachment.digest),
},
},
avatar: expectedRoundtrippedFields(attachment),
isProfile: false,
},
},
@ -544,7 +501,7 @@ describe('backup/attachments', () => {
...quotedMessage,
attachments: [
{
thumbnail: omit(attachment, NON_ROUNDTRIPPED_FIELDS),
thumbnail: expectedRoundtrippedFields(attachment),
contentType: VIDEO_MP4,
},
],
@ -581,12 +538,7 @@ describe('backup/attachments', () => {
...quotedMessage,
attachments: [
{
thumbnail: {
...omit(attachment, NON_ROUNDTRIPPED_BACKUP_LOCATOR_FIELDS),
backupLocator: {
mediaName: digestToMediaName(attachment.digest),
},
},
thumbnail: expectedRoundtrippedFields(attachment),
contentType: VIDEO_MP4,
},
],
@ -629,17 +581,7 @@ describe('backup/attachments', () => {
[
{
...existingMessage,
attachments: [
{
...omit(
existingAttachment,
NON_ROUNDTRIPPED_BACKUP_LOCATOR_FIELDS
),
backupLocator: {
mediaName: digestToMediaName(existingAttachment.digest),
},
},
],
attachments: [expectedRoundtrippedFields(existingAttachment)],
},
{
...quoteMessage,
@ -650,15 +592,7 @@ describe('backup/attachments', () => {
{
// The thumbnail will not have been copied over yet since it has not yet
// been downloaded
thumbnail: {
...omit(
quoteAttachment,
NON_ROUNDTRIPPED_BACKUP_LOCATOR_FIELDS
),
backupLocator: {
mediaName: digestToMediaName(quoteAttachment.digest),
},
},
thumbnail: expectedRoundtrippedFields(quoteAttachment),
contentType: VIDEO_MP4,
},
],
@ -685,6 +619,8 @@ describe('backup/attachments', () => {
contentType: IMAGE_PNG,
size: 100,
path: 'path/to/thumbnail',
localKey: Bytes.toBase64(generateAttachmentKeys()),
plaintextHash: Bytes.toHex(getRandomBytes(32)),
},
contentType: VIDEO_MP4,
},
@ -728,19 +664,19 @@ describe('backup/attachments', () => {
omit(msgBefore, 'quote.attachments[0].thumbnail'),
omit(msgAfter, 'quote.attachments[0].thumbnail')
);
const { key, digest } = thumbnail;
strictAssert(digest, 'quote digest was created');
strictAssert(key, 'quote digest was created');
const { key, plaintextHash } = thumbnail;
strictAssert(thumbnail, 'thumbnail exists');
strictAssert(key, 'thumbnail key was created');
strictAssert(plaintextHash, 'quote plaintextHash was roundtripped');
strictAssert(
hasRequiredInformationForBackup(thumbnail),
'has key and plaintextHash'
);
assert.deepStrictEqual(thumbnail, {
contentType: IMAGE_PNG,
size: 100,
key,
digest,
backupLocator: {
mediaName: digestToMediaName(digest),
},
key: thumbnail.key,
plaintextHash: thumbnail.plaintextHash,
});
},
}
@ -753,7 +689,8 @@ describe('backup/attachments', () => {
const packKey = Bytes.toBase64(getRandomBytes(32));
describe('when copied over from sticker pack (i.e. missing encryption info)', () => {
it('BackupLevel.Paid, generates new encryption info', async () => {
// TODO: DESKTOP-8896
it.skip('BackupLevel.Paid, generates new encryption info', async () => {
await asymmetricRoundtripHarness(
[
composeMessage(1, {
@ -810,9 +747,6 @@ describe('backup/attachments', () => {
height: 512,
key,
digest,
backupLocator: {
mediaName: digestToMediaName(digest),
},
});
},
}
@ -885,12 +819,7 @@ describe('backup/attachments', () => {
packId,
packKey,
stickerId: 0,
data: {
...omit(attachment, NON_ROUNDTRIPPED_BACKUP_LOCATOR_FIELDS),
backupLocator: {
mediaName: digestToMediaName(attachment.digest),
},
},
data: expectedRoundtrippedFields(attachment),
},
}),
],

File diff suppressed because it is too large

View file

@ -40,6 +40,8 @@ describe('backup/integration', () => {
const files = readdirSync(BACKUP_INTEGRATION_DIR)
.filter(file => file.endsWith('.binproto'))
// TODO: DESKTOP-8906
.filter(file => file !== 'chat_item_view_once_00.binproto')
.map(file => join(BACKUP_INTEGRATION_DIR, file));
if (files.length === 0) {

View file

@ -89,7 +89,6 @@ function composeAttachment(
cdnNumber: 3,
key: getBase64(`key${label}`),
digest: getBase64(`digest${label}`),
iv: getBase64(`iv${label}`),
size: 100,
downloadPath: 'downloadPath',
contentType: IMAGE_JPEG,
@ -107,12 +106,8 @@ function composeAttachment(
flags: 8,
incrementalMac: 'incrementalMac',
chunkSize: 128,
isReencryptableToSameDigest: true,
version: 2,
backupLocator: {
mediaName: `medianame${label}`,
cdnNumber: index,
},
backupCdnNumber: index,
localBackupPath: `localBackupPath/${label}`,
// This would only exist on a story message with contentType TEXT_ATTACHMENT,
// but including it here to ensure we are roundtripping all fields
@ -130,7 +125,6 @@ function composeAttachment(
thumbnail: composeThumbnail(index),
screenshot: composeScreenshot(index),
thumbnailFromBackup: composeBackupThumbnail(index),
...overrides,
} as const;

View file

@ -60,8 +60,6 @@ describe('AttachmentBackupManager/JobManager', function attachmentBackupManager(
path: RELATIVE_ATTACHMENT_PATH,
contentType: VIDEO_MP4,
keys: 'keys=',
iv: 'iv==',
digest: 'digest=',
version: 2,
localKey: LOCAL_ENCRYPTION_KEYS,
transitCdnInfo: {

View file

@ -24,9 +24,14 @@ import { type AttachmentType, AttachmentVariant } from '../../types/Attachment';
import { strictAssert } from '../../util/assert';
import type { downloadAttachment as downloadAttachmentUtil } from '../../util/downloadAttachment';
import { AttachmentDownloadSource } from '../../sql/Interface';
import { getAttachmentCiphertextLength } from '../../AttachmentCrypto';
import {
generateAttachmentKeys,
getAttachmentCiphertextLength,
} from '../../AttachmentCrypto';
import { MEBIBYTE } from '../../types/AttachmentSize';
import { generateAci } from '../../types/ServiceId';
import { toBase64, toHex } from '../../Bytes';
import { getRandomBytes } from '../../Crypto';
function composeJob({
messageId,
@ -38,6 +43,7 @@ function composeJob({
jobOverrides?: Partial<AttachmentDownloadJobType>;
}): AttachmentDownloadJobType {
const digest = `digestFor${messageId}`;
const plaintextHash = toHex(getRandomBytes(32));
const size = 128;
const contentType = MIME.IMAGE_PNG;
return {
@ -45,7 +51,7 @@ function composeJob({
receivedAt,
sentAt: receivedAt,
attachmentType: 'attachment',
digest,
attachmentSignature: `${digest}.${plaintextHash}`,
size,
ciphertextSize: getAttachmentCiphertextLength(size),
contentType,
@ -57,7 +63,9 @@ function composeJob({
attachment: {
contentType,
size,
digest: `digestFor${messageId}`,
digest,
plaintextHash,
key: toBase64(generateAttachmentKeys()),
...attachmentOverrides,
},
...jobOverrides,
@ -185,11 +193,14 @@ describe('AttachmentDownloadManager/JobManager', () => {
.getCalls()
.map(
call =>
`${call.args[0].job.messageId}${call.args[0].job.attachmentType}.${call.args[0].job.digest}`
`${call.args[0].job.messageId}${call.args[0].job.attachmentType}.${call.args[0].job.attachmentSignature}`
)
),
JSON.stringify(
jobs.map(job => `${job.messageId}${job.attachmentType}.${job.digest}`)
jobs.map(
job =>
`${job.messageId}${job.attachmentType}.${job.attachmentSignature}`
)
)
);
}
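Jobs are now keyed by `attachmentSignature` rather than `digest` alone. From `composeJob` above, the signature joins both identifiers, so a job stays addressable whether the attachment is known by its transit-tier digest or its backup-tier plaintextHash. The production helper that builds it is not shown in this diff; a sketch of the shape these tests use:

```
// Inferred illustration of the composite signature shape used by
// composeJob above; treat the exact format as an assumption.
function composeAttachmentSignature(
  digest: string | undefined,
  plaintextHash: string | undefined
): string {
  return `${digest}.${plaintextHash}`;
}
```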
@ -317,17 +328,8 @@ describe('AttachmentDownloadManager/JobManager', () => {
});
it('triggers onLowDiskSpace for backup import jobs', async () => {
const jobs = await addJobs(1, idx => ({
const jobs = await addJobs(1, _idx => ({
source: AttachmentDownloadSource.BACKUP_IMPORT,
digest: `digestFor${idx}`,
attachment: {
contentType: MIME.IMAGE_JPEG,
size: 128,
digest: `digestFor${idx}`,
backupLocator: {
mediaName: 'medianame',
},
},
}));
const jobAttempts = getPromisesForAttempts(jobs[0], 2);
@ -466,20 +468,12 @@ describe('AttachmentDownloadManager/JobManager', () => {
it('only selects backup_import jobs if the mediaDownload is not paused', async () => {
await window.storage.put('backupMediaDownloadPaused', true);
const jobs = await addJobs(6, idx => ({
source:
idx % 2 === 0
? AttachmentDownloadSource.BACKUP_IMPORT
: AttachmentDownloadSource.STANDARD,
digest: `digestFor${idx}`,
attachment: {
contentType: MIME.IMAGE_JPEG,
size: 128,
digest: `digestFor${idx}`,
backupLocator: {
mediaName: 'medianame',
},
},
}));
// make one of the backup job messages visible to test that code path as well
downloadManager?.updateVisibleTimelineMessages(['message-0', 'message-1']);
@ -514,9 +508,8 @@ describe('AttachmentDownloadManager/runDownloadAttachmentJob', () => {
ReturnType<typeof downloadAttachmentUtil>
> = {
path: '/path/to/file',
iv: 'iv',
digest: 'digest',
plaintextHash: 'plaintextHash',
isReencryptableToSameDigest: true,
localKey: 'localKey',
version: 2,
size: 128,
@ -527,6 +520,10 @@ describe('AttachmentDownloadManager/runDownloadAttachmentJob', () => {
downloadAttachment = sandbox
.stub()
.returns(Promise.resolve(downloadedAttachment));
sandbox
.stub(window.Signal.Services.backups, 'hasMediaBackups')
.returns(true);
processNewAttachment = sandbox.stub().callsFake(attachment => attachment);
});
@ -538,6 +535,9 @@ describe('AttachmentDownloadManager/runDownloadAttachmentJob', () => {
const job = composeJob({
messageId: '1',
receivedAt: 1,
attachmentOverrides: {
plaintextHash: undefined,
},
});
const result = await runDownloadAttachmentJobInner({
@ -567,11 +567,6 @@ describe('AttachmentDownloadManager/runDownloadAttachmentJob', () => {
const job = composeJob({
messageId: '1',
receivedAt: 1,
attachmentOverrides: {
backupLocator: {
mediaName: 'medianame',
},
},
});
const result = await runDownloadAttachmentJobInner({
@ -593,12 +588,7 @@ describe('AttachmentDownloadManager/runDownloadAttachmentJob', () => {
);
assert.deepStrictEqual(
omit(result.attachmentWithThumbnail, 'thumbnailFromBackup'),
{
contentType: MIME.IMAGE_PNG,
size: 128,
digest: 'digestFor1',
backupLocator: { mediaName: 'medianame' },
}
job.attachment
);
assert.equal(
result.attachmentWithThumbnail.thumbnailFromBackup?.path,
@ -618,9 +608,6 @@ describe('AttachmentDownloadManager/runDownloadAttachmentJob', () => {
messageId: '1',
receivedAt: 1,
attachmentOverrides: {
backupLocator: {
mediaName: 'medianame',
},
thumbnailFromBackup: {
path: '/path/to/thumbnail',
size: 128,
@ -660,11 +647,6 @@ describe('AttachmentDownloadManager/runDownloadAttachmentJob', () => {
const job = composeJob({
messageId: '1',
receivedAt: 1,
attachmentOverrides: {
backupLocator: {
mediaName: 'medianame',
},
},
});
await assert.isRejected(
@ -704,11 +686,6 @@ describe('AttachmentDownloadManager/runDownloadAttachmentJob', () => {
const job = composeJob({
messageId: '1',
receivedAt: 1,
attachmentOverrides: {
backupLocator: {
mediaName: 'medianame',
},
},
});
const result = await runDownloadAttachmentJobInner({
@ -733,7 +710,7 @@ describe('AttachmentDownloadManager/runDownloadAttachmentJob', () => {
AttachmentVariant.Default
);
});
it('will fallback to thumbnail if main download fails and backuplocator exists', async () => {
it('will fall back to thumbnail if main download fails and attachment might exist on backup', async () => {
downloadAttachment = sandbox.stub().callsFake(({ options }) => {
if (options.variant === AttachmentVariant.Default) {
throw new Error('error while downloading');
@ -744,11 +721,6 @@ describe('AttachmentDownloadManager/runDownloadAttachmentJob', () => {
const job = composeJob({
messageId: '1',
receivedAt: 1,
attachmentOverrides: {
backupLocator: {
mediaName: 'medianame',
},
},
});
const result = await runDownloadAttachmentJobInner({
@ -784,21 +756,24 @@ describe('AttachmentDownloadManager/runDownloadAttachmentJob', () => {
);
});
it("won't fallback to thumbnail if main download fails and no backup locator", async () => {
it("won't fall back to thumbnail if main download fails and attachment is not on backup", async () => {
downloadAttachment = sandbox.stub().callsFake(({ options }) => {
if (options.variant === AttachmentVariant.Default) {
throw new Error('error while downloading');
}
return {
path: '/path/to/thumbnail',
iv: Buffer.alloc(16),
plaintextHash: 'plaintextHash',
digest: 'digest',
};
});
const job = composeJob({
messageId: '1',
receivedAt: 1,
attachmentOverrides: {
plaintextHash: undefined,
},
});
await assert.isRejected(

View file

@ -10,7 +10,10 @@ import { IMAGE_PNG } from '../../types/MIME';
import { downloadAttachment } from '../../util/downloadAttachment';
import { MediaTier } from '../../types/AttachmentDownload';
import { HTTPError } from '../../textsecure/Errors';
import { getCdnNumberForBackupTier } from '../../textsecure/downloadAttachment';
import {
getCdnNumberForBackupTier,
type downloadAttachment as downloadAttachmentFromServer,
} from '../../textsecure/downloadAttachment';
import { MASTER_KEY, MEDIA_ROOT_KEY } from '../backup/helpers';
import { getMediaIdFromMediaName } from '../../services/backups/util/mediaId';
import {
@ -18,17 +21,28 @@ import {
AttachmentPermanentlyUndownloadableError,
} from '../../types/Attachment';
import { updateRemoteConfig } from '../../test-both/helpers/RemoteConfigStub';
import type { WebAPIType } from '../../textsecure/WebAPI';
import { toHex, toBase64 } from '../../Bytes';
import { generateAttachmentKeys } from '../../AttachmentCrypto';
import { getRandomBytes } from '../../Crypto';
describe('utils/downloadAttachment', () => {
const baseAttachment = {
size: 100,
contentType: IMAGE_PNG,
digest: 'digest',
cdnKey: 'cdnKey',
cdnNumber: 2,
key: toBase64(generateAttachmentKeys()),
};
const backupableAttachment = {
...baseAttachment,
plaintextHash: toHex(getRandomBytes(32)),
};
const abortController = new AbortController();
let sandbox: sinon.SinonSandbox;
const fakeServer = {};
const fakeServer = {} as WebAPIType;
beforeEach(() => {
sandbox = sinon.createSandbox();
sandbox.stub(window, 'textsecure').value({ server: fakeServer });
@ -37,16 +51,20 @@ describe('utils/downloadAttachment', () => {
sandbox.restore();
});
function assertDownloadArgs(
actual: unknown,
expected: Parameters<typeof downloadAttachmentFromServer>
) {
assert.deepStrictEqual(actual, expected);
}
it('downloads from transit tier first if no backup information', async () => {
const stubDownload = sinon.stub();
const attachment = {
...baseAttachment,
cdnKey: 'cdnKey',
cdnNumber: 2,
};
const attachment = baseAttachment;
await downloadAttachment({
attachment,
options: {
hasMediaBackups: true,
onSizeUpdate: noop,
abortSignal: abortController.signal,
},
@ -56,11 +74,10 @@ describe('utils/downloadAttachment', () => {
},
});
assert.equal(stubDownload.callCount, 1);
assert.deepEqual(stubDownload.getCall(0).args, [
assertDownloadArgs(stubDownload.getCall(0).args, [
fakeServer,
attachment,
{ attachment, mediaTier: MediaTier.STANDARD },
{
mediaTier: MediaTier.STANDARD,
variant: AttachmentVariant.Default,
onSizeUpdate: noop,
abortSignal: abortController.signal,
@ -75,15 +92,12 @@ describe('utils/downloadAttachment', () => {
.onFirstCall()
.throws(new HTTPError('not found', { code: 404, headers: {} }));
const attachment = {
...baseAttachment,
cdnKey: 'cdnKey',
cdnNumber: 2,
};
const attachment = baseAttachment;
await assert.isRejected(
downloadAttachment({
attachment,
options: {
hasMediaBackups: true,
onSizeUpdate: noop,
abortSignal: abortController.signal,
},
@ -96,11 +110,10 @@ describe('utils/downloadAttachment', () => {
);
assert.equal(stubDownload.callCount, 1);
assert.deepEqual(stubDownload.getCall(0).args, [
assertDownloadArgs(stubDownload.getCall(0).args, [
fakeServer,
attachment,
{ attachment, mediaTier: MediaTier.STANDARD },
{
mediaTier: MediaTier.STANDARD,
variant: AttachmentVariant.Default,
onSizeUpdate: noop,
abortSignal: abortController.signal,
@ -111,17 +124,11 @@ describe('utils/downloadAttachment', () => {
it('downloads from backup tier first if there is backup information', async () => {
const stubDownload = sinon.stub();
const attachment = {
...baseAttachment,
cdnKey: 'cdnKey',
cdnNumber: 2,
backupLocator: {
mediaName: 'medianame',
},
};
const attachment = backupableAttachment;
await downloadAttachment({
attachment,
options: {
hasMediaBackups: true,
onSizeUpdate: noop,
abortSignal: abortController.signal,
},
@ -131,11 +138,10 @@ describe('utils/downloadAttachment', () => {
},
});
assert.equal(stubDownload.callCount, 1);
assert.deepEqual(stubDownload.getCall(0).args, [
assertDownloadArgs(stubDownload.getCall(0).args, [
fakeServer,
attachment,
{ attachment, mediaTier: MediaTier.BACKUP },
{
mediaTier: MediaTier.BACKUP,
variant: AttachmentVariant.Default,
onSizeUpdate: noop,
abortSignal: abortController.signal,
@ -150,17 +156,11 @@ describe('utils/downloadAttachment', () => {
.onFirstCall()
.throws(new HTTPError('not found', { code: 404, headers: {} }));
const attachment = {
...baseAttachment,
cdnKey: 'cdnKey',
cdnNumber: 2,
backupLocator: {
mediaName: 'medianame',
},
};
const attachment = backupableAttachment;
await downloadAttachment({
attachment,
options: {
hasMediaBackups: true,
onSizeUpdate: noop,
abortSignal: abortController.signal,
},
@ -170,22 +170,23 @@ describe('utils/downloadAttachment', () => {
},
});
assert.equal(stubDownload.callCount, 2);
assert.deepEqual(stubDownload.getCall(0).args, [
assertDownloadArgs(stubDownload.getCall(0).args, [
fakeServer,
attachment,
{ attachment, mediaTier: MediaTier.BACKUP },
{
mediaTier: MediaTier.BACKUP,
variant: AttachmentVariant.Default,
onSizeUpdate: noop,
abortSignal: abortController.signal,
logPrefix: '[REDACTED]est',
},
]);
assert.deepEqual(stubDownload.getCall(1).args, [
assertDownloadArgs(stubDownload.getCall(1).args, [
fakeServer,
attachment,
{
attachment,
mediaTier: MediaTier.STANDARD,
},
{
variant: AttachmentVariant.Default,
onSizeUpdate: noop,
abortSignal: abortController.signal,
@ -200,17 +201,11 @@ describe('utils/downloadAttachment', () => {
.onFirstCall()
.throws(new Error('could not decrypt!'));
const attachment = {
...baseAttachment,
cdnKey: 'cdnKey',
cdnNumber: 2,
backupLocator: {
mediaName: 'medianame',
},
};
const attachment = backupableAttachment;
await downloadAttachment({
attachment,
options: {
hasMediaBackups: true,
onSizeUpdate: noop,
abortSignal: abortController.signal,
},
@ -220,22 +215,20 @@ describe('utils/downloadAttachment', () => {
},
});
assert.equal(stubDownload.callCount, 2);
assert.deepEqual(stubDownload.getCall(0).args, [
assertDownloadArgs(stubDownload.getCall(0).args, [
fakeServer,
attachment,
{ attachment, mediaTier: MediaTier.BACKUP },
{
mediaTier: MediaTier.BACKUP,
variant: AttachmentVariant.Default,
onSizeUpdate: noop,
abortSignal: abortController.signal,
logPrefix: '[REDACTED]est',
},
]);
assert.deepEqual(stubDownload.getCall(1).args, [
assertDownloadArgs(stubDownload.getCall(1).args, [
fakeServer,
attachment,
{ attachment, mediaTier: MediaTier.STANDARD },
{
mediaTier: MediaTier.STANDARD,
variant: AttachmentVariant.Default,
onSizeUpdate: noop,
abortSignal: abortController.signal,
@ -244,24 +237,18 @@ describe('utils/downloadAttachment', () => {
]);
});
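Taken together, the cases above pin down the tier order: a backupable attachment is tried on the backup media tier first, and both 404s and decryption failures fall through to the standard transit tier. A minimal sketch of that control flow (the production `downloadAttachment` also handles variants, size updates, and abort signals):

```
// Minimal sketch of the tier-fallback order these tests assert; not the
// production implementation.
async function downloadWithTierFallbackSketch(
  backupable: boolean,
  download: (tier: 'backup' | 'standard') => Promise<string>
): Promise<string> {
  if (backupable) {
    try {
      return await download('backup');
    } catch {
      // 404s and decrypt failures on the backup tier both fall through
    }
  }
  return download('standard');
}
```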
it('does not throw permanently missing error if not found on transit tier but there is backuplocator', async () => {
it('does not throw permanently missing error if not found on transit tier but attachment is backupable', async () => {
const stubDownload = sinon
.stub()
.throws(new HTTPError('not found', { code: 404, headers: {} }));
const attachment = {
...baseAttachment,
cdnKey: 'cdnKey',
cdnNumber: 2,
backupLocator: {
mediaName: 'medianame',
},
};
const attachment = backupableAttachment;
await assert.isRejected(
downloadAttachment({
attachment,
options: {
hasMediaBackups: true,
onSizeUpdate: noop,
abortSignal: abortController.signal,
},
@ -273,22 +260,20 @@ describe('utils/downloadAttachment', () => {
HTTPError
);
assert.equal(stubDownload.callCount, 2);
assert.deepEqual(stubDownload.getCall(0).args, [
assertDownloadArgs(stubDownload.getCall(0).args, [
fakeServer,
attachment,
{ attachment, mediaTier: MediaTier.BACKUP },
{
mediaTier: MediaTier.BACKUP,
variant: AttachmentVariant.Default,
onSizeUpdate: noop,
abortSignal: abortController.signal,
logPrefix: '[REDACTED]est',
},
]);
assert.deepEqual(stubDownload.getCall(1).args, [
assertDownloadArgs(stubDownload.getCall(1).args, [
fakeServer,
attachment,
{ attachment, mediaTier: MediaTier.STANDARD },
{
mediaTier: MediaTier.STANDARD,
variant: AttachmentVariant.Default,
onSizeUpdate: noop,
abortSignal: abortController.signal,
@ -331,18 +316,19 @@ describe('getCdnNumberForBackupTier', () => {
const baseAttachment = {
size: 100,
contentType: IMAGE_PNG,
plaintextHash: 'plaintextHash',
key: 'key',
};
it('uses cdnNumber on attachment', async () => {
const result = await getCdnNumberForBackupTier({
...baseAttachment,
backupLocator: { mediaName: 'mediaName', cdnNumber: 4 },
backupCdnNumber: 4,
});
assert.equal(result, 4);
});
it('uses default cdn number if none on attachment', async () => {
const result = await getCdnNumberForBackupTier({
...baseAttachment,
backupLocator: { mediaName: 'mediaName' },
});
assert.equal(result, 42);
});
@ -356,7 +342,6 @@ describe('getCdnNumberForBackupTier', () => {
]);
const result = await getCdnNumberForBackupTier({
...baseAttachment,
backupLocator: { mediaName: 'mediaName' },
});
assert.equal(result, 42);
});
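This matches the implementation change to `getCdnNumberForBackupTier` near the end of this diff: the `backupLocator.cdnNumber` lookup is gone, replaced by a flat `backupCdnNumber` field with a fallback lookup (stubbed to return 42 in these tests). A hedged sketch of the resolution order:

```
// Hedged sketch: prefer the backupCdnNumber cached on the attachment,
// otherwise resolve it via the attachment's mediaId (the tests above
// stub that lookup to return 42).
async function getCdnNumberForBackupTierSketch(
  attachment: { backupCdnNumber?: number },
  lookupCdnNumberByMediaId: () => Promise<number>
): Promise<number> {
  return attachment.backupCdnNumber ?? lookupCdnNumberByMediaId();
}
```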

View file

@ -1,202 +0,0 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { join } from 'path';
import * as assert from 'assert';
import Sinon from 'sinon';
import { randomBytes } from 'crypto';
import { omit } from 'lodash';
import { readFileSync, statSync } from 'fs';
import type {
AttachmentType,
LocallySavedAttachment,
} from '../../types/Attachment';
import { IMAGE_JPEG } from '../../types/MIME';
import {
encryptAttachmentV2,
generateAttachmentKeys,
safeUnlink,
} from '../../AttachmentCrypto';
import { fromBase64, toBase64 } from '../../Bytes';
import { ensureAttachmentIsReencryptable } from '../../util/ensureAttachmentIsReencryptable';
import { strictAssert } from '../../util/assert';
import { writeNewAttachmentData } from '../../windows/main/attachments';
describe('utils/ensureAttachmentIsReencryptable', async () => {
const fixturesDir = join(__dirname, '..', '..', '..', 'fixtures');
const plaintextFilePath = join(fixturesDir, 'cat-screenshot.png');
const keys = generateAttachmentKeys();
let digest: Uint8Array;
let iv: Uint8Array;
const { size } = statSync(plaintextFilePath);
let sandbox: Sinon.SinonSandbox;
before(async () => {
const encrypted = await encryptAttachmentV2({
keys,
plaintext: {
absolutePath: plaintextFilePath,
},
needIncrementalMac: false,
});
digest = encrypted.digest;
iv = encrypted.iv;
sandbox = Sinon.createSandbox();
const originalGetPath = window.Signal.Migrations.getAbsoluteAttachmentPath;
sandbox
.stub(window.Signal.Migrations, 'getAbsoluteAttachmentPath')
.callsFake(relPath => {
if (relPath === plaintextFilePath) {
return plaintextFilePath;
}
return originalGetPath(relPath);
});
});
after(async () => {
sandbox.restore();
});
describe('v1 attachment', () => {
function composeAttachment(
overrides?: Partial<AttachmentType>
): LocallySavedAttachment {
return {
contentType: IMAGE_JPEG,
size,
iv: toBase64(iv),
key: toBase64(keys),
digest: toBase64(digest),
path: plaintextFilePath,
...overrides,
};
}
it('returns original attachment if reencryptability has already been checked', async () => {
const attachment = composeAttachment({
isReencryptableToSameDigest: true,
});
const result = await ensureAttachmentIsReencryptable(attachment);
assert.deepStrictEqual(attachment, result);
});
it('marks attachment as reencryptable if it is', async () => {
const attachment = composeAttachment();
const result = await ensureAttachmentIsReencryptable(attachment);
assert.deepStrictEqual(
{ ...attachment, isReencryptableToSameDigest: true },
result
);
});
it('marks attachment as unreencryptable and generates info if missing info', async () => {
const attachment = composeAttachment({ iv: undefined });
const result = await ensureAttachmentIsReencryptable(attachment);
assert.deepStrictEqual(
{ ...attachment, isReencryptableToSameDigest: false },
omit(result, 'reencryptionInfo')
);
strictAssert(
result.isReencryptableToSameDigest === false,
'must be false'
);
assert.strictEqual(fromBase64(result.reencryptionInfo.iv).byteLength, 16);
});
it('marks attachment as unreencryptable and generates info if encryption info exists but is wrong', async () => {
const attachment = composeAttachment({ iv: toBase64(randomBytes(16)) });
const result = await ensureAttachmentIsReencryptable(attachment);
assert.deepStrictEqual(
{ ...attachment, isReencryptableToSameDigest: false },
omit(result, 'reencryptionInfo')
);
strictAssert(
result.isReencryptableToSameDigest === false,
'must be false'
);
assert.strictEqual(fromBase64(result.reencryptionInfo.iv).byteLength, 16);
});
});
describe('v2 attachment', () => {
let localKey: string;
let path: string;
before(async () => {
const encryptedLocally = await writeNewAttachmentData({
data: readFileSync(plaintextFilePath),
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
});
localKey = encryptedLocally.localKey;
path = encryptedLocally.path;
});
after(async () => {
if (path) {
await safeUnlink(
window.Signal.Migrations.getAbsoluteAttachmentPath(path)
);
}
});
function composeAttachment(
overrides?: Partial<AttachmentType>
): LocallySavedAttachment {
return {
contentType: IMAGE_JPEG,
size,
iv: toBase64(iv),
key: toBase64(keys),
digest: toBase64(digest),
path,
version: 2,
localKey,
...overrides,
};
}
it('returns original attachment if reencryptability has already been checked', async () => {
const attachment = composeAttachment({
isReencryptableToSameDigest: true,
});
const result = await ensureAttachmentIsReencryptable(attachment);
assert.deepStrictEqual(attachment, result);
});
it('marks attachment as reencryptable if it is', async () => {
const attachment = composeAttachment();
const result = await ensureAttachmentIsReencryptable(attachment);
assert.deepStrictEqual(
{ ...attachment, isReencryptableToSameDigest: true },
result
);
});
it('marks attachment as unreencryptable and generates info if missing info', async () => {
const attachment = composeAttachment({ iv: undefined });
const result = await ensureAttachmentIsReencryptable(attachment);
assert.deepStrictEqual(
{ ...attachment, isReencryptableToSameDigest: false },
omit(result, 'reencryptionInfo')
);
strictAssert(
result.isReencryptableToSameDigest === false,
'must be false'
);
assert.strictEqual(fromBase64(result.reencryptionInfo.iv).byteLength, 16);
});
it('marks attachment as unreencryptable and generates info if encryption info exists but is wrong', async () => {
const attachment = composeAttachment({ iv: toBase64(randomBytes(16)) });
const result = await ensureAttachmentIsReencryptable(attachment);
assert.deepStrictEqual(
{ ...attachment, isReencryptableToSameDigest: false },
omit(result, 'reencryptionInfo')
);
strictAssert(
result.isReencryptableToSameDigest === false,
'must be false'
);
assert.strictEqual(fromBase64(result.reencryptionInfo.iv).byteLength, 16);
});
});
});

View file

@ -7,10 +7,10 @@ import { join } from 'node:path';
import os from 'os';
import { readFile } from 'node:fs/promises';
import createDebug from 'debug';
import Long from 'long';
import { Proto, StorageState } from '@signalapp/mock-server';
import { assert } from 'chai';
import { expect } from 'playwright/test';
import Long from 'long';
import { generateStoryDistributionId } from '../../types/StoryDistributionId';
import { MY_STORY_ID } from '../../types/Stories';
@ -26,6 +26,9 @@ import {
sendTextMessage,
sendReaction,
} from '../helpers';
import { toBase64 } from '../../Bytes';
import { strictAssert } from '../../util/assert';
import { BackupLevel } from '../../services/backups/types';
export const debug = createDebug('mock:test:backups');
@ -43,7 +46,7 @@ const CAT_PATH = join(
);
describe('backups', function (this: Mocha.Suite) {
this.timeout(100 * durations.MINUTE);
this.timeout(durations.MINUTE);
let bootstrap: Bootstrap;
let app: App;
@ -78,6 +81,7 @@ describe('backups', function (this: Mocha.Suite) {
givenName: phone.profileName,
readReceipts: true,
hasCompletedUsernameOnboarding: true,
backupTier: Long.fromNumber(BackupLevel.Paid),
});
state = state.addContact(friend, {
@ -128,7 +132,6 @@ describe('backups', function (this: Mocha.Suite) {
{
const window = await app.getWindow();
debug('wait for storage service sync to finish');
const leftPane = window.locator('#LeftPane');
@ -221,27 +224,35 @@ describe('backups', function (this: Mocha.Suite) {
IMAGE_JPEG
);
sends.push(
pinned.sendRaw(
sendTextMessage({
from: pinned,
to: desktop,
text: 'cat photo',
desktop,
{
dataMessage: {
timestamp: Long.fromNumber(catTimestamp),
attachments: [ciphertextCat],
},
},
{
timestamp: catTimestamp,
}
)
timestamp: catTimestamp,
attachments: [ciphertextCat],
})
);
await Promise.all(sends);
let catPlaintextHash: string;
{
const window = await app.getWindow();
await getMessageInTimelineByTimestamp(window, catTimestamp)
.locator('img')
.waitFor();
const [catMessage] = await app.getMessagesBySentAt(catTimestamp);
const [image] = catMessage.attachments ?? [];
strictAssert(image.plaintextHash, 'plaintextHash was calculated');
strictAssert(image.digest, 'digest was calculated at download time');
strictAssert(
ciphertextCat.digest,
'digest was calculated at upload time'
);
assert.strictEqual(image.digest, toBase64(ciphertextCat.digest));
catPlaintextHash = image.plaintextHash;
}
await exportBackupFn();
@ -251,7 +262,7 @@ describe('backups', function (this: Mocha.Suite) {
async (window, snapshot) => {
const leftPane = window.locator('#LeftPane');
const pinnedElem = leftPane.locator(
`[data-testid="${pinned.toContact().aci}"] >> "Photo"`
`[data-testid="${pinned.toContact().aci}"] >> "cat photo"`
);
debug('Waiting for messages to pinned contact to come through');
@ -303,7 +314,8 @@ describe('backups', function (this: Mocha.Suite) {
// Restart
await bootstrap.eraseStorage();
await server.removeAllCDNAttachments();
app = await bootstrap.link(getBootstrapLinkParams());
const bootstrapLinkParams = getBootstrapLinkParams();
app = await bootstrap.link(bootstrapLinkParams);
await app.waitForBackupImportComplete();
// Make sure that contact sync happens after backup import, otherwise the
@ -318,6 +330,19 @@ describe('backups', function (this: Mocha.Suite) {
.click();
}
{
const [catMessage] = await app.getMessagesBySentAt(catTimestamp);
const [image] = catMessage.attachments ?? [];
if (!bootstrapLinkParams.localBackup) {
strictAssert(
image.digest,
'digest was calculated after download from media tier'
);
assert.strictEqual(image.digest, toBase64(ciphertextCat.digest));
}
assert.strictEqual(image.plaintextHash, catPlaintextHash);
}
await comparator(app);
}
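These assertions capture the identity split this commit introduces: `digest` identifies one particular ciphertext, so it is recomputed whenever the attachment is re-encrypted and may be absent after a local-backup restore, while `plaintextHash` identifies the content itself and survives the backup round trip. A small self-contained illustration (hypothetical values, Node's crypto module) of why the two differ:

```
// Illustration only: two encryptions of the same plaintext under fresh IVs
// yield different ciphertexts (hence different digests), while a hash over
// the plaintext is stable.
import { createCipheriv, createHash, randomBytes } from 'node:crypto';

const sha256 = (data: Buffer): string =>
  createHash('sha256').update(data).digest('hex');

const plaintext = Buffer.from('same cat photo bytes');
const key = randomBytes(32);

function encryptOnce(): Buffer {
  const iv = randomBytes(16);
  const cipher = createCipheriv('aes-256-cbc', key, iv);
  return Buffer.concat([iv, cipher.update(plaintext), cipher.final()]);
}

console.log(sha256(plaintext) === sha256(plaintext)); // true: stable identity
console.log(sha256(encryptOnce()) === sha256(encryptOnce())); // false: IV-dependent
```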

View file

@ -564,8 +564,9 @@ export class Bootstrap {
test?: Mocha.Runnable
): Promise<(app: App) => Promise<void>> {
const snapshots = new Array<{ name: string; data: Buffer }>();
const viewportSize = { width: 1000, height: 2000 } as const;
const window = await app.getWindow();
await window.setViewportSize(viewportSize);
await callback(window, async (name: string) => {
debug('creating screenshot');
snapshots.push({
@ -584,7 +585,7 @@ export class Bootstrap {
const before = snapshots.shift();
assert(before != null, 'No previous snapshot');
assert.strictEqual(before.name, name, 'Wrong snapshot order');
await anotherWindow.setViewportSize(viewportSize);
const after = await anotherWindow.screenshot();
const beforePng = PNG.sync.read(before.data);

View file

@ -258,7 +258,7 @@ export async function createGroup(
for (const member of otherMembers) {
state = state.addContact(member, {
whitelisted: true,
serviceE164: member.device.number,
e164: member.device.number,
identityKey: member.publicKey.serialize(),
profileKey: member.profileKey.serialize(),
givenName: member.profileName,

View file

@ -4,7 +4,6 @@
import createDebug from 'debug';
import { assert } from 'chai';
import { expect } from 'playwright/test';
import { readFile } from 'node:fs/promises';
import { type PrimaryDevice, StorageState } from '@signalapp/mock-server';
import * as path from 'path';
import type { App } from '../playwright';
@ -17,9 +16,6 @@ import {
} from '../helpers';
import * as durations from '../../util/durations';
import { strictAssert } from '../../util/assert';
import { toBase64 } from '../../Bytes';
import type { AttachmentWithNewReencryptionInfoType } from '../../types/Attachment';
import { IMAGE_JPEG } from '../../types/MIME';
export const debug = createDebug('mock:test:attachments');
@ -97,11 +93,6 @@ describe('attachments', function (this: Mocha.Suite) {
)[0];
strictAssert(sentMessage, 'message exists in DB');
const sentAttachment = sentMessage.attachments?.[0];
assert.isTrue(sentAttachment?.isReencryptableToSameDigest);
assert.isUndefined(
(sentAttachment as unknown as AttachmentWithNewReencryptionInfoType)
.reencryptionInfo
);
// For this test, just send back the same attachment that was uploaded to test a
// round-trip
@ -126,71 +117,8 @@ describe('attachments', function (this: Mocha.Suite) {
)[0];
strictAssert(incomingMessage, 'message exists in DB');
const incomingAttachment = incomingMessage.attachments?.[0];
assert.isTrue(incomingAttachment?.isReencryptableToSameDigest);
assert.isUndefined(
(incomingAttachment as unknown as AttachmentWithNewReencryptionInfoType)
.reencryptionInfo
);
assert.strictEqual(incomingAttachment?.key, sentAttachment?.key);
assert.strictEqual(incomingAttachment?.digest, sentAttachment?.digest);
});
it('receiving attachments with non-zero padding will cause new re-encryption info to be generated', async () => {
const page = await app.getWindow();
await page.getByTestId(pinned.device.aci).click();
const plaintextCat = await readFile(CAT_PATH);
const attachment = await bootstrap.storeAttachmentOnCDN(
// add non-zero byte to the end of the data; this will be considered padding
// when received since we will include the size of the un-appended data when
// sending
Buffer.concat([plaintextCat, Buffer.from([1])]),
IMAGE_JPEG
);
const incomingTimestamp = Date.now();
await sendTextMessage({
from: pinned,
to: bootstrap.desktop,
desktop: bootstrap.desktop,
text: 'Wait, that is MY cat! But now with weird padding!',
attachments: [
{
...attachment,
size: plaintextCat.byteLength,
},
],
timestamp: incomingTimestamp,
});
await expect(
getMessageInTimelineByTimestamp(page, incomingTimestamp).locator(
'img.module-image__image'
)
).toBeVisible();
const incomingMessage = (
await app.getMessagesBySentAt(incomingTimestamp)
)[0];
strictAssert(incomingMessage, 'message exists in DB');
const incomingAttachment = incomingMessage.attachments?.[0];
assert.isFalse(incomingAttachment?.isReencryptableToSameDigest);
assert.exists(incomingAttachment?.reencryptionInfo);
assert.exists(incomingAttachment?.reencryptionInfo.digest);
assert.strictEqual(
incomingAttachment?.key,
toBase64(attachment.key ?? new Uint8Array(0))
);
assert.strictEqual(
incomingAttachment?.digest,
toBase64(attachment.digest ?? new Uint8Array(0))
);
assert.notEqual(
incomingAttachment?.digest,
incomingAttachment.reencryptionInfo.digest
);
});
});

View file

@ -54,7 +54,7 @@ describe('messaging/expireTimerVersion', function (this: Mocha.Suite) {
state = state.addContact(stranger, {
identityState: Proto.ContactRecord.IdentityState.DEFAULT,
whitelisted: true,
serviceE164: undefined,
e164: undefined,
profileKey: stranger.profileKey.serialize(),
});

View file

@ -35,7 +35,7 @@ describe('messaging/relink', function (this: Mocha.Suite) {
});
state = state.addContact(first, {
serviceE164: first.device.number,
e164: first.device.number,
profileKey: first.profileKey.serialize(),
givenName: first.profileName,
@ -44,7 +44,7 @@ describe('messaging/relink', function (this: Mocha.Suite) {
});
state = state.addContact(second, {
serviceE164: second.device.number,
e164: second.device.number,
identityKey: second.publicKey.serialize(),
profileKey: second.profileKey.serialize(),
givenName: second.profileName,

View file

@ -100,7 +100,7 @@ describe('story/messaging', function (this: Mocha.Suite) {
for (const contact of [first, second]) {
state = state.addContact(contact, {
whitelisted: true,
serviceE164: contact.device.number,
e164: contact.device.number,
identityKey: contact.publicKey.serialize(),
profileKey: contact.profileKey.serialize(),
givenName: contact.profileName,

View file

@ -55,7 +55,7 @@ describe('pnp/accept gv2 invite', function (this: Mocha.Suite) {
whitelisted: true,
profileKey: undefined,
serviceE164: unknownPniContact.device.number,
e164: unknownPniContact.device.number,
},
ServiceIdKind.PNI
);

View file

@ -59,7 +59,7 @@ describe('pnp/merge', function (this: Mocha.Suite) {
identityKey: pniIdentityKey,
serviceE164: pniContact.device.number,
e164: pniContact.device.number,
givenName: 'PNI Contact',
},
ServiceIdKind.PNI
@ -69,7 +69,7 @@ describe('pnp/merge', function (this: Mocha.Suite) {
identityState: Proto.ContactRecord.IdentityState.DEFAULT,
whitelisted: true,
serviceE164: undefined,
e164: undefined,
identityKey: aciIdentityKey,
givenName: 'ACI Contact',
});
@ -283,7 +283,7 @@ describe('pnp/merge', function (this: Mocha.Suite) {
state = state.updateContact(pniContact, {
pni: undefined,
serviceE164: undefined,
e164: undefined,
unregisteredAtTimestamp: Long.fromNumber(bootstrap.getTimestamp()),
});
@ -296,7 +296,7 @@ describe('pnp/merge', function (this: Mocha.Suite) {
identityKey: pniIdentityKey,
serviceE164: pniContact.device.number,
e164: pniContact.device.number,
givenName: 'PNI Contact',
},
ServiceIdKind.PNI
@ -403,15 +403,15 @@ describe('pnp/merge', function (this: Mocha.Suite) {
throw new Error('Invalid record');
}
const { aci, serviceE164, pni } = contact;
const { aci, e164, pni } = contact;
if (aci === pniContact.device.aci) {
aciContacts += 1;
assert.strictEqual(pni, '');
assert.strictEqual(serviceE164, '');
assert.strictEqual(e164, '');
} else if (pni === toUntaggedPni(pniContact.device.pni)) {
pniContacts += 1;
assert.strictEqual(aci, '');
assert.strictEqual(serviceE164, pniContact.device.number);
assert.strictEqual(e164, pniContact.device.number);
}
}
assert.strictEqual(aciContacts, 1);
@ -473,7 +473,7 @@ describe('pnp/merge', function (this: Mocha.Suite) {
identityKey: aciIdentityKey,
serviceE164: aciContact.device.number,
e164: aciContact.device.number,
givenName: 'ACI Contact',
},
ServiceIdKind.ACI

View file

@ -56,7 +56,7 @@ describe('pnp/phone discovery', function (this: Mocha.Suite) {
identityKey: pniIdentityKey,
serviceE164: pniContact.device.number,
e164: pniContact.device.number,
},
ServiceIdKind.PNI
);

View file

@ -52,7 +52,7 @@ describe('pnp/PNI Change', function (this: Mocha.Suite) {
contactA,
{
whitelisted: true,
serviceE164: contactA.device.number,
e164: contactA.device.number,
identityKey: contactA.getPublicKey(ServiceIdKind.PNI).serialize(),
pni: toUntaggedPni(contactA.device.pni),
givenName: 'ContactA',
@ -144,7 +144,7 @@ describe('pnp/PNI Change', function (this: Mocha.Suite) {
{
identityState: Proto.ContactRecord.IdentityState.DEFAULT,
whitelisted: true,
serviceE164: contactA.device.number,
e164: contactA.device.number,
pni: toUntaggedPni(updatedPni),
identityKey: contactA.getPublicKey(ServiceIdKind.PNI).serialize(),
},
@ -241,7 +241,7 @@ describe('pnp/PNI Change', function (this: Mocha.Suite) {
{
identityState: Proto.ContactRecord.IdentityState.DEFAULT,
whitelisted: true,
serviceE164: contactA.device.number,
e164: contactA.device.number,
pni: toUntaggedPni(contactB.device.pni),
// Key change - different identity key
@ -343,7 +343,7 @@ describe('pnp/PNI Change', function (this: Mocha.Suite) {
{
identityState: Proto.ContactRecord.IdentityState.DEFAULT,
whitelisted: true,
serviceE164: contactA.device.number,
e164: contactA.device.number,
pni: toUntaggedPni(contactB.device.pni),
// Note: No identityKey key provided here!
@ -474,7 +474,7 @@ describe('pnp/PNI Change', function (this: Mocha.Suite) {
{
identityState: Proto.ContactRecord.IdentityState.DEFAULT,
whitelisted: true,
serviceE164: contactA.device.number,
e164: contactA.device.number,
pni: toUntaggedPni(contactB.device.pni),
// Note: No identityKey key provided here!
@ -506,7 +506,7 @@ describe('pnp/PNI Change', function (this: Mocha.Suite) {
{
identityState: Proto.ContactRecord.IdentityState.DEFAULT,
whitelisted: true,
serviceE164: contactA.device.number,
e164: contactA.device.number,
pni: toUntaggedPni(contactA.device.pni),
},
ServiceIdKind.PNI

View file

@ -52,7 +52,7 @@ describe('pnp/username', function (this: Mocha.Suite) {
state = state.addContact(usernameContact, {
username: USERNAME,
serviceE164: undefined,
e164: undefined,
});
// Put contact into left pane

View file

@ -55,7 +55,7 @@ describe('challenge/receipts', function (this: Mocha.Suite) {
contact,
{
whitelisted: true,
serviceE164: contact.device.number,
e164: contact.device.number,
identityKey: contact.getPublicKey(ServiceIdKind.PNI).serialize(),
pni: toUntaggedPni(contact.device.pni),
givenName: 'Jamie',
@ -66,7 +66,7 @@ describe('challenge/receipts', function (this: Mocha.Suite) {
contactB,
{
whitelisted: true,
serviceE164: contactB.device.number,
e164: contactB.device.number,
identityKey: contactB.getPublicKey(ServiceIdKind.PNI).serialize(),
pni: toUntaggedPni(contactB.device.pni),
givenName: 'Kim',

View file

@ -6,9 +6,11 @@ import { assert } from 'chai';
import type { ReadableDB, WritableDB } from '../../sql/Interface';
import { jsonToObject, objectToJSON, sql, sqlJoin } from '../../sql/util';
import { createDB, updateToVersion, explain } from './helpers';
import type { LegacyAttachmentDownloadJobType } from '../../sql/migrations/1040-undownloaded-backed-up-media';
import type {
_AttachmentDownloadJobTypeV1030,
_AttachmentDownloadJobTypeV1040,
} from '../../sql/migrations/1040-undownloaded-backed-up-media';
import type { AttachmentType } from '../../types/Attachment';
import type { AttachmentDownloadJobType } from '../../types/AttachmentDownload';
import { IMAGE_JPEG } from '../../types/MIME';
function getAttachmentDownloadJobs(
@ -28,7 +30,7 @@ function getAttachmentDownloadJobs(
}
type UnflattenedAttachmentDownloadJobType = Omit<
AttachmentDownloadJobType,
_AttachmentDownloadJobTypeV1040,
'digest' | 'contentType' | 'size' | 'source' | 'ciphertextSize'
>;
function insertNewJob(
@ -301,25 +303,27 @@ describe('SQL/updateToSchemaVersion1040', () => {
});
it('respects foreign key constraint on messageId', () => {
const job: Omit<AttachmentDownloadJobType, 'source' | 'ciphertextSize'> =
{
messageId: 'message1',
attachmentType: 'attachment',
attachment: {
digest: 'digest1',
contentType: IMAGE_JPEG,
size: 128,
},
receivedAt: 1970,
const job: Omit<
_AttachmentDownloadJobTypeV1040,
'source' | 'ciphertextSize'
> = {
messageId: 'message1',
attachmentType: 'attachment',
attachment: {
digest: 'digest1',
contentType: IMAGE_JPEG,
size: 128,
sentAt: 2070,
active: false,
retryAfter: null,
attempts: 0,
lastAttemptTimestamp: null,
};
},
receivedAt: 1970,
digest: 'digest1',
contentType: IMAGE_JPEG,
size: 128,
sentAt: 2070,
active: false,
retryAfter: null,
attempts: 0,
lastAttemptTimestamp: null,
};
// throws if we don't add the message first
assert.throws(() => insertNewJob(db, job, false));
insertNewJob(db, job, true);
@ -460,7 +464,7 @@ describe('SQL/updateToSchemaVersion1040', () => {
function insertLegacyJob(
db: WritableDB,
job: Partial<LegacyAttachmentDownloadJobType>
job: Partial<_AttachmentDownloadJobTypeV1030>
): void {
db.prepare('INSERT OR REPLACE INTO messages (id) VALUES ($id)').run({
id: job.messageId ?? null,

View file

@ -5,13 +5,13 @@ import { assert } from 'chai';
import { omit } from 'lodash';
import type { WritableDB } from '../../sql/Interface';
import { createDB, updateToVersion, explain } from './helpers';
import type { AttachmentDownloadJobType } from '../../types/AttachmentDownload';
import { jsonToObject, objectToJSON, sql } from '../../sql/util';
import { IMAGE_BMP } from '../../types/MIME';
import type { _AttachmentDownloadJobTypeV1040 } from '../../sql/migrations/1040-undownloaded-backed-up-media';
function insertOldJob(
db: WritableDB,
job: Omit<AttachmentDownloadJobType, 'source' | 'ciphertextSize'>,
job: Omit<_AttachmentDownloadJobTypeV1040, 'source' | 'ciphertextSize'>,
addMessageFirst: boolean = true
): void {
if (addMessageFirst) {
@ -86,7 +86,10 @@ describe('SQL/updateToSchemaVersion1180', () => {
});
it('adds source column with default standard to any existing jobs', async () => {
const job: Omit<AttachmentDownloadJobType, 'source' | 'ciphertextSize'> = {
const job: Omit<
_AttachmentDownloadJobTypeV1040,
'source' | 'ciphertextSize'
> = {
messageId: '123',
digest: 'digest',
attachmentType: 'attachment',

View file

@ -6,11 +6,11 @@ import { assert } from 'chai';
import { AttachmentDownloadSource, type WritableDB } from '../../sql/Interface';
import { objectToJSON, sql } from '../../sql/util';
import { createDB, updateToVersion, explain } from './helpers';
import type { AttachmentDownloadJobType } from '../../types/AttachmentDownload';
import { IMAGE_JPEG } from '../../types/MIME';
import type { _AttachmentDownloadJobTypeV1040 } from '../../sql/migrations/1040-undownloaded-backed-up-media';
type UnflattenedAttachmentDownloadJobType = Omit<
AttachmentDownloadJobType,
_AttachmentDownloadJobTypeV1040,
'digest' | 'contentType' | 'size' | 'ciphertextSize'
>;

View file

@ -63,9 +63,6 @@ describe('Message', () => {
height: 20,
}),
doesAttachmentExist: async () => true,
// @ts-expect-error ensureAttachmentIsReencryptable has type guards that we don't
// implement here
ensureAttachmentIsReencryptable: async attachment => attachment,
getRegionCode: () => 'region-code',
logger,
makeImageThumbnail: async (_params: {
@ -826,30 +823,4 @@ describe('Message', () => {
assert.deepEqual(result, message);
});
});
describe('toVersion14: ensureAttachmentsAreReencryptable', () => {
it('migrates message if the file does not exist', async () => {
const message = getDefaultMessage({
schemaVersion: 13,
schemaMigrationAttempts: 0,
attachments: [
{
size: 128,
contentType: MIME.IMAGE_BMP,
path: 'no/file/here.png',
iv: 'iv',
digest: 'digest',
key: 'key',
},
],
});
const result = await Message.upgradeSchema(message, {
...getDefaultContext(),
doesAttachmentExist: async () => false,
maxVersion: 14,
});
assert.deepEqual({ ...message, schemaVersion: 14 }, result);
});
});
});

View file

@ -29,6 +29,7 @@ import {
signalDecrypt,
signalDecryptPreKey,
SignalMessage,
UsePQRatchet,
} from '@signalapp/libsignal-client';
import {
@ -1777,7 +1778,8 @@ export default class MessageReceiver
identityKeyStore,
preKeyStore,
signedPreKeyStore,
kyberPreKeyStore
kyberPreKeyStore,
UsePQRatchet.No
);
}
return signalDecrypt(
@ -1904,7 +1906,8 @@ export default class MessageReceiver
identityKeyStore,
preKeyStore,
signedPreKeyStore,
kyberPreKeyStore
kyberPreKeyStore,
UsePQRatchet.No
)
),
zone

View file

@ -431,7 +431,7 @@ export class Provisioner {
.toAppUrl({
uuid,
pubKey: Bytes.toBase64(cipher.getPublicKey().serialize()),
capabilities: isLinkAndSyncEnabled() ? ['backup3'] : [],
capabilities: isLinkAndSyncEnabled() ? ['backup3', 'backup4'] : [],
})
.toString();

View file

@ -6,7 +6,7 @@ import type * as client from '@signalapp/libsignal-client';
import type { SignalService as Proto } from '../protobuf';
import type { IncomingWebSocketRequest } from './WebsocketResources';
import type { ServiceIdString, AciString, PniString } from '../types/ServiceId';
import type { AttachmentType, TextAttachmentType } from '../types/Attachment';
import type { TextAttachmentType } from '../types/Attachment';
import type { GiftBadgeStates } from '../components/conversation/Message';
import type { MIMEType } from '../types/MIME';
import type { DurationInSeconds } from '../util/durations';
@ -117,7 +117,6 @@ export type ProcessedAttachment = {
blurHash?: string;
cdnNumber?: number;
textAttachment?: Omit<TextAttachmentType, 'preview'>;
backupLocator?: AttachmentType['backupLocator'];
uploadTimestamp?: number;
downloadPath?: string;
incrementalMac?: string;

View file

@ -14,10 +14,11 @@ import * as Errors from '../types/errors';
import { strictAssert } from '../util/assert';
import {
AttachmentSizeError,
mightBeOnBackupTier,
type AttachmentType,
AttachmentVariant,
AttachmentPermanentlyUndownloadableError,
hasRequiredInformationForBackup,
type BackupableAttachmentType,
} from '../types/Attachment';
import * as Bytes from '../Bytes';
import {
@ -27,6 +28,7 @@ import {
type ReencryptedAttachmentV2,
decryptAndReencryptLocally,
measureSize,
type IntegrityCheckType,
} from '../AttachmentCrypto';
import type { ProcessedAttachment } from './Types.d';
import type { WebAPIType } from './WebAPI';
@ -58,7 +60,7 @@ export function getCdnKey(attachment: ProcessedAttachment): string {
}
export function getBackupMediaOuterEncryptionKeyMaterial(
attachment: AttachmentType
attachment: BackupableAttachmentType
): BackupMediaKeyMaterialType {
const mediaId = getMediaIdForAttachment(attachment);
const backupKey = getBackupMediaRootKey();
@ -66,14 +68,14 @@ export function getBackupMediaOuterEncryptionKeyMaterial(
}
function getBackupThumbnailInnerEncryptionKeyMaterial(
attachment: AttachmentType
attachment: BackupableAttachmentType
): BackupMediaKeyMaterialType {
const mediaId = getMediaIdForAttachmentThumbnail(attachment);
const backupKey = getBackupMediaRootKey();
return deriveBackupMediaKeyMaterial(backupKey, mediaId.bytes);
}
function getBackupThumbnailOuterEncryptionKeyMaterial(
attachment: AttachmentType
attachment: BackupableAttachmentType
): BackupMediaKeyMaterialType {
const mediaId = getMediaIdForAttachmentThumbnail(attachment);
const backupKey = getBackupMediaRootKey();
@ -81,13 +83,9 @@ function getBackupThumbnailOuterEncryptionKeyMaterial(
}
export async function getCdnNumberForBackupTier(
attachment: ProcessedAttachment
attachment: BackupableAttachmentType
): Promise<number> {
strictAssert(
attachment.backupLocator,
'Attachment was missing backupLocator'
);
let backupCdnNumber = attachment.backupLocator.cdnNumber;
let { backupCdnNumber } = attachment;
if (backupCdnNumber == null) {
const mediaId = getMediaIdForAttachment(attachment);
@ -106,11 +104,15 @@ export async function getCdnNumberForBackupTier(
export async function downloadAttachment(
server: WebAPIType,
attachment: ProcessedAttachment,
{
attachment,
mediaTier,
}:
| { attachment: AttachmentType; mediaTier: MediaTier.STANDARD }
| { attachment: BackupableAttachmentType; mediaTier: MediaTier.BACKUP },
options: {
disableRetries?: boolean;
logPrefix?: string;
mediaTier?: MediaTier;
onSizeUpdate: (totalBytes: number) => void;
timeout?: number;
variant: AttachmentVariant;
@ -119,20 +121,20 @@ export async function downloadAttachment(
): Promise<ReencryptedAttachmentV2> {
const logId = `downloadAttachment/${options.logPrefix ?? ''}`;
const { digest, incrementalMac, chunkSize, key, size } = attachment;
const { digest, plaintextHash, incrementalMac, chunkSize, key, size } =
attachment;
try {
strictAssert(digest, `${logId}: missing digest`);
strictAssert(
digest || plaintextHash,
`${logId}: missing digest and plaintextHash`
);
strictAssert(key, `${logId}: missing key`);
strictAssert(isNumber(size), `${logId}: missing size`);
} catch (error) {
throw new AttachmentPermanentlyUndownloadableError(error.message);
}
const mediaTier =
options?.mediaTier ??
(mightBeOnBackupTier(attachment) ? MediaTier.BACKUP : MediaTier.STANDARD);
let downloadResult: Awaited<ReturnType<typeof downloadToDisk>>;
let { downloadPath } = attachment;
@ -169,6 +171,12 @@ export async function downloadAttachment(
if (downloadOffset !== 0) {
log.info(`${logId}: resuming from ${downloadOffset}`);
}
if (mediaTier === MediaTier.BACKUP) {
strictAssert(
hasRequiredInformationForBackup(attachment),
`${logId}: attachment missing critical information for backup tier`
);
}
if (mediaTier === MediaTier.STANDARD) {
strictAssert(
@ -197,6 +205,8 @@ export async function downloadAttachment(
size,
});
} else {
strictAssert(mediaTier === MediaTier.BACKUP, 'backup media tier');
const mediaId =
options.variant === AttachmentVariant.ThumbnailFromBackup
? getMediaIdForAttachmentThumbnail(attachment)
@ -244,6 +254,21 @@ export async function downloadAttachment(
case AttachmentVariant.Default:
case undefined: {
const { aesKey, macKey } = splitKeys(Bytes.fromBase64(key));
let integrityCheck: IntegrityCheckType;
if (plaintextHash) {
integrityCheck = {
type: 'plaintext',
plaintextHash: Bytes.fromHex(plaintextHash),
};
} else if (digest) {
integrityCheck = {
type: 'encrypted',
digest: Bytes.fromBase64(digest),
};
} else {
throw new Error(`${logId}: missing both digest and plaintextHash`);
}
return await decryptAndReencryptLocally({
type: 'standard',
ciphertextPath: cipherTextAbsolutePath,
@ -251,7 +276,7 @@ export async function downloadAttachment(
aesKey,
macKey,
size,
theirDigest: Bytes.fromBase64(digest),
integrityCheck,
theirIncrementalMac: incrementalMac
? Bytes.fromBase64(incrementalMac)
: undefined,
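
The download path now accepts either integrity anchor. A stand-alone sketch of the selection logic above, using Node's Buffer in place of the project's Bytes helpers; `pickIntegrityCheck` is a hypothetical helper name, not part of the diff.

```
type IntegrityCheckType =
  | { type: 'plaintext'; plaintextHash: Uint8Array }
  | { type: 'encrypted'; digest: Uint8Array };

// Prefer the plaintext hash (hex-encoded) when we have it; otherwise fall
// back to the encrypted digest (base64-encoded). Throw only if both are
// absent.
function pickIntegrityCheck(attachment: {
  plaintextHash?: string;
  digest?: string;
}): IntegrityCheckType {
  if (attachment.plaintextHash) {
    return {
      type: 'plaintext',
      plaintextHash: Buffer.from(attachment.plaintextHash, 'hex'),
    };
  }
  if (attachment.digest) {
    return {
      type: 'encrypted',
      digest: Buffer.from(attachment.digest, 'base64'),
    };
  }
  throw new Error('missing both digest and plaintextHash');
}
```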

View file

@ -9,6 +9,7 @@ import {
processPreKeyBundle,
ProtocolAddress,
PublicKey,
UsePQRatchet,
} from '@signalapp/libsignal-client';
import {
@ -193,7 +194,8 @@ async function handleServerKeys(
preKeyBundle,
protocolAddress,
sessionStore,
identityKeyStore
identityKeyStore,
UsePQRatchet.No
)
);
} catch (error) {

View file

@ -33,13 +33,16 @@ import { ThemeType } from './Util';
import * as GoogleChrome from '../util/GoogleChrome';
import { ReadStatus } from '../messages/MessageReadStatus';
import type { MessageStatusType } from '../components/conversation/Message';
import { strictAssert } from '../util/assert';
import type { SignalService as Proto } from '../protobuf';
import { isMoreRecentThan } from '../util/timestamp';
import { DAY } from '../util/durations';
import { getMessageQueueTime } from '../util/getMessageQueueTime';
import { getLocalAttachmentUrl } from '../util/getLocalAttachmentUrl';
import type { ReencryptionInfo } from '../AttachmentCrypto';
import {
isValidAttachmentKey,
isValidDigest,
isValidPlaintextHash,
} from './Crypto';
import { redactGenericText } from '../util/privacy';
import { missingCaseError } from '../util/missingCaseError';
@ -94,6 +97,10 @@ export type EphemeralAttachmentFields = {
schemaVersion?: number;
/** @deprecated Legacy field, replaced by cdnKey */
cdnId?: string;
/** @deprecated Legacy fields, no longer needed */
iv?: never;
isReencryptableToSameDigest?: never;
reencryptionInfo?: never;
};
/**
@ -125,7 +132,6 @@ export type AttachmentType = EphemeralAttachmentFields & {
cdnKey?: string;
downloadPath?: string;
key?: string;
iv?: string;
textAttachment?: TextAttachmentType;
wasTooBig?: boolean;
@ -135,11 +141,7 @@ export type AttachmentType = EphemeralAttachmentFields & {
incrementalMac?: string;
chunkSize?: number;
backupLocator?: {
mediaName: string;
cdnNumber?: number;
};
backupCdnNumber?: number;
localBackupPath?: string;
// See app/attachment_channel.ts
@ -149,15 +151,7 @@ export type AttachmentType = EphemeralAttachmentFields & {
/** For quote attachments, if copied from the referenced attachment */
copied?: boolean;
} & (
| {
isReencryptableToSameDigest?: true;
}
| {
isReencryptableToSameDigest: false;
reencryptionInfo?: ReencryptionInfo;
}
);
};
export type LocalAttachmentV2Type = Readonly<{
version: 2;
@ -1190,53 +1184,68 @@ export const canBeDownloaded = (
return Boolean(attachment.digest && attachment.key && !attachment.wasTooBig);
};
export function getAttachmentSignature(attachment: AttachmentType): string {
strictAssert(attachment.digest, 'attachment missing digest');
return attachment.digest;
export function doAttachmentsOnSameMessageMatch(
attachmentA: AttachmentType,
attachmentB: AttachmentType
): boolean {
if (
isValidPlaintextHash(attachmentA.plaintextHash) &&
isValidPlaintextHash(attachmentB.plaintextHash)
) {
return attachmentA.plaintextHash === attachmentB.plaintextHash;
}
if (isValidDigest(attachmentA.digest) && isValidDigest(attachmentB.digest)) {
return attachmentA.digest === attachmentB.digest;
}
return false;
}
export function getAttachmentSignatureSafe(
// TODO: DESKTOP-8910
// This "undownloaded" attachment signature can change once the file is downloaded; we may
// start with only the digest or plaintextHash, but both will be filled in by the time
// it's downloaded
export function getUndownloadedAttachmentSignature(
attachment: AttachmentType
): string | undefined {
try {
return getAttachmentSignature(attachment);
} catch {
return undefined;
): string {
return `${attachment.digest}.${attachment.plaintextHash}`;
}
export function cacheAttachmentBySignature(
attachmentMap: Map<string, AttachmentType>,
attachment: AttachmentType
): void {
const { digest, plaintextHash } = attachment;
if (digest) {
attachmentMap.set(digest, attachment);
}
if (plaintextHash) {
attachmentMap.set(plaintextHash, attachment);
}
}
type RequiredPropertiesForDecryption = 'key' | 'digest';
type RequiredPropertiesForReencryption = 'path' | 'key' | 'digest' | 'iv';
type DecryptableAttachment = WithRequiredProperties<
AttachmentType,
RequiredPropertiesForDecryption
>;
export type AttachmentWithNewReencryptionInfoType = Omit<
AttachmentType,
'isReencryptableToSameDigest'
> & {
isReencryptableToSameDigest: false;
reencryptionInfo: ReencryptionInfo;
};
type AttachmentReencryptableToExistingDigestType = Omit<
WithRequiredProperties<AttachmentType, RequiredPropertiesForReencryption>,
'isReencryptableToSameDigest'
> & { isReencryptableToSameDigest: true };
export type ReencryptableAttachment =
| AttachmentWithNewReencryptionInfoType
| AttachmentReencryptableToExistingDigestType;
export function getCachedAttachmentBySignature<T>(
attachmentMap: Map<string, T>,
attachment: AttachmentType
): T | undefined {
const { digest, plaintextHash } = attachment;
if (digest) {
if (attachmentMap.has(digest)) {
return attachmentMap.get(digest);
}
}
if (plaintextHash) {
if (attachmentMap.has(plaintextHash)) {
return attachmentMap.get(plaintextHash);
}
}
return undefined;
}
export type AttachmentDownloadableFromTransitTier = WithRequiredProperties<
DecryptableAttachment,
'cdnKey' | 'cdnNumber'
>;
export type AttachmentDownloadableFromBackupTier = WithRequiredProperties<
DecryptableAttachment,
'backupLocator'
AttachmentType,
'key' | 'digest' | 'cdnKey' | 'cdnNumber'
>;
export type LocallySavedAttachment = WithRequiredProperties<
@ -1244,43 +1253,6 @@ export type LocallySavedAttachment = WithRequiredProperties<
'path'
>;
export function isDecryptable(
attachment: AttachmentType
): attachment is DecryptableAttachment {
return Boolean(attachment.key) && Boolean(attachment.digest);
}
export function hasAllOriginalEncryptionInfo(
attachment: AttachmentType
): attachment is WithRequiredProperties<
AttachmentType,
'iv' | 'key' | 'digest'
> {
return (
Boolean(attachment.iv) &&
Boolean(attachment.key) &&
Boolean(attachment.digest)
);
}
export function isReencryptableToSameDigest(
attachment: AttachmentType
): attachment is AttachmentReencryptableToExistingDigestType {
return (
hasAllOriginalEncryptionInfo(attachment) &&
Boolean(attachment.isReencryptableToSameDigest)
);
}
export function isReencryptableWithNewEncryptionInfo(
attachment: AttachmentType
): attachment is AttachmentWithNewReencryptionInfoType {
return (
attachment.isReencryptableToSameDigest === false &&
Boolean(attachment.reencryptionInfo)
);
}
// Extend range in case the attachment is actually still there (this function is meant to
// be optimistic)
const BUFFER_TIME_ON_TRANSIT_TIER = 5 * DAY;
@ -1312,46 +1284,75 @@ export function mightStillBeOnTransitTier(
return false;
}
export function mightBeOnBackupTier(
attachment: Pick<AttachmentType, 'backupLocator'>
): boolean {
return Boolean(attachment.backupLocator?.mediaName);
export type BackupableAttachmentType = WithRequiredProperties<
AttachmentType,
'plaintextHash' | 'key'
>;
export function hasRequiredInformationForBackup(
attachment: AttachmentType
): attachment is BackupableAttachmentType {
return (
isValidAttachmentKey(attachment.key) &&
isValidPlaintextHash(attachment.plaintextHash)
);
}
export function mightBeInLocalBackup(
attachment: Pick<AttachmentType, 'localBackupPath' | 'localKey'>
): boolean {
return Boolean(attachment.localBackupPath && attachment.localKey);
export function wasImportedFromLocalBackup(
attachment: AttachmentType
): attachment is BackupableAttachmentType {
return (
hasRequiredInformationForBackup(attachment) &&
Boolean(attachment.localBackupPath) &&
isValidAttachmentKey(attachment.localKey)
);
}
export function isDownloadableFromTransitTier(
export function canAttachmentHaveThumbnail({
contentType,
}: Pick<AttachmentType, 'contentType'>): boolean {
return isVideoTypeSupported(contentType) || isImageTypeSupported(contentType);
}
export function hasRequiredInformationToDownloadFromTransitTier(
attachment: AttachmentType
): attachment is AttachmentDownloadableFromTransitTier {
if (!isDecryptable(attachment)) {
const hasIntegrityCheck =
isValidDigest(attachment.digest) ||
isValidPlaintextHash(attachment.plaintextHash);
if (!hasIntegrityCheck) {
return false;
}
if (attachment.cdnKey && attachment.cdnNumber != null) {
return true;
if (!isValidAttachmentKey(attachment.key)) {
return false;
}
return false;
if (!attachment.cdnKey || attachment.cdnNumber == null) {
return false;
}
return true;
}
export function isDownloadableFromBackupTier(
attachment: AttachmentType
): attachment is AttachmentDownloadableFromBackupTier {
if (!attachment.key || !attachment.digest) {
return false;
}
if (attachment.backupLocator?.mediaName) {
return true;
}
return false;
export function shouldAttachmentEndUpInRemoteBackup({
attachment,
hasMediaBackups,
}: {
attachment: AttachmentType;
hasMediaBackups: boolean;
}): boolean {
return hasMediaBackups && hasRequiredInformationForBackup(attachment);
}
export function isDownloadable(attachment: AttachmentType): boolean {
return (
isDownloadableFromTransitTier(attachment) ||
isDownloadableFromBackupTier(attachment)
hasRequiredInformationToDownloadFromTransitTier(attachment) ||
shouldAttachmentEndUpInRemoteBackup({
attachment,
// TODO: DESKTOP-8905
hasMediaBackups: true,
})
);
}
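
Because an attachment may be known by its plaintextHash, its digest, or both, the new helpers index a cached attachment under every identifier it carries. A self-contained sketch of the same pattern, with simplified types (the real functions above also validate the identifiers):

```
type Sig = { digest?: string; plaintextHash?: string };

// Index one object under both identifiers so a later copy carrying only
// one of them still hits the cache.
function cacheBySignature<T extends Sig>(map: Map<string, T>, a: T): void {
  if (a.digest) {
    map.set(a.digest, a);
  }
  if (a.plaintextHash) {
    map.set(a.plaintextHash, a);
  }
}

function getBySignature<T>(map: Map<string, T>, a: Sig): T | undefined {
  if (a.digest !== undefined && map.has(a.digest)) {
    return map.get(a.digest);
  }
  if (a.plaintextHash !== undefined && map.has(a.plaintextHash)) {
    return map.get(a.plaintextHash);
  }
  return undefined;
}

const cache = new Map<string, Sig>();
cacheBySignature(cache, { digest: 'd1', plaintextHash: 'p1' });
// An edited copy that only knows the plaintext hash still matches:
console.log(getBySignature(cache, { plaintextHash: 'p1' }) !== undefined); // true
```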

View file

@ -19,8 +19,6 @@ export type StandardAttachmentBackupJobType = {
path: string | null;
contentType: MIMEType;
keys: string;
digest: string;
iv: string;
transitCdnInfo?: {
cdnKey: string;
cdnNumber: number;
@ -72,8 +70,6 @@ const standardBackupJobDataSchema = z.object({
size: z.number(),
contentType: MIMETypeSchema,
keys: z.string(),
iv: z.string(),
digest: z.string(),
transitCdnInfo: z
.object({
cdnKey: z.string(),

View file

@ -32,7 +32,7 @@ export type CoreAttachmentDownloadJobType = {
attachmentType: AttachmentDownloadJobTypeType;
ciphertextSize: number;
contentType: MIMEType;
digest: string;
attachmentSignature: string;
isManualDownload?: boolean;
messageId: string;
receivedAt: number;
@ -51,7 +51,7 @@ export const coreAttachmentDownloadJobSchema = z.object({
attachmentType: attachmentDownloadTypeSchema,
ciphertextSize: z.number(),
contentType: MIMETypeSchema,
digest: z.string(),
attachmentSignature: z.string(),
isManualDownload: z.boolean().optional(),
messageId: z.string(),
messageIdForLogging: z.string().optional(),

View file

@ -1,6 +1,8 @@
// Copyright 2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { fromBase64, fromHex } from '../Bytes';
export enum HashType {
size256 = 'sha256',
size512 = 'sha512',
@ -16,6 +18,44 @@ export const UUID_BYTE_SIZE = 16;
export const IV_LENGTH = 16;
export const KEY_LENGTH = 32;
export const AES_KEY_LENGTH = 32;
export const MAC_LENGTH = 32;
export const ATTACHMENT_MAC_LENGTH = MAC_LENGTH;
export const DIGEST_LENGTH = 32;
export const PLAINTEXT_HASH_LENGTH = 32;
export const KEY_SET_LENGTH = AES_KEY_LENGTH + MAC_LENGTH;
export function isValidAttachmentKey(
keyBase64: string | undefined
): keyBase64 is string {
if (typeof keyBase64 !== 'string') {
return false;
}
const bytes = fromBase64(keyBase64);
return bytes.byteLength > 0;
}
export function isValidDigest(
digestBase64: string | undefined
): digestBase64 is string {
if (typeof digestBase64 !== 'string') {
return false;
}
const bytes = fromBase64(digestBase64);
return bytes.byteLength > 0;
}
export function isValidPlaintextHash(
plaintextHashHex: string | undefined
): plaintextHashHex is string {
if (typeof plaintextHashHex !== 'string') {
return false;
}
const bytes = fromHex(plaintextHashHex);
return bytes.byteLength > 0;
}
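
These validators only require a non-empty decode. A stand-alone sketch of the same shape, with Node's Buffer standing in for the Bytes helpers (an assumption for illustration; the `Sketch` suffix marks it as not part of the diff):

```
function isValidPlaintextHashSketch(
  plaintextHashHex: string | undefined
): plaintextHashHex is string {
  if (typeof plaintextHashHex !== 'string') {
    return false;
  }
  return Buffer.from(plaintextHashHex, 'hex').byteLength > 0;
}

console.log(isValidPlaintextHashSketch('ab'.repeat(32))); // true (32-byte hash)
console.log(isValidPlaintextHashSketch(undefined)); // false
console.log(isValidPlaintextHashSketch('')); // false: empty decode
```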

View file

@ -10,13 +10,9 @@ import type {
AttachmentType,
AttachmentWithHydratedData,
LocalAttachmentV2Type,
LocallySavedAttachment,
ReencryptableAttachment,
} from './Attachment';
import {
captureDimensionsAndScreenshot,
getAttachmentIdForLogging,
isAttachmentLocallySaved,
removeSchemaVersion,
replaceUnicodeOrderOverrides,
replaceUnicodeV2,
@ -57,9 +53,6 @@ export const PRIVATE = 'private';
export type ContextType = {
doesAttachmentExist: (relativePath: string) => Promise<boolean>;
ensureAttachmentIsReencryptable: (
attachment: LocallySavedAttachment
) => Promise<ReencryptableAttachment>;
getImageDimensions: (params: {
objectUrl: string;
logger: LoggerType;
@ -138,7 +131,7 @@ export type ContextType = {
// Version 13:
// - Attachments: write bodyAttachment to disk
// Version 14
// - All attachments: ensure they are reencryptable to a known digest
// - DEPRECATED: All attachments: ensure they are reencryptable to a known digest
// Version 15
// - A noop migration to cause attachments to be normalized when the message is saved
@ -660,36 +653,13 @@ const toVersion13 = _withSchemaVersion({
upgrade: migrateBodyAttachmentToDisk,
});
// NOOP: Used to call ensureAttachmentIsReencryptable
const toVersion14 = _withSchemaVersion({
schemaVersion: 14,
upgrade: _mapAllAttachments(
async (
attachment,
{ logger, ensureAttachmentIsReencryptable, doesAttachmentExist }
) => {
if (!isAttachmentLocallySaved(attachment)) {
return attachment;
}
if (!(await doesAttachmentExist(attachment.path))) {
// Attachments may be missing, e.g. for quote thumbnails that reference messages
// which have been deleted
logger.info(
`Message2.toVersion14(id=${getAttachmentIdForLogging(attachment)}: File does not exist`
);
return attachment;
}
if (!attachment.digest) {
// Messages that are being upgraded prior to being sent may not have encrypted the
// attachment yet
return attachment;
}
return ensureAttachmentIsReencryptable(attachment);
}
),
upgrade: noopUpgrade,
});
// NOOP: used to trigger saves into the new message_attachments table
const toVersion15 = _withSchemaVersion({
schemaVersion: 15,
upgrade: noopUpgrade,
@ -726,7 +696,6 @@ export const upgradeSchema = async (
readAttachmentData,
writeNewAttachmentData,
doesAttachmentExist,
ensureAttachmentIsReencryptable,
getRegionCode,
makeObjectUrl,
revokeObjectUrl,
@ -766,7 +735,6 @@ export const upgradeSchema = async (
makeObjectUrl,
revokeObjectUrl,
doesAttachmentExist,
ensureAttachmentIsReencryptable,
getImageDimensions,
makeImageThumbnail,
makeVideoScreenshot,
@ -798,7 +766,6 @@ export const upgradeSchema = async (
export const processNewAttachment = async (
attachment: AttachmentType,
{
ensureAttachmentIsReencryptable,
writeNewAttachmentData,
makeObjectUrl,
revokeObjectUrl,
@ -816,7 +783,6 @@ export const processNewAttachment = async (
| 'makeVideoScreenshot'
| 'logger'
| 'deleteOnDisk'
| 'ensureAttachmentIsReencryptable'
>
): Promise<AttachmentType> => {
if (!isFunction(writeNewAttachmentData)) {
@ -841,25 +807,15 @@ export const processNewAttachment = async (
throw new TypeError('context.logger is required');
}
let upgradedAttachment = attachment;
if (isAttachmentLocallySaved(upgradedAttachment)) {
upgradedAttachment =
await ensureAttachmentIsReencryptable(upgradedAttachment);
}
const finalAttachment = await captureDimensionsAndScreenshot(
upgradedAttachment,
{
writeNewAttachmentData,
makeObjectUrl,
revokeObjectUrl,
getImageDimensions,
makeImageThumbnail,
makeVideoScreenshot,
logger,
}
);
const finalAttachment = await captureDimensionsAndScreenshot(attachment, {
writeNewAttachmentData,
makeObjectUrl,
revokeObjectUrl,
getImageDimensions,
makeImageThumbnail,
makeVideoScreenshot,
logger,
});
return finalAttachment;
};

View file

@ -243,6 +243,7 @@ export type StorageAccessType = {
// Stored solely for persistence during import/export sequence
svrPin: string;
optimizeOnDeviceStorage: boolean;
postRegistrationSyncsStatus: 'incomplete' | 'complete';

View file

@ -108,8 +108,6 @@ export function copyCdnFields(
? Bytes.toBase64(uploaded.incrementalMac)
: undefined,
chunkSize: dropNull(uploaded.chunkSize),
isReencryptableToSameDigest: uploaded.isReencryptableToSameDigest,
iv: Bytes.toBase64(uploaded.iv),
key: Bytes.toBase64(uploaded.key),
plaintextHash: uploaded.plaintextHash,
uploadTimestamp: uploaded.uploadTimestamp?.toNumber(),

View file

@ -12,10 +12,7 @@ import { strictAssert } from './assert';
* Gets the IV from the start of the stream and creates a decipher.
* Then deciphers the rest of the stream.
*/
export function getIvAndDecipher(
aesKey: Uint8Array,
onFoundIv?: (iv: Buffer) => void
): Transform {
export function decipherWithAesKey(aesKey: Uint8Array): Transform {
let maybeIvBytes: Buffer | null = Buffer.alloc(0);
let decipher: Decipher | null = null;
return new Transform({
@ -39,7 +36,6 @@ export function getIvAndDecipher(
// remainder of the bytes through.
const iv = maybeIvBytes.subarray(0, IV_LENGTH);
const remainder = maybeIvBytes.subarray(IV_LENGTH);
onFoundIv?.(iv);
maybeIvBytes = null; // free memory
decipher = createDecipheriv(CipherType.AES256CBC, aesKey, iv);
callback(null, decipher.update(remainder));
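
A usage sketch for the renamed transform, assuming `decipherWithAesKey` is imported from this module; the ciphertext layout (16-byte IV prefix, AES-256-CBC body) is the one the transform expects, and the file paths are illustrative.

```
import { pipeline } from 'node:stream/promises';
import { createReadStream, createWriteStream } from 'node:fs';

// Decrypt a file whose first 16 bytes are the IV; the transform consumes
// the IV itself, so no caller-side IV handling (or onFoundIv callback)
// remains.
async function decryptToDisk(
  aesKey: Uint8Array,
  ciphertextPath: string,
  plaintextPath: string
): Promise<void> {
  await pipeline(
    createReadStream(ciphertextPath),
    decipherWithAesKey(aesKey),
    createWriteStream(plaintextPath)
  );
}
```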

View file

@ -3,11 +3,11 @@
import {
type AttachmentType,
mightBeOnBackupTier,
AttachmentVariant,
AttachmentPermanentlyUndownloadableError,
getAttachmentIdForLogging,
mightBeInLocalBackup,
hasRequiredInformationForBackup,
wasImportedFromLocalBackup,
} from '../types/Attachment';
import { downloadAttachment as doDownloadAttachment } from '../textsecure/downloadAttachment';
import { downloadAttachmentFromLocalBackup as doDownloadAttachmentFromLocalBackup } from './downloadAttachmentFromLocalBackup';
@ -21,7 +21,12 @@ const log = createLogger('downloadAttachment');
export async function downloadAttachment({
attachment,
options: { variant = AttachmentVariant.Default, onSizeUpdate, abortSignal },
options: {
variant = AttachmentVariant.Default,
onSizeUpdate,
abortSignal,
hasMediaBackups,
},
dependencies = {
downloadAttachmentFromServer: doDownloadAttachment,
downloadAttachmentFromLocalBackup: doDownloadAttachmentFromLocalBackup,
@ -32,6 +37,7 @@ export async function downloadAttachment({
variant?: AttachmentVariant;
onSizeUpdate: (totalBytes: number) => void;
abortSignal: AbortSignal;
hasMediaBackups: boolean;
};
dependencies?: {
downloadAttachmentFromServer: typeof doDownloadAttachment;
@ -49,19 +55,12 @@ export async function downloadAttachment({
throw new Error('window.textsecure.server is not available!');
}
let migratedAttachment: AttachmentType;
const isBackupable = hasRequiredInformationForBackup(attachment);
const { id: legacyId } = attachment;
if (legacyId === undefined) {
migratedAttachment = attachment;
} else {
migratedAttachment = {
...attachment,
cdnId: String(legacyId),
};
}
const mightBeOnBackupTierNow = isBackupable && hasMediaBackups;
const mightBeOnBackupTierInTheFuture = isBackupable;
if (mightBeInLocalBackup(attachment)) {
if (wasImportedFromLocalBackup(attachment)) {
log.info(`${logId}: Downloading attachment from local backup`);
try {
const result =
@ -78,14 +77,13 @@ export async function downloadAttachment({
}
}
if (mightBeOnBackupTier(migratedAttachment)) {
if (mightBeOnBackupTierNow) {
try {
return await dependencies.downloadAttachmentFromServer(
server,
migratedAttachment,
{ mediaTier: MediaTier.BACKUP, attachment },
{
logPrefix: dataId,
mediaTier: MediaTier.BACKUP,
onSizeUpdate,
variant,
abortSignal,
@ -121,21 +119,21 @@ export async function downloadAttachment({
try {
return await dependencies.downloadAttachmentFromServer(
server,
migratedAttachment,
{ attachment, mediaTier: MediaTier.STANDARD },
{
logPrefix: dataId,
mediaTier: MediaTier.STANDARD,
onSizeUpdate,
variant,
abortSignal,
}
);
} catch (error) {
if (mightBeOnBackupTier(migratedAttachment)) {
if (mightBeOnBackupTierInTheFuture) {
// We don't want to throw the AttachmentPermanentlyUndownloadableError because we
// may just need to wait for this attachment to end up on the backup tier
throw error;
}
// Attachments on the transit tier expire after (message queue length + buffer) days,
// then start returning 404
if (error instanceof HTTPError && error.code === 404) {
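
A condensed sketch of the fallback order this rewrite implements. The helper names `fromLocalBackup` and `fromServer` are hypothetical stand-ins for the injected dependencies; the types and predicates come from the imports above.

```
async function downloadWithFallbacks(
  attachment: AttachmentType,
  hasMediaBackups: boolean
): Promise<ReencryptedAttachmentV2> {
  const isBackupable = hasRequiredInformationForBackup(attachment);

  // 1. A copy imported from a local backup is cheapest.
  if (wasImportedFromLocalBackup(attachment)) {
    try {
      return await fromLocalBackup(attachment);
    } catch {
      // fall through to the remote tiers
    }
  }

  // 2. The backup CDN, but only while media backups are active.
  if (isBackupable && hasMediaBackups) {
    try {
      return await fromServer(attachment, MediaTier.BACKUP);
    } catch {
      // fall through to transit
    }
  }

  // 3. The transit tier. A backupable attachment may simply not be on the
  // backup tier yet, so its failure is never treated as permanent.
  try {
    return await fromServer(attachment, MediaTier.STANDARD);
  } catch (error) {
    if (!isBackupable && error instanceof HTTPError && error.code === 404) {
      // Transit-tier copies expire; a 404 here is permanent.
      throw new AttachmentPermanentlyUndownloadableError(error.message);
    }
    throw error;
  }
}
```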

View file

@ -4,7 +4,7 @@
import { existsSync } from 'node:fs';
import { isNumber } from 'lodash';
import {
type AttachmentType,
type BackupableAttachmentType,
getAttachmentIdForLogging,
} from '../types/Attachment';
import {
@ -16,7 +16,7 @@ import { strictAssert } from './assert';
export class AttachmentPermanentlyUndownloadableError extends Error {}
export async function downloadAttachmentFromLocalBackup(
attachment: AttachmentType
attachment: BackupableAttachmentType
): Promise<ReencryptedAttachmentV2> {
const attachmentId = getAttachmentIdForLogging(attachment);
const dataId = `${attachmentId}`;
@ -26,16 +26,16 @@ export async function downloadAttachmentFromLocalBackup(
}
async function doDownloadFromLocalBackup(
attachment: AttachmentType,
attachment: BackupableAttachmentType,
{
logId,
}: {
logId: string;
}
): Promise<ReencryptedAttachmentV2> {
const { digest, localBackupPath, localKey, size } = attachment;
const { plaintextHash, localBackupPath, localKey, size } = attachment;
strictAssert(digest, `${logId}: missing digest`);
strictAssert(plaintextHash, `${logId}: missing plaintextHash`);
strictAssert(localKey, `${logId}: missing localKey`);
strictAssert(localBackupPath, `${logId}: missing localBackupPath`);
strictAssert(isNumber(size), `${logId}: missing size`);

View file

@ -1,188 +0,0 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { PassThrough } from 'stream';
import {
type EncryptedAttachmentV2,
ReencryptedDigestMismatchError,
type ReencryptionInfo,
decryptAttachmentV2ToSink,
encryptAttachmentV2,
generateAttachmentKeys,
} from '../AttachmentCrypto';
import {
type AddressableAttachmentType,
type LocallySavedAttachment,
type ReencryptableAttachment,
hasAllOriginalEncryptionInfo,
isReencryptableToSameDigest,
isReencryptableWithNewEncryptionInfo,
getAttachmentIdForLogging,
} from '../types/Attachment';
import { strictAssert } from './assert';
import { createLogger } from '../logging/log';
import { fromBase64, toBase64 } from '../Bytes';
import { toLogFormat } from '../types/errors';
const logging = createLogger('ensureAttachmentIsReencryptable');
/**
* Some attachments on desktop are not reencryptable to the digest we received for them.
* This is because:
* 1. desktop has not always saved iv & key for attachments
* 2. android has in the past sent attachments with non-zero (random) padding
*
* In these cases we need to generate a new iv and key to recalculate a digest that we can
* put in the backup proto at export time.
*/
export async function ensureAttachmentIsReencryptable(
attachment: LocallySavedAttachment
): Promise<ReencryptableAttachment> {
if (isReencryptableToSameDigest(attachment)) {
return attachment;
}
if (isReencryptableWithNewEncryptionInfo(attachment)) {
return attachment;
}
if (hasAllOriginalEncryptionInfo(attachment)) {
try {
await attemptToReencryptToOriginalDigest(attachment);
return {
...attachment,
isReencryptableToSameDigest: true,
};
} catch (e) {
const logId = `ensureAttachmentIsReencryptable(digest=${getAttachmentIdForLogging(attachment)})`;
if (e instanceof ReencryptedDigestMismatchError) {
logging.info(
`${logId}: Unable to reencrypt attachment to original digest; must have had non-zero padding`
);
} else {
logging.error(`${logId}: error when reencrypting`, toLogFormat(e));
}
}
}
return {
...attachment,
isReencryptableToSameDigest: false,
reencryptionInfo: await generateNewEncryptionInfoForAttachment(attachment),
};
}
/** Will throw if attachment cannot be reencrypted to original digest */
export async function attemptToReencryptToOriginalDigest(
attachment: Readonly<LocallySavedAttachment>
): Promise<void> {
if (!hasAllOriginalEncryptionInfo(attachment)) {
throw new Error('attachment must have info for reencryption');
}
const { iv, key, digest } = attachment;
if (!attachment.localKey) {
await encryptAttachmentV2({
keys: fromBase64(key),
dangerousIv: {
iv: fromBase64(iv),
reason: 'reencrypting-for-backup',
digestToMatch: fromBase64(digest),
},
plaintext: {
absolutePath: window.Signal.Migrations.getAbsoluteAttachmentPath(
attachment.path
),
},
needIncrementalMac: false,
});
} else {
strictAssert(attachment.size != null, 'Size must exist');
const passthrough = new PassThrough();
await Promise.all([
decryptAttachmentV2ToSink(
{
ciphertextPath: window.Signal.Migrations.getAbsoluteAttachmentPath(
attachment.path
),
idForLogging: 'attemptToReencryptToOriginalDigest',
size: attachment.size,
keysBase64: attachment.localKey,
type: 'local',
},
passthrough
),
encryptAttachmentV2({
plaintext: {
stream: passthrough,
size: attachment.size,
},
keys: fromBase64(key),
dangerousIv: {
iv: fromBase64(iv),
reason: 'reencrypting-for-backup',
digestToMatch: fromBase64(digest),
},
needIncrementalMac: false,
}),
]);
}
}
export async function generateNewEncryptionInfoForAttachment(
attachment: Readonly<AddressableAttachmentType>
): Promise<ReencryptionInfo> {
const newKeys = generateAttachmentKeys();
let encryptedAttachment: EncryptedAttachmentV2;
if (!attachment.localKey) {
encryptedAttachment = await encryptAttachmentV2({
keys: newKeys,
plaintext: {
absolutePath: window.Signal.Migrations.getAbsoluteAttachmentPath(
attachment.path
),
},
needIncrementalMac: false,
});
} else {
const passthrough = new PassThrough();
strictAssert(attachment.size != null, 'Size must exist');
const result = await Promise.all([
decryptAttachmentV2ToSink(
{
ciphertextPath: window.Signal.Migrations.getAbsoluteAttachmentPath(
attachment.path
),
idForLogging: 'generateNewEncryptionInfoForAttachment',
size: attachment.size,
keysBase64: attachment.localKey,
type: 'local',
},
passthrough
),
encryptAttachmentV2({
keys: newKeys,
plaintext: {
stream: passthrough,
size: attachment.size,
},
needIncrementalMac: false,
}),
]);
// eslint-disable-next-line prefer-destructuring
encryptedAttachment = result[1];
}
return {
digest: toBase64(encryptedAttachment.digest),
iv: toBase64(encryptedAttachment.iv),
key: toBase64(newKeys),
};
}

View file

@ -6,16 +6,18 @@ import type { EditAttributesType } from '../messageModifiers/Edits';
import type {
EditHistoryType,
MessageAttributesType,
QuotedAttachmentType,
QuotedMessageType,
} from '../model-types.d';
import type { LinkPreviewType } from '../types/message/LinkPreviews';
import * as Edits from '../messageModifiers/Edits';
import { createLogger } from '../logging/log';
import { ReadStatus } from '../messages/MessageReadStatus';
import { DataWriter } from '../sql/Client';
import { drop } from './drop';
import { getAttachmentSignature, isVoiceMessage } from '../types/Attachment';
import {
cacheAttachmentBySignature,
getCachedAttachmentBySignature,
isVoiceMessage,
} from '../types/Attachment';
import { isAciString } from './isAciString';
import { getMessageIdForLogging } from './idForLogging';
import { hasErrors } from '../state/selectors/message';
@ -31,17 +33,6 @@ const log = createLogger('handleEditMessage');
const RECURSION_LIMIT = 15;
function getAttachmentSignatureSafe(
attachment: AttachmentType
): string | undefined {
try {
return getAttachmentSignature(attachment);
} catch {
log.warn('attachment was missing digest', attachment.blurHash);
return undefined;
}
}
export async function handleEditMessage(
mainMessage: MessageAttributesType,
editAttributes: Pick<
@ -143,43 +134,34 @@ export async function handleEditMessage(
// Copies over the attachments from the main message if they're the same
// and they have already been downloaded.
const attachmentSignatures: Map<string, AttachmentType> = new Map();
const previewSignatures: Map<string, LinkPreviewType> = new Map();
const quoteSignatures: Map<string, QuotedAttachmentType> = new Map();
const previewSignatures: Map<string, AttachmentType> = new Map();
const quoteSignatures: Map<string, AttachmentType> = new Map();
mainMessage.attachments?.forEach(attachment => {
const signature = getAttachmentSignatureSafe(attachment);
if (signature) {
attachmentSignatures.set(signature, attachment);
}
cacheAttachmentBySignature(attachmentSignatures, attachment);
});
mainMessage.preview?.forEach(preview => {
if (!preview.image) {
return;
}
const signature = getAttachmentSignatureSafe(preview.image);
if (signature) {
previewSignatures.set(signature, preview);
}
cacheAttachmentBySignature(previewSignatures, preview.image);
});
if (mainMessage.quote) {
for (const attachment of mainMessage.quote.attachments) {
if (!attachment.thumbnail) {
continue;
}
const signature = getAttachmentSignatureSafe(attachment.thumbnail);
if (signature) {
quoteSignatures.set(signature, attachment);
}
cacheAttachmentBySignature(quoteSignatures, attachment.thumbnail);
}
}
let newAttachments = 0;
const nextEditedMessageAttachments =
upgradedEditedMessageData.attachments?.map(attachment => {
const signature = getAttachmentSignatureSafe(attachment);
const existingAttachment = signature
? attachmentSignatures.get(signature)
: undefined;
const existingAttachment = getCachedAttachmentBySignature(
attachmentSignatures,
attachment
);
if (existingAttachment) {
return existingAttachment;
@ -196,12 +178,13 @@ export async function handleEditMessage(
return preview;
}
const signature = getAttachmentSignatureSafe(preview.image);
const existingPreview = signature
? previewSignatures.get(signature)
: undefined;
if (existingPreview) {
return existingPreview;
const existingPreviewImage = getCachedAttachmentBySignature(
previewSignatures,
preview.image
);
if (existingPreviewImage) {
return { ...preview, image: existingPreviewImage };
}
newPreviews += 1;
return preview;
@ -228,10 +211,12 @@ export async function handleEditMessage(
if (!attachment.thumbnail) {
return attachment;
}
const signature = getAttachmentSignatureSafe(attachment.thumbnail);
const existingQuoteAttachment = signature
? quoteSignatures.get(signature)
: undefined;
const existingQuoteAttachment = getCachedAttachmentBySignature(
quoteSignatures,
attachment.thumbnail
);
if (existingQuoteAttachment) {
return {
...attachment,

View file

@ -20,11 +20,13 @@ import type {
} from '../model-types.d';
import * as Errors from '../types/errors';
import {
getAttachmentSignatureSafe,
isDownloading,
isDownloaded,
isVoiceMessage,
partitionBodyAndNormalAttachments,
getCachedAttachmentBySignature,
cacheAttachmentBySignature,
getUndownloadedAttachmentSignature,
} from '../types/Attachment';
import { AttachmentDownloadUrgency } from '../types/AttachmentDownload';
import type { StickerType } from '../types/Stickers';
@ -118,12 +120,12 @@ export async function queueAttachmentDownloadsForMessage(
export async function queueAttachmentDownloads(
message: MessageModel,
{
attachmentDigestForImmediate,
attachmentSignatureForImmediate,
isManualDownload,
source = AttachmentDownloadSource.STANDARD,
urgency = AttachmentDownloadUrgency.STANDARD,
}: {
attachmentDigestForImmediate?: string;
attachmentSignatureForImmediate?: string;
isManualDownload: boolean;
source?: AttachmentDownloadSource;
urgency?: AttachmentDownloadUrgency;
@ -183,7 +185,7 @@ export async function queueAttachmentDownloads(
const startingAttachments = message.get('attachments') || [];
const { attachments, count: attachmentsCount } = await queueNormalAttachments(
{
attachmentDigestForImmediate,
attachmentSignatureForImmediate,
attachments: startingAttachments,
isManualDownload,
logId,
@ -440,7 +442,7 @@ export async function queueAttachmentDownloads(
}
export async function queueNormalAttachments({
attachmentDigestForImmediate,
attachmentSignatureForImmediate,
attachments = [],
isManualDownload,
logId,
@ -451,7 +453,7 @@ export async function queueNormalAttachments({
source,
urgency,
}: {
attachmentDigestForImmediate?: string;
attachmentSignatureForImmediate?: string;
attachments: MessageAttributesType['attachments'];
isManualDownload: boolean;
logId: string;
@ -474,10 +476,7 @@ export async function queueNormalAttachments({
// then not be added to the AttachmentDownloads job.
const attachmentSignatures: Map<string, AttachmentType> = new Map();
otherAttachments?.forEach(attachment => {
const signature = getAttachmentSignatureSafe(attachment);
if (signature) {
attachmentSignatures.set(signature, attachment);
}
cacheAttachmentBySignature(attachmentSignatures, attachment);
});
let count = 0;
@ -499,10 +498,10 @@ export async function queueNormalAttachments({
return attachment;
}
const signature = getAttachmentSignatureSafe(attachment);
const existingAttachment = signature
? attachmentSignatures.get(signature)
: undefined;
const existingAttachment = getCachedAttachmentBySignature(
attachmentSignatures,
attachment
);
// We've already downloaded this elsewhere!
if (
@ -547,8 +546,9 @@ export async function queueNormalAttachments({
count += 1;
const urgencyForAttachment =
attachmentDigestForImmediate &&
attachmentDigestForImmediate === attachment.digest
attachmentSignatureForImmediate &&
attachmentSignatureForImmediate ===
getUndownloadedAttachmentSignature(attachment)
? AttachmentDownloadUrgency.IMMEDIATE
: urgency;
return AttachmentDownloadManager.addJob({
@ -570,21 +570,6 @@ export async function queueNormalAttachments({
};
}
function getLinkPreviewSignature(preview: LinkPreviewType): string | undefined {
const { image, url } = preview;
if (!image) {
return;
}
const signature = getAttachmentSignatureSafe(image);
if (!signature) {
return;
}
return `<${url}>${signature}`;
}
async function queuePreviews({
isManualDownload,
logId,
@ -607,15 +592,11 @@ async function queuePreviews({
urgency: AttachmentDownloadUrgency;
}): Promise<{ preview: Array<LinkPreviewType>; count: number }> {
const log = getLogger(source);
// Similar to queueNormalAttachments' logic for detecting same attachments
// except here we also pick by link preview URL.
const previewSignatures: Map<string, LinkPreviewType> = new Map();
const previewSignatures: Map<string, AttachmentType> = new Map();
otherPreviews?.forEach(preview => {
const signature = getLinkPreviewSignature(preview);
if (!signature) {
return;
if (preview.image) {
cacheAttachmentBySignature(previewSignatures, preview.image);
}
previewSignatures.set(signature, preview);
});
let count = 0;
@ -630,21 +611,22 @@ async function queuePreviews({
log.info(`${logId}: Preview attachment already downloaded`);
return item;
}
const signature = getLinkPreviewSignature(item);
const existingPreview = signature
? previewSignatures.get(signature)
: undefined;
const existingPreviewImage = getCachedAttachmentBySignature(
previewSignatures,
item.image
);
// We've already downloaded this elsewhere!
if (
existingPreview &&
(isDownloading(existingPreview.image) ||
isDownloaded(existingPreview.image))
existingPreviewImage &&
(isDownloading(existingPreviewImage) ||
isDownloaded(existingPreviewImage))
) {
log.info(`${logId}: Preview already downloaded elsewhere. Replacing`);
// Incrementing count so that we update the message's fields downstream
count += 1;
return existingPreview;
return { ...item, image: existingPreviewImage };
}
if (!isManualDownload) {
@ -681,20 +663,6 @@ async function queuePreviews({
};
}
function getQuoteThumbnailSignature(
quote: QuotedMessageType,
thumbnail?: AttachmentType
): string | undefined {
if (!thumbnail) {
return undefined;
}
const signature = getAttachmentSignatureSafe(thumbnail);
if (!signature) {
return;
}
return `<${quote.id}>${signature}`;
}
async function queueQuoteAttachments({
isManualDownload,
logId,
@ -733,14 +701,9 @@ async function queueQuoteAttachments({
const thumbnailSignatures: Map<string, ThumbnailType> = new Map();
otherQuotes.forEach(otherQuote => {
for (const attachment of otherQuote.attachments) {
const signature = getQuoteThumbnailSignature(
otherQuote,
attachment.thumbnail
);
if (!signature || !attachment.thumbnail) {
continue;
if (attachment.thumbnail) {
cacheAttachmentBySignature(thumbnailSignatures, attachment.thumbnail);
}
thumbnailSignatures.set(signature, attachment.thumbnail);
}
});
@ -758,10 +721,10 @@ async function queueQuoteAttachments({
return item;
}
const signature = getQuoteThumbnailSignature(quote, item.thumbnail);
const existingThumbnail = signature
? thumbnailSignatures.get(signature)
: undefined;
const existingThumbnail = getCachedAttachmentBySignature(
thumbnailSignatures,
item.thumbnail
);
// We've already downloaded this elsewhere!
if (
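
The immediate-download hook now keys off the composite signature rather than the bare digest. A sketch of how a caller might request immediate download of one tapped attachment; `tappedAttachment` and `message` are illustrative, not part of the diff.

```
// `${digest}.${plaintextHash}` tolerates either identifier being absent
// before the download completes.
const signature = getUndownloadedAttachmentSignature(tappedAttachment);

await queueAttachmentDownloads(message, {
  attachmentSignatureForImmediate: signature,
  isManualDownload: true,
});
```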

View file

@ -7,45 +7,25 @@ import { strictAssert } from './assert';
/**
 * Truncates the stream to the target size, discarding any padding.
*/
export function trimPadding(
size: number,
onPaddingAnalyzed: ({
isPaddingAllZeros,
}: {
isPaddingAllZeros: boolean;
}) => void
): Transform {
export function trimPadding(size: number): Transform {
let total = 0;
let seenNonZeroPadding = false;
return new Transform({
transform(chunk, _encoding, callback) {
strictAssert(chunk instanceof Uint8Array, 'chunk must be Uint8Array');
const chunkSize = chunk.byteLength;
const sizeLeft = size - total;
let paddingInThisChunk: Uint8Array | undefined;
if (sizeLeft >= chunkSize) {
total += chunkSize;
callback(null, chunk);
} else if (sizeLeft > 0) {
total += sizeLeft;
const data = chunk.subarray(0, sizeLeft);
paddingInThisChunk = chunk.subarray(sizeLeft);
callback(null, data);
} else {
paddingInThisChunk = chunk;
callback(null, null);
}
if (
paddingInThisChunk &&
!seenNonZeroPadding &&
!paddingInThisChunk.every(el => el === 0)
) {
seenNonZeroPadding = true;
}
},
flush(callback) {
onPaddingAnalyzed({ isPaddingAllZeros: !seenNonZeroPadding });
callback();
},
});
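
With the padding analysis gone, `trimPadding` is a pure truncation step. A usage sketch in a decrypt pipeline, assuming the transform above is in scope (the stream sources are illustrative):

```
import { pipeline } from 'node:stream/promises';

async function truncateToPlaintextSize(
  decrypted: NodeJS.ReadableStream,
  plaintextSize: number,
  sink: NodeJS.WritableStream
): Promise<void> {
  // Everything past `plaintextSize` is encryption padding and is dropped.
  await pipeline(decrypted, trimPadding(plaintextSize), sink);
}
```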

View file

@ -18,7 +18,6 @@ import {
encryptAttachmentV2ToDisk,
safeUnlink,
type PlaintextSourceType,
type HardcodedIVForEncryptionType,
} from '../AttachmentCrypto';
import { missingCaseError } from './missingCaseError';
import { uuidToBytes } from './uuidToBytes';
@ -73,13 +72,11 @@ export async function uploadAttachment(
}
export async function encryptAndUploadAttachment({
dangerousIv,
keys,
needIncrementalMac,
plaintext,
uploadType,
}: {
dangerousIv?: HardcodedIVForEncryptionType;
keys: Uint8Array;
needIncrementalMac: boolean;
plaintext: PlaintextSourceType;
@ -108,7 +105,6 @@ export async function encryptAndUploadAttachment({
}
const encrypted = await encryptAttachmentV2ToDisk({
dangerousIv,
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
keys,

View file

@ -18,7 +18,6 @@ import { createLogger } from '../../logging/log';
const log = createLogger('attachments');
export * from '../../util/ensureAttachmentIsReencryptable';
export * from '../../../app/attachments';
type FSAttrType = {