Enable downloading attachments from backup CDN
This commit is contained in:
parent 2964006b79 / commit 1e8047cf73

21 changed files with 989 additions and 385 deletions
@@ -12,6 +12,18 @@ message BackupInfo {
   uint64 backupTimeMs = 2;
 }
 
+// Frames must follow these ordering rules:
+//
+// 1. There is exactly one AccountData and it is the first frame.
+// 2. A frame referenced by ID must come before the referencing frame,
+//    e.g. a Recipient must come before any Chat referencing it.
+// 3. All ChatItems must appear in global Chat rendering order.
+//    (The order in which they were received by the client.)
+//
+// Recipients, Chats, Ad-hoc Calls, & StickerPacks can be in any order.
+// (But must respect rule 2.)
+// For example, Chats may all be together at the beginning,
+// or may each immediately precede its first ChatItem.
 message Frame {
   oneof item {
     AccountData account = 1;
@@ -364,10 +376,16 @@ message FilePointer {
   // References attachments in the backup (media) storage tier.
   message BackupLocator {
     string mediaName = 1;
-    uint32 cdnNumber = 2;
+    // If present, the CDN number of the successful upload.
+    // If empty/0, the attachment may still have been uploaded, and clients
+    // can discover the CDN number via the list endpoint.
+    optional uint32 cdnNumber = 2;
    bytes key = 3;
    bytes digest = 4;
    uint32 size = 5;
+    // Fallback in case the backup-tier upload failed.
+    optional string transitCdnKey = 6;
+    optional uint32 transitCdnNumber = 7;
  }
 
  // References attachments in the transit storage tier.
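The transit-tier fields make the locator self-contained: if the backup-tier upload never finished, a client can still fetch the original upload from the transit CDN. A minimal sketch of that selection logic (a hypothetical helper, not part of this commit; field names follow the proto above):

function pickDownloadSource(locator: {
  mediaName: string;
  cdnNumber?: number;
  transitCdnKey?: string;
  transitCdnNumber?: number;
}): { tier: 'backup' | 'transit'; key: string; cdn: number } {
  // Prefer the backup (media) tier; cdnNumber 0/absent means "ask the list endpoint".
  if (locator.mediaName) {
    return { tier: 'backup', key: locator.mediaName, cdn: locator.cdnNumber ?? 0 };
  }
  // Fall back to the transit tier when the backup upload failed.
  if (locator.transitCdnKey) {
    return { tier: 'transit', key: locator.transitCdnKey, cdn: locator.transitCdnNumber ?? 0 };
  }
  throw new Error('BackupLocator has neither mediaName nor transitCdnKey');
}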
@@ -519,9 +537,17 @@ message IndividualCallChatUpdate {
 }
 
 message GroupCallChatUpdate {
+  enum LocalUserJoined {
+    UNKNOWN = 0;
+    JOINED = 1;
+    DID_NOT_JOIN = 2;
+  }
+
   optional bytes startedCallAci = 1;
   uint64 startedCallTimestamp = 2;
   repeated bytes inCallAcis = 3;
+  uint64 endedCallTimestamp = 4; // 0 indicates we do not know
+  LocalUserJoined localUserJoined = 5;
 }
 
 message SimpleChatUpdate {
@@ -820,4 +846,4 @@ message StickerPack {
   message StickerPackSticker {
     string emoji = 1;
     uint32 id = 2;
   }
 }
@@ -26,6 +26,7 @@ import type { AttachmentType } from './types/Attachment';
 import type { ContextType } from './types/Message2';
 import { strictAssert } from './util/assert';
 import * as Errors from './types/errors';
+import { isNotNil } from './util/isNotNil';
 
 // This file was split from ts/Crypto.ts because it pulls things in from node, and
 // too many things pull in Crypto.ts, so it broke storybook.
@@ -58,10 +59,12 @@ export async function encryptAttachmentV2({
   keys,
   plaintextAbsolutePath,
   dangerousTestOnlyIv,
+  dangerousTestOnlySkipPadding = false,
 }: {
   keys: Readonly<Uint8Array>;
   plaintextAbsolutePath: string;
   dangerousTestOnlyIv?: Readonly<Uint8Array>;
+  dangerousTestOnlySkipPadding?: boolean;
 }): Promise<EncryptedAttachmentV2> {
   const logId = 'encryptAttachmentV2';
 
@@ -75,6 +78,14 @@ export async function encryptAttachmentV2({
   if (dangerousTestOnlyIv && window.getEnvironment() !== Environment.Test) {
     throw new Error(`${logId}: Used dangerousTestOnlyIv outside tests!`);
   }
+  if (
+    dangerousTestOnlySkipPadding &&
+    window.getEnvironment() !== Environment.Test
+  ) {
+    throw new Error(
+      `${logId}: Used dangerousTestOnlySkipPadding outside tests!`
+    );
+  }
   const iv = dangerousTestOnlyIv || _generateAttachmentIv();
 
   const plaintextHash = createHash(HashType.size256);
@@ -96,14 +107,16 @@ export async function encryptAttachmentV2({
     }
 
     await pipeline(
-      readFd.createReadStream(),
-      peekAndUpdateHash(plaintextHash),
-      appendPaddingStream(),
-      createCipheriv(CipherType.AES256CBC, aesKey, iv),
-      prependIv(iv),
-      appendMacStream(macKey),
-      peekAndUpdateHash(digest),
-      writeFd.createWriteStream()
+      [
+        readFd.createReadStream(),
+        peekAndUpdateHash(plaintextHash),
+        dangerousTestOnlySkipPadding ? undefined : appendPaddingStream(),
+        createCipheriv(CipherType.AES256CBC, aesKey, iv),
+        prependIv(iv),
+        appendMacStream(macKey),
+        peekAndUpdateHash(digest),
+        writeFd.createWriteStream(),
+      ].filter(isNotNil)
     );
   } catch (error) {
     log.error(
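Passing an array to pipeline() (supported by node:stream/promises) is what makes the optional padding stage possible: stages that do not apply stay undefined and are stripped with filter(isNotNil). The same idiom reappears in decryptAttachmentV2 below for the optional outer-decryption stages. A standalone sketch of the pattern (simplified stages, illustrative only):

import { pipeline } from 'stream/promises';
import { PassThrough, Readable, Writable } from 'stream';

function isNotNil<T>(value: T | null | undefined): value is T {
  return value != null;
}

async function copyWithOptionalStage(
  source: Readable,
  sink: Writable,
  extraStage?: PassThrough // e.g. padding, hashing, or decryption
): Promise<void> {
  await pipeline(
    [
      source,
      extraStage, // may be undefined; filtered out below
      sink,
    ].filter(isNotNil)
  );
}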
@@ -136,32 +149,59 @@ export async function encryptAttachmentV2({
   };
 }
 
-export async function decryptAttachmentV2({
-  ciphertextPath,
-  id,
-  keys,
-  size,
-  theirDigest,
-}: {
-  ciphertextPath: string;
-  id: string;
-  keys: Readonly<Uint8Array>;
-  size: number;
-  theirDigest: Readonly<Uint8Array>;
-}): Promise<DecryptedAttachmentV2> {
-  const logId = `decryptAttachmentV2(${id})`;
+type DecryptAttachmentOptionsType = Readonly<{
+  ciphertextPath: string;
+  idForLogging: string;
+  aesKey: Readonly<Uint8Array>;
+  macKey: Readonly<Uint8Array>;
+  size: number;
+  theirDigest: Readonly<Uint8Array>;
+  outerEncryption?: {
+    aesKey: Readonly<Uint8Array>;
+    macKey: Readonly<Uint8Array>;
+  };
+}>;
+
+export async function decryptAttachmentV2(
+  options: DecryptAttachmentOptionsType
+): Promise<DecryptedAttachmentV2> {
+  const {
+    idForLogging,
+    macKey,
+    aesKey,
+    ciphertextPath,
+    theirDigest,
+    outerEncryption,
+  } = options;
+
+  const logId = `decryptAttachmentV2(${idForLogging})`;
 
   // Create random output file
   const relativeTargetPath = getRelativePath(createName());
   const absoluteTargetPath =
     window.Signal.Migrations.getAbsoluteAttachmentPath(relativeTargetPath);
 
-  const { aesKey, macKey } = splitKeys(keys);
-
   const digest = createHash(HashType.size256);
   const hmac = createHmac(HashType.size256, macKey);
   const plaintextHash = createHash(HashType.size256);
-  let theirMac = null as Uint8Array | null; // TypeScript shenanigans
+  let theirMac: Uint8Array | undefined;
+
+  // When downloading from backup there is an outer encryption layer; in that
+  // case we need to decrypt the outer layer and check its MAC.
+  let theirOuterMac: Uint8Array | undefined;
+  const outerHmac = outerEncryption
+    ? createHmac(HashType.size256, outerEncryption.macKey)
+    : undefined;
+
+  const maybeOuterEncryptionGetIvAndDecipher = outerEncryption
+    ? getIvAndDecipher(outerEncryption.aesKey)
+    : undefined;
+
+  const maybeOuterEncryptionGetMacAndUpdateMac = outerHmac
+    ? getMacAndUpdateHmac(outerHmac, theirOuterMacValue => {
+        theirOuterMac = theirOuterMacValue;
+      })
+    : undefined;
 
   let readFd;
   let writeFd;
@@ -179,15 +219,19 @@ export async function decryptAttachmentV2(
     }
 
     await pipeline(
-      readFd.createReadStream(),
-      peekAndUpdateHash(digest),
-      getMacAndUpdateHmac(hmac, theirMacValue => {
-        theirMac = theirMacValue;
-      }),
-      getIvAndDecipher(aesKey),
-      trimPadding(size),
-      peekAndUpdateHash(plaintextHash),
-      writeFd.createWriteStream()
+      [
+        readFd.createReadStream(),
+        maybeOuterEncryptionGetMacAndUpdateMac,
+        maybeOuterEncryptionGetIvAndDecipher,
+        peekAndUpdateHash(digest),
+        getMacAndUpdateHmac(hmac, theirMacValue => {
+          theirMac = theirMacValue;
+        }),
+        getIvAndDecipher(aesKey),
+        trimPadding(options.size),
+        peekAndUpdateHash(plaintextHash),
+        writeFd.createWriteStream(),
+      ].filter(isNotNil)
     );
   } catch (error) {
     log.error(
@@ -224,11 +268,29 @@ export async function decryptAttachmentV2(
   if (!constantTimeEqual(ourMac, theirMac)) {
     throw new Error(`${logId}: Bad MAC`);
   }
 
   if (!constantTimeEqual(ourDigest, theirDigest)) {
     throw new Error(`${logId}: Bad digest`);
   }
 
+  if (outerEncryption) {
+    strictAssert(outerHmac, 'outerHmac must exist');
+
+    const ourOuterMac = outerHmac.digest();
+    strictAssert(
+      ourOuterMac.byteLength === ATTACHMENT_MAC_LENGTH,
+      `${logId}: Failed to generate ourOuterMac!`
+    );
+    strictAssert(
+      theirOuterMac != null &&
+        theirOuterMac.byteLength === ATTACHMENT_MAC_LENGTH,
+      `${logId}: Failed to find theirOuterMac!`
+    );
+
+    if (!constantTimeEqual(ourOuterMac, theirOuterMac)) {
+      throw new Error(`${logId}: Bad outer encryption MAC`);
+    }
+  }
+
   return {
     path: relativeTargetPath,
     plaintextHash: ourPlaintextHash,
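Conceptually, an object on the backup CDN is the ordinary attachment ciphertext wrapped in one more envelope of the same shape, keyed separately by the backup media key material (deriveBackupMediaKeyMaterial, added to ts/Crypto.ts below); the outer layer skips padding. A schematic of the layering, as illustrative pseudocode only, assuming each layer is laid out as iv || AES-CBC ciphertext || HMAC:

// innerKeys protect the attachment itself; outerKeys come from the backup key material.
//
//   innerCiphertext = iv1 || AES-CBC(innerAesKey, iv1, pad(plaintext)) || HMAC(innerMacKey, ...)
//   backupCdnObject = iv2 || AES-CBC(outerAesKey, iv2, innerCiphertext) || HMAC(outerMacKey, ...)
//
// decryptAttachmentV2 with outerEncryption set peels the outer envelope and verifies
// its MAC (above), then runs the normal inner pipeline on what remains.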
@@ -238,7 +300,12 @@ export async function decryptAttachmentV2(
 /**
  * Splits the keys into aes and mac keys.
  */
-function splitKeys(keys: Uint8Array) {
+type AttachmentEncryptionKeysType = {
+  aesKey: Uint8Array;
+  macKey: Uint8Array;
+};
+export function splitKeys(keys: Uint8Array): AttachmentEncryptionKeysType {
   strictAssert(
     keys.byteLength === KEY_SET_LENGTH,
     `attachment keys must be ${KEY_SET_LENGTH} bytes, got ${keys.byteLength}`
@@ -376,10 +443,20 @@ function trimPadding(size: number) {
   });
 }
 
-export function getAttachmentDownloadSize(size: number): number {
-  return (
-    // Multiply this by 1.05 to allow some variance
-    logPadSize(size) * 1.05 + IV_LENGTH + ATTACHMENT_MAC_LENGTH
-  );
-}
+export function getAttachmentCiphertextLength(plaintextLength: number): number {
+  const paddedPlaintextSize = logPadSize(plaintextLength);
+
+  return (
+    IV_LENGTH +
+    getAesCbcCiphertextLength(paddedPlaintextSize) +
+    ATTACHMENT_MAC_LENGTH
+  );
+}
+
+export function getAesCbcCiphertextLength(plaintextLength: number): number {
+  const AES_CBC_BLOCK_SIZE = 16;
+  return (
+    (1 + Math.floor(plaintextLength / AES_CBC_BLOCK_SIZE)) * AES_CBC_BLOCK_SIZE
+  );
+}
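The replacement drops the old 1.05 fudge factor in favor of an exact size: AES-256-CBC with PKCS#7 padding always emits (1 + floor(n / 16)) * 16 bytes, i.e. the plaintext rounded up to the next block boundary, with a full extra block when n is already a multiple of 16. Spot checks (assuming IV_LENGTH = 16 and ATTACHMENT_MAC_LENGTH = 32, as the constant names suggest):

// getAesCbcCiphertextLength(0)  === 16  (padding-only block)
// getAesCbcCiphertextLength(15) === 16
// getAesCbcCiphertextLength(16) === 32  (full block forces an extra one)
// getAesCbcCiphertextLength(17) === 32
//
// Total ciphertext on disk for plaintextLength P:
//   IV_LENGTH + getAesCbcCiphertextLength(logPadSize(P)) + ATTACHMENT_MAC_LENGTH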
ts/Crypto.ts (63 changed lines)
@@ -208,10 +208,23 @@ export type BackupKeyMaterialType = Readonly<{
   aesKey: Uint8Array;
 }>;
 
+export type BackupMediaKeyMaterialType = Readonly<{
+  macKey: Uint8Array;
+  aesKey: Uint8Array;
+  iv: Uint8Array;
+}>;
+
 const BACKUP_AES_KEY_LEN = 32;
 const BACKUP_MAC_KEY_LEN = 32;
 const BACKUP_MATERIAL_INFO = '20231003_Signal_Backups_EncryptMessageBackup';
 
+const BACKUP_MEDIA_ID_INFO = '20231003_Signal_Backups_Media_ID';
+const BACKUP_MEDIA_ID_LEN = 15;
+const BACKUP_MEDIA_ENCRYPT_INFO = '20231003_Signal_Backups_Media_ID';
+const BACKUP_MEDIA_AES_KEY_LEN = 32;
+const BACKUP_MEDIA_MAC_KEY_LEN = 32;
+const BACKUP_MEDIA_IV_LEN = 16;
+
 export function deriveBackupKeyMaterial(
   backupKey: Uint8Array,
   backupId: Uint8Array
@@ -238,6 +251,56 @@ export function deriveBackupKeyMaterial(
   };
 }
 
+export function deriveMediaIdFromMediaName(
+  backupKey: Uint8Array,
+  mediaName: string
+): Uint8Array {
+  if (backupKey.byteLength !== BACKUP_KEY_LEN) {
+    throw new Error('deriveMediaIdFromMediaName: invalid backup key length');
+  }
+
+  if (!mediaName) {
+    throw new Error('deriveMediaIdFromMediaName: mediaName missing');
+  }
+
+  const hkdf = HKDF.new(3);
+  return hkdf.deriveSecrets(
+    BACKUP_MEDIA_ID_LEN,
+    Buffer.from(backupKey),
+    Buffer.from(BACKUP_MEDIA_ID_INFO),
+    Buffer.from(Bytes.fromBase64(mediaName))
+  );
+}
+
+export function deriveBackupMediaKeyMaterial(
+  backupKey: Uint8Array,
+  mediaId: Uint8Array
+): BackupMediaKeyMaterialType {
+  if (backupKey.byteLength !== BACKUP_KEY_LEN) {
+    throw new Error('deriveBackupMediaKeyMaterial: invalid backup key length');
+  }
+
+  if (!mediaId.length) {
+    throw new Error('deriveBackupMediaKeyMaterial: mediaId missing');
+  }
+
+  const hkdf = HKDF.new(3);
+  const material = hkdf.deriveSecrets(
+    BACKUP_MEDIA_MAC_KEY_LEN + BACKUP_MEDIA_AES_KEY_LEN + BACKUP_MEDIA_IV_LEN,
+    Buffer.from(backupKey),
+    Buffer.from(BACKUP_MEDIA_ENCRYPT_INFO),
+    Buffer.from(mediaId)
+  );
+
+  return {
+    macKey: material.subarray(0, BACKUP_MEDIA_MAC_KEY_LEN),
+    aesKey: material.subarray(
+      BACKUP_MEDIA_MAC_KEY_LEN,
+      BACKUP_MEDIA_MAC_KEY_LEN + BACKUP_MEDIA_AES_KEY_LEN
+    ),
+    iv: material.subarray(BACKUP_MEDIA_MAC_KEY_LEN + BACKUP_MEDIA_AES_KEY_LEN),
+  };
+}
+
 export function deriveStorageItemKey(
   storageServiceKey: Uint8Array,
   itemID: string
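The 80 bytes of HKDF output are split positionally: bytes 0-31 are the outer-layer MAC key, bytes 32-63 the AES key, and bytes 64-79 the IV. A sketch of how the download path chains these helpers (a sketch only; getBackupKey comes from ts/services/backups/crypto per the imports later in this diff, and the import path is as seen from ts/textsecure):

import { deriveMediaIdFromMediaName, deriveBackupMediaKeyMaterial } from '../Crypto';

// mediaName is expected to be the base64-encoded plaintext hash stored in the
// BackupLocator (see the removed getMediaNameForBackup helper below).
function outerKeysFor(backupKey: Uint8Array, mediaName: string) {
  const mediaId = deriveMediaIdFromMediaName(backupKey, mediaName); // 15 bytes
  const { macKey, aesKey, iv } = deriveBackupMediaKeyMaterial(backupKey, mediaId);
  return { macKey, aesKey, iv }; // 32 + 32 + 16 bytes
}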
@@ -13,6 +13,7 @@ import type {
 import type { BackupCredentials } from './credentials';
 
 export class BackupAPI {
+  private cachedBackupInfo: GetBackupInfoResponseType | undefined;
   constructor(private credentials: BackupCredentials) {}
 
   public async refresh(): Promise<void> {
@@ -23,9 +24,33 @@ export class BackupAPI {
   }
 
   public async getInfo(): Promise<GetBackupInfoResponseType> {
-    return this.server.getBackupInfo(
+    const backupInfo = await this.server.getBackupInfo(
       await this.credentials.getHeadersForToday()
     );
+    this.cachedBackupInfo = backupInfo;
+    return backupInfo;
+  }
+
+  private async getCachedInfo(): Promise<GetBackupInfoResponseType> {
+    if (this.cachedBackupInfo) {
+      return this.cachedBackupInfo;
+    }
+
+    return this.getInfo();
+  }
+
+  public async getMediaDir(): Promise<string> {
+    return (await this.getCachedInfo()).mediaDir;
+  }
+
+  public async getBackupDir(): Promise<string> {
+    return (await this.getCachedInfo())?.backupDir;
+  }
+
+  // Backup name will change whenever a new backup is created, so we don't
+  // want to cache it.
+  public async getBackupName(): Promise<string> {
+    return (await this.getInfo()).backupName;
   }
 
   public async getUploadForm(): Promise<GetBackupUploadFormResponseType> {
@@ -63,6 +88,10 @@ export class BackupAPI {
     });
   }
 
+  public clearCache(): void {
+    this.cachedBackupInfo = undefined;
+  }
+
   private get server(): WebAPIType {
     const { server } = window.textsecure;
     strictAssert(server, 'server not available');
@@ -6,16 +6,18 @@ import {
   BackupAuthCredential,
   BackupAuthCredentialRequestContext,
   BackupAuthCredentialResponse,
+  type BackupLevel,
   GenericServerPublicParams,
 } from '@signalapp/libsignal-client/zkgroup';
 
 import * as log from '../../logging/log';
 import { strictAssert } from '../../util/assert';
 import { drop } from '../../util/drop';
-import { toDayMillis } from '../../util/timestamp';
-import { DAY, DurationInSeconds } from '../../util/durations';
+import { isMoreRecentThan, toDayMillis } from '../../util/timestamp';
+import { DAY, DurationInSeconds, HOUR } from '../../util/durations';
 import { BackOff, FIBONACCI_TIMEOUTS } from '../../util/BackOff';
 import type {
+  BackupCdnReadCredentialType,
   BackupCredentialType,
   BackupPresentationHeadersType,
   BackupSignedPresentationType,
@@ -37,9 +39,15 @@ export function getAuthContext(): BackupAuthCredentialRequestContext {
 
 const FETCH_INTERVAL = 3 * DAY;
 
+// Credentials should be good for 24 hours, but let's play it safe.
+const BACKUP_CDN_READ_CREDENTIALS_VALID_DURATION = 12 * HOUR;
+
 export class BackupCredentials {
   private activeFetch: ReturnType<typeof this.fetch> | undefined;
 
+  private cachedCdnReadCredentials: Record<
+    number,
+    BackupCdnReadCredentialType
+  > = {};
+
   private readonly fetchBackoff = new BackOff(FIBONACCI_TIMEOUTS);
 
   public start(): void {
@@ -112,15 +120,41 @@ export class BackupCredentials {
     return headers;
   }
 
-  public async getCDNCredentials(
+  public async getCDNReadCredentials(
     cdn: number
   ): Promise<GetBackupCDNCredentialsResponseType> {
     const { server } = window.textsecure;
     strictAssert(server, 'server not available');
 
+    // Backup CDN read credentials are short-lived; we cache them only in
+    // memory, so that if they get invalidated for any reason, we'll fetch
+    // new ones on app restart.
+    const cachedCredentialsForThisCdn = this.cachedCdnReadCredentials[cdn];
+
+    if (
+      cachedCredentialsForThisCdn &&
+      isMoreRecentThan(
+        cachedCredentialsForThisCdn.retrievedAtMs,
+        BACKUP_CDN_READ_CREDENTIALS_VALID_DURATION
+      )
+    ) {
+      return cachedCredentialsForThisCdn.credentials;
+    }
+
     const headers = await this.getHeadersForToday();
 
-    return server.getBackupCDNCredentials({ headers, cdn });
+    const retrievedAtMs = Date.now();
+    const newCredentials = await server.getBackupCDNCredentials({
+      headers,
+      cdn,
+    });
+
+    this.cachedCdnReadCredentials[cdn] = {
+      credentials: newCredentials,
+      cdnNumber: cdn,
+      retrievedAtMs,
+    };
+
+    return newCredentials;
   }
 
   private scheduleFetch(): void {
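With the cache in place, callers simply ask for credentials by CDN number and the class decides whether a network round-trip is needed. A hedged usage sketch (CDN number 3 is only an example; the response's headers field is what getAttachmentFromBackupTier consumes later in this diff):

// First call within the ~12h window hits the server; later calls are served
// from this.cachedCdnReadCredentials until the entry ages out.
const { headers } = await backupsService.credentials.getCDNReadCredentials(3);
// headers are then passed to server.getAttachmentFromBackupTier(...)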
@@ -281,8 +315,13 @@ export class BackupCredentials {
     return result;
   }
 
-  // Called when backup tier changes
-  public async clear(): Promise<void> {
+  public async getBackupLevel(): Promise<BackupLevel> {
+    return (await this.getForToday()).level;
+  }
+
+  // Called when the backup tier changes or on the userChanged event
+  public async clearCache(): Promise<void> {
+    this.cachedCdnReadCredentials = {};
     await window.storage.put('backupCredentials', []);
   }
 }
@@ -40,7 +40,10 @@ import * as Bytes from '../../Bytes';
 import { BACKUP_VERSION } from './constants';
 import type { AboutMe } from './types';
 import type { GroupV2ChangeDetailType } from '../../groups';
+import { queueAttachmentDownloads } from '../../util/queueAttachmentDownloads';
+import { drop } from '../../util/drop';
+import { isNotNil } from '../../util/isNotNil';
+import { convertFilePointerToAttachment } from './util/filePointers';
 
 const MAX_CONCURRENCY = 10;
@@ -100,14 +103,18 @@ export class BackupImportStream extends Writable {
     name: 'BackupImport.saveMessageBatcher',
     wait: 0,
     maxSize: 1000,
-    processBatch: batch => {
+    processBatch: async batch => {
       const ourAci = this.ourConversation?.serviceId;
       assertDev(isAciString(ourAci), 'Our conversation must have ACI');
-      return Data.saveMessages(batch, {
+      await Data.saveMessages(batch, {
         forceSave: true,
         ourAci,
       });
-      // TODO (DESKTOP-6845): after we save messages, queue their attachment downloads
+
+      // TODO (DESKTOP-7402): consider re-saving after updating the pending state
+      for (const messageAttributes of batch) {
+        drop(queueAttachmentDownloads(messageAttributes));
+      }
     },
   });
   private ourConversation?: ConversationAttributesType;
@@ -722,7 +729,14 @@ export class BackupImportStream extends Writable {
   ): Partial<MessageAttributesType> {
     return {
       body: data.text?.body ?? '',
-      // TODO (DESKTOP-6845): add attachments
+      attachments: data.attachments
+        ?.map(attachment => {
+          if (!attachment.pointer) {
+            return null;
+          }
+          return convertFilePointerToAttachment(attachment.pointer);
+        })
+        .filter(isNotNil),
       reactions: data.reactions?.map(
         ({ emoji, authorId, sentTimestamp, receivedTimestamp }) => {
           strictAssert(emoji != null, 'reaction must have an emoji');
@@ -4,7 +4,7 @@
 import { pipeline } from 'stream/promises';
 import { PassThrough } from 'stream';
 import type { Readable, Writable } from 'stream';
-import { createWriteStream } from 'fs';
+import { createReadStream, createWriteStream } from 'fs';
 import { createGzip, createGunzip } from 'zlib';
 import { createCipheriv, createHmac, randomBytes } from 'crypto';
 import { noop } from 'lodash';
@@ -54,6 +54,11 @@ export class BackupsService {
 
     drop(this.runPeriodicRefresh());
     this.credentials.start();
+
+    window.Whisper.events.on('userChanged', () => {
+      drop(this.credentials.clearCache());
+      this.api.clearCache();
+    });
   }
 
   public async exportBackup(sink: Writable): Promise<void> {
@@ -113,6 +118,10 @@ export class BackupsService {
     });
   }
 
+  public async importFromDisk(backupFile: string): Promise<void> {
+    return backupsService.importBackup(() => createReadStream(backupFile));
+  }
+
   public async importBackup(createBackupStream: () => Readable): Promise<void> {
     strictAssert(!this.isRunning, 'BackupService is already running');
 
@@ -175,7 +184,7 @@ export class BackupsService {
       await this.api.refresh();
       log.info('Backup: refreshed');
     } catch (error) {
       log.error('Backup: periodic refresh failed', Errors.toLogFormat(error));
     }
   }
 }
ts/services/backups/util/filePointers.ts (new file, 96 lines)
@@ -0,0 +1,96 @@
+// Copyright 2024 Signal Messenger, LLC
+// SPDX-License-Identifier: AGPL-3.0-only
+import {
+  APPLICATION_OCTET_STREAM,
+  stringToMIMEType,
+} from '../../../types/MIME';
+import type { AttachmentType } from '../../../types/Attachment';
+import type { Backups } from '../../../protobuf';
+import * as Bytes from '../../../Bytes';
+import { getTimestampFromLong } from '../../../util/timestampLongUtils';
+
+export function convertFilePointerToAttachment(
+  filePointer: Backups.FilePointer
+): AttachmentType {
+  const {
+    contentType,
+    width,
+    height,
+    fileName,
+    caption,
+    blurHash,
+    incrementalMac,
+    incrementalMacChunkSize,
+    attachmentLocator,
+    backupLocator,
+    invalidAttachmentLocator,
+  } = filePointer;
+
+  const commonProps: Omit<AttachmentType, 'size'> = {
+    contentType: contentType
+      ? stringToMIMEType(contentType)
+      : APPLICATION_OCTET_STREAM,
+    width: width ?? undefined,
+    height: height ?? undefined,
+    fileName: fileName ?? undefined,
+    caption: caption ?? undefined,
+    blurHash: blurHash ?? undefined,
+    incrementalMac: incrementalMac?.length
+      ? Bytes.toBase64(incrementalMac)
+      : undefined,
+    incrementalMacChunkSize: incrementalMacChunkSize ?? undefined,
+  };
+
+  if (attachmentLocator) {
+    const { cdnKey, cdnNumber, key, digest, uploadTimestamp, size } =
+      attachmentLocator;
+    return {
+      ...commonProps,
+      size: size ?? 0,
+      cdnKey: cdnKey ?? undefined,
+      cdnNumber: cdnNumber ?? undefined,
+      key: key?.length ? Bytes.toBase64(key) : undefined,
+      digest: digest?.length ? Bytes.toBase64(digest) : undefined,
+      uploadTimestamp: uploadTimestamp
+        ? getTimestampFromLong(uploadTimestamp)
+        : undefined,
+    };
+  }
+
+  if (backupLocator) {
+    const {
+      mediaName,
+      cdnNumber,
+      key,
+      digest,
+      size,
+      transitCdnKey,
+      transitCdnNumber,
+    } = backupLocator;
+
+    return {
+      ...commonProps,
+      cdnKey: transitCdnKey ?? undefined,
+      cdnNumber: transitCdnNumber ?? undefined,
+      key: key?.length ? Bytes.toBase64(key) : undefined,
+      digest: digest?.length ? Bytes.toBase64(digest) : undefined,
+      size: size ?? 0,
+      backupLocator: mediaName
+        ? {
+            mediaName,
+            cdnNumber: cdnNumber ?? undefined,
+          }
+        : undefined,
+    };
+  }
+
+  if (invalidAttachmentLocator) {
+    return {
+      ...commonProps,
+      error: true,
+      size: 0,
+    };
+  }
+
+  throw new Error('convertFilePointerToAttachment: missing locator');
+}
@@ -5,7 +5,7 @@ import { assert } from 'chai';
 import { readFileSync, unlinkSync, writeFileSync } from 'fs';
 import { join } from 'path';
 
-import { randomBytes } from 'crypto';
+import { createCipheriv, randomBytes } from 'crypto';
 import * as log from '../logging/log';
 import * as Bytes from '../Bytes';
 import * as Curve from '../Curve';

@@ -34,12 +34,15 @@ import {
   encryptAttachment,
   decryptAttachmentV1,
   padAndEncryptAttachment,
+  CipherType,
 } from '../Crypto';
 import {
   KEY_SET_LENGTH,
   _generateAttachmentIv,
   decryptAttachmentV2,
   encryptAttachmentV2,
+  getAesCbcCiphertextLength,
+  splitKeys,
 } from '../AttachmentCrypto';
 import { createTempDir, deleteTempDir } from '../updater/common';
 import { uuidToBytes, bytesToUuid } from '../util/uuidToBytes';
@@ -576,8 +579,8 @@ describe('Crypto', () => {
 
       const decryptedAttachment = await decryptAttachmentV2({
         ciphertextPath,
-        id: 'test',
-        keys,
+        idForLogging: 'test',
+        ...splitKeys(keys),
         size: FILE_CONTENTS.byteLength,
         theirDigest: encryptedAttachment.digest,
       });

@@ -613,8 +616,8 @@ describe('Crypto', () => {
       );
       const decryptedAttachment = await decryptAttachmentV2({
         ciphertextPath,
-        id: 'test',
-        keys,
+        idForLogging: 'test',
+        ...splitKeys(keys),
         size: FILE_CONTENTS.byteLength,
         theirDigest: encryptedAttachment.digest,
       });

@@ -661,8 +664,8 @@ describe('Crypto', () => {
       );
       const decryptedAttachment = await decryptAttachmentV2({
         ciphertextPath,
-        id: 'test',
-        keys,
+        idForLogging: 'test',
+        ...splitKeys(keys),
         size: data.byteLength,
         theirDigest: encryptedAttachment.digest,
       });
@@ -770,5 +773,205 @@ describe('Crypto', () => {
       }
     }
   });
+
+  describe('decryptAttachmentV2 with outer layer of encryption', () => {
+    async function doubleEncrypt({
+      plaintextAbsolutePath,
+      innerKeys,
+      outerKeys,
+    }: {
+      plaintextAbsolutePath: string;
+      innerKeys: Uint8Array;
+      outerKeys: Uint8Array;
+    }) {
+      let innerCiphertextPath;
+      let outerCiphertextPath;
+      let innerEncryptedAttachment;
+      try {
+        innerEncryptedAttachment = await encryptAttachmentV2({
+          keys: innerKeys,
+          plaintextAbsolutePath,
+        });
+        innerCiphertextPath =
+          window.Signal.Migrations.getAbsoluteAttachmentPath(
+            innerEncryptedAttachment.path
+          );
+
+        const outerEncryptedAttachment = await encryptAttachmentV2({
+          keys: outerKeys,
+          plaintextAbsolutePath: innerCiphertextPath,
+          // We (and the server!) don't pad the second layer
+          dangerousTestOnlySkipPadding: true,
+        });
+
+        outerCiphertextPath =
+          window.Signal.Migrations.getAbsoluteAttachmentPath(
+            outerEncryptedAttachment.path
+          );
+      } finally {
+        if (innerCiphertextPath) {
+          unlinkSync(innerCiphertextPath);
+        }
+      }
+      return {
+        outerCiphertextPath,
+        innerEncryptedAttachment,
+      };
+    }
+
+    it('v2 roundtrips smaller file (all on disk)', async () => {
+      const outerKeys = generateAttachmentKeys();
+      const innerKeys = generateAttachmentKeys();
+      let plaintextPath;
+      let outerCiphertextPath;
+
+      try {
+        const encryptResult = await doubleEncrypt({
+          plaintextAbsolutePath: FILE_PATH,
+          innerKeys,
+          outerKeys,
+        });
+        outerCiphertextPath = encryptResult.outerCiphertextPath;
+
+        const decryptedAttachment = await decryptAttachmentV2({
+          ciphertextPath: outerCiphertextPath,
+          idForLogging: 'test',
+          ...splitKeys(innerKeys),
+          size: FILE_CONTENTS.byteLength,
+          theirDigest: encryptResult.innerEncryptedAttachment.digest,
+          outerEncryption: splitKeys(outerKeys),
+        });
+
+        plaintextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
+          decryptedAttachment.path
+        );
+        const plaintext = readFileSync(plaintextPath);
+        assert.isTrue(constantTimeEqual(FILE_CONTENTS, plaintext));
+        assert.strictEqual(
+          encryptResult.innerEncryptedAttachment.plaintextHash,
+          GHOST_KITTY_HASH
+        );
+        assert.strictEqual(
+          decryptedAttachment.plaintextHash,
+          encryptResult.innerEncryptedAttachment.plaintextHash
+        );
+      } finally {
+        if (plaintextPath) {
+          unlinkSync(plaintextPath);
+        }
+        if (outerCiphertextPath) {
+          unlinkSync(outerCiphertextPath);
+        }
+      }
+    });
+
+    it('v2 roundtrips random data (all on disk)', async () => {
+      const sourcePath = join(tempDir, 'random');
+      // Get a sufficiently large file to have more than 64kb of padding and
+      // trigger push-back on the streams.
+      const data = getRandomBytes(5 * 1024 * 1024);
+
+      writeFileSync(sourcePath, data);
+
+      const outerKeys = generateAttachmentKeys();
+      const innerKeys = generateAttachmentKeys();
+      let plaintextPath;
+      let outerCiphertextPath;
+
+      try {
+        const encryptResult = await doubleEncrypt({
+          plaintextAbsolutePath: sourcePath,
+          innerKeys,
+          outerKeys,
+        });
+        outerCiphertextPath = encryptResult.outerCiphertextPath;
+
+        const decryptedAttachment = await decryptAttachmentV2({
+          ciphertextPath: outerCiphertextPath,
+          idForLogging: 'test',
+          ...splitKeys(innerKeys),
+          size: data.byteLength,
+          theirDigest: encryptResult.innerEncryptedAttachment.digest,
+          outerEncryption: splitKeys(outerKeys),
+        });
+        plaintextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
+          decryptedAttachment.path
+        );
+        const plaintext = readFileSync(plaintextPath);
+        assert.isTrue(constantTimeEqual(data, plaintext));
+      } finally {
+        if (sourcePath) {
+          unlinkSync(sourcePath);
+        }
+        if (plaintextPath) {
+          unlinkSync(plaintextPath);
+        }
+        if (outerCiphertextPath) {
+          unlinkSync(outerCiphertextPath);
+        }
+      }
+    });
+
+    it('v2 fails if outer encryption mac is wrong', async () => {
+      const sourcePath = join(tempDir, 'random');
+      // Get a sufficiently large file to have more than 64kb of padding and
+      // trigger push-back on the streams.
+      const data = getRandomBytes(5 * 1024 * 1024);
+
+      writeFileSync(sourcePath, data);
+
+      const outerKeys = generateAttachmentKeys();
+      const innerKeys = generateAttachmentKeys();
+      let outerCiphertextPath;
+
+      try {
+        const encryptResult = await doubleEncrypt({
+          plaintextAbsolutePath: sourcePath,
+          innerKeys,
+          outerKeys,
+        });
+        outerCiphertextPath = encryptResult.outerCiphertextPath;
+
+        await assert.isRejected(
+          decryptAttachmentV2({
+            ciphertextPath: outerCiphertextPath,
+            idForLogging: 'test',
+            ...splitKeys(innerKeys),
+            size: data.byteLength,
+            theirDigest: encryptResult.innerEncryptedAttachment.digest,
+            outerEncryption: {
+              aesKey: splitKeys(outerKeys).aesKey,
+              macKey: splitKeys(innerKeys).macKey, // wrong mac!
+            },
+          }),
+          /Bad outer encryption MAC/
+        );
+      } finally {
+        if (sourcePath) {
+          unlinkSync(sourcePath);
+        }
+        if (outerCiphertextPath) {
+          unlinkSync(outerCiphertextPath);
+        }
+      }
+    });
+  });
+});
+
+describe('getAesCbcCiphertextLength', () => {
+  function encrypt(length: number) {
+    const cipher = createCipheriv(
+      CipherType.AES256CBC,
+      getRandomBytes(32),
+      getRandomBytes(16)
+    );
+    const encrypted = cipher.update(Buffer.alloc(length));
+    return Buffer.concat([encrypted, cipher.final()]);
+  }
+  it('calculates cipherTextLength correctly', () => {
+    for (let i = 0; i < 128; i += 1) {
+      assert.strictEqual(getAesCbcCiphertextLength(i), encrypt(i).length);
+    }
+  });
+});
@ -1,102 +0,0 @@
|
|||
// Copyright 2024 Signal Messenger, LLC
|
||||
// SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
import { assert } from 'chai';
|
||||
import * as sinon from 'sinon';
|
||||
|
||||
import { getMediaNameForBackup } from '../../util/attachments/getMediaNameForBackup';
|
||||
import { IMAGE_PNG } from '../../types/MIME';
|
||||
import { sha256 } from '../../Crypto';
|
||||
import { DAY } from '../../util/durations';
|
||||
|
||||
describe('getMediaNameForBackup', () => {
|
||||
const TEST_HASH = sha256(Buffer.from('testattachmentdata'));
|
||||
const TEST_HASH_BASE_64 =
|
||||
// calculated as Buffer.from(TEST_HASH).toString('base64')
|
||||
'ds5/U14lB2ziO90B7MldFTJUQdyw4qQ9y6Gnt9fmHL0=';
|
||||
|
||||
afterEach(function (this: Mocha.Context) {
|
||||
sinon.restore();
|
||||
});
|
||||
|
||||
it("should return base64 encoded plaintextHash if it's already been calculated", async () => {
|
||||
assert.strictEqual(
|
||||
await getMediaNameForBackup(
|
||||
{
|
||||
contentType: IMAGE_PNG,
|
||||
size: 100,
|
||||
plaintextHash: Buffer.from(TEST_HASH).toString('hex'),
|
||||
},
|
||||
'senderAci',
|
||||
Date.now()
|
||||
),
|
||||
TEST_HASH_BASE_64
|
||||
);
|
||||
});
|
||||
|
||||
it('should calculate hash from file on disk if plaintextHash has not yet been calculated', async () => {
|
||||
const stubbedGetHashFromDisk = sinon
|
||||
.stub()
|
||||
.callsFake(async (_path: string) =>
|
||||
Buffer.from(TEST_HASH).toString('hex')
|
||||
);
|
||||
|
||||
const mediaName = await getMediaNameForBackup(
|
||||
{
|
||||
contentType: IMAGE_PNG,
|
||||
size: 100,
|
||||
path: 'path/to/file',
|
||||
},
|
||||
'senderAci',
|
||||
Date.now(),
|
||||
{ getPlaintextHashForAttachmentOnDisk: stubbedGetHashFromDisk }
|
||||
);
|
||||
|
||||
assert.strictEqual(stubbedGetHashFromDisk.callCount, 1);
|
||||
assert.strictEqual(mediaName, TEST_HASH_BASE_64);
|
||||
});
|
||||
|
||||
it('should return temporary identifier if attachment is undownloaded but in attachment tier', async () => {
|
||||
const mediaName = await getMediaNameForBackup(
|
||||
{
|
||||
contentType: IMAGE_PNG,
|
||||
size: 100,
|
||||
cdnKey: 'cdnKey',
|
||||
},
|
||||
'senderAci',
|
||||
Date.now()
|
||||
);
|
||||
|
||||
assert.strictEqual(mediaName, 'senderAci_cdnKey');
|
||||
});
|
||||
|
||||
it('should return temporary identifier if undownloaded attachment has temporary error', async () => {
|
||||
const mediaName = await getMediaNameForBackup(
|
||||
{
|
||||
contentType: IMAGE_PNG,
|
||||
size: 100,
|
||||
cdnKey: 'cdnKey',
|
||||
error: true,
|
||||
key: 'attachmentkey',
|
||||
},
|
||||
'senderAci',
|
||||
Date.now()
|
||||
);
|
||||
|
||||
assert.strictEqual(mediaName, 'senderAci_cdnKey');
|
||||
});
|
||||
|
||||
it('should return undefined if attachment is too old to be in attachment tier', async () => {
|
||||
const mediaName = await getMediaNameForBackup(
|
||||
{
|
||||
contentType: IMAGE_PNG,
|
||||
size: 100,
|
||||
cdnKey: 'cdnKey',
|
||||
},
|
||||
'senderAci',
|
||||
Date.now() - 31 * DAY
|
||||
);
|
||||
|
||||
assert.strictEqual(mediaName, undefined);
|
||||
});
|
||||
});
|
ts/test-node/backups/filePointer_test.ts (new file, 149 lines)
@@ -0,0 +1,149 @@
+// Copyright 2024 Signal Messenger, LLC
+// SPDX-License-Identifier: AGPL-3.0-only
+import { assert } from 'chai';
+import Long from 'long';
+import { Backups } from '../../protobuf';
+import { convertFilePointerToAttachment } from '../../services/backups/util/filePointers';
+import { APPLICATION_OCTET_STREAM, IMAGE_PNG } from '../../types/MIME';
+import * as Bytes from '../../Bytes';
+
+describe('convertFilePointerToAttachment', () => {
+  it('processes filepointer with attachmentLocator', () => {
+    const result = convertFilePointerToAttachment(
+      new Backups.FilePointer({
+        contentType: 'image/png',
+        width: 100,
+        height: 100,
+        blurHash: 'blurhash',
+        fileName: 'filename',
+        caption: 'caption',
+        incrementalMac: Bytes.fromString('incrementalMac'),
+        incrementalMacChunkSize: 1000,
+        attachmentLocator: new Backups.FilePointer.AttachmentLocator({
+          size: 128,
+          cdnKey: 'cdnKey',
+          cdnNumber: 2,
+          key: Bytes.fromString('key'),
+          digest: Bytes.fromString('digest'),
+          uploadTimestamp: Long.fromNumber(1970),
+        }),
+      })
+    );
+
+    assert.deepStrictEqual(result, {
+      contentType: IMAGE_PNG,
+      width: 100,
+      height: 100,
+      size: 128,
+      blurHash: 'blurhash',
+      fileName: 'filename',
+      caption: 'caption',
+      cdnKey: 'cdnKey',
+      cdnNumber: 2,
+      key: Bytes.toBase64(Bytes.fromString('key')),
+      digest: Bytes.toBase64(Bytes.fromString('digest')),
+      uploadTimestamp: 1970,
+      incrementalMac: Bytes.toBase64(Bytes.fromString('incrementalMac')),
+      incrementalMacChunkSize: 1000,
+    });
+  });
+
+  it('processes filepointer with backupLocator and missing fields', () => {
+    const result = convertFilePointerToAttachment(
+      new Backups.FilePointer({
+        contentType: 'image/png',
+        width: 100,
+        height: 100,
+        blurHash: 'blurhash',
+        fileName: 'filename',
+        caption: 'caption',
+        incrementalMac: Bytes.fromString('incrementalMac'),
+        incrementalMacChunkSize: 1000,
+        backupLocator: new Backups.FilePointer.BackupLocator({
+          mediaName: 'mediaName',
+          cdnNumber: 3,
+          size: 128,
+          key: Bytes.fromString('key'),
+          digest: Bytes.fromString('digest'),
+          transitCdnKey: 'transitCdnKey',
+          transitCdnNumber: 2,
+        }),
+      })
+    );
+
+    assert.deepStrictEqual(result, {
+      contentType: IMAGE_PNG,
+      width: 100,
+      height: 100,
+      size: 128,
+      blurHash: 'blurhash',
+      fileName: 'filename',
+      caption: 'caption',
+      cdnKey: 'transitCdnKey',
+      cdnNumber: 2,
+      key: Bytes.toBase64(Bytes.fromString('key')),
+      digest: Bytes.toBase64(Bytes.fromString('digest')),
+      incrementalMac: Bytes.toBase64(Bytes.fromString('incrementalMac')),
+      incrementalMacChunkSize: 1000,
+      backupLocator: {
+        mediaName: 'mediaName',
+        cdnNumber: 3,
+      },
+    });
+  });
+
+  it('processes filepointer with invalidAttachmentLocator', () => {
+    const result = convertFilePointerToAttachment(
+      new Backups.FilePointer({
+        contentType: 'image/png',
+        width: 100,
+        height: 100,
+        blurHash: 'blurhash',
+        fileName: 'filename',
+        caption: 'caption',
+        incrementalMac: Bytes.fromString('incrementalMac'),
+        incrementalMacChunkSize: 1000,
+        invalidAttachmentLocator:
+          new Backups.FilePointer.InvalidAttachmentLocator(),
+      })
+    );
+
+    assert.deepStrictEqual(result, {
+      contentType: IMAGE_PNG,
+      width: 100,
+      height: 100,
+      blurHash: 'blurhash',
+      fileName: 'filename',
+      caption: 'caption',
+      incrementalMac: Bytes.toBase64(Bytes.fromString('incrementalMac')),
+      incrementalMacChunkSize: 1000,
+      size: 0,
+      error: true,
+    });
+  });
+
+  it('accepts missing / null fields and adds defaults to contentType and size', () => {
+    const result = convertFilePointerToAttachment(
+      new Backups.FilePointer({
+        backupLocator: new Backups.FilePointer.BackupLocator(),
+      })
+    );
+
+    assert.deepStrictEqual(result, {
+      contentType: APPLICATION_OCTET_STREAM,
+      size: 0,
+      width: undefined,
+      height: undefined,
+      blurHash: undefined,
+      fileName: undefined,
+      caption: undefined,
+      cdnKey: undefined,
+      cdnNumber: undefined,
+      key: undefined,
+      digest: undefined,
+      incrementalMac: undefined,
+      incrementalMacChunkSize: undefined,
+      backupLocator: undefined,
+    });
+  });
+});
@@ -83,7 +83,7 @@
 import { processSyncMessage } from './processSyncMessage';
 import type { EventHandler } from './EventTarget';
 import EventTarget from './EventTarget';
-import { downloadAttachmentV2 } from './downloadAttachment';
+import { downloadAttachment } from './downloadAttachment';
 import type { IncomingWebSocketRequest } from './WebsocketResources';
 import type { ContactDetailsWithAvatar } from './ContactsParser';
 import { parseContactsV2 } from './ContactsParser';

@@ -3764,7 +3764,7 @@ export default class MessageReceiver
     options?: { timeout?: number; disableRetries?: boolean }
   ): Promise<AttachmentType> {
     const cleaned = processAttachment(attachment);
-    return downloadAttachmentV2(this.server, cleaned, options);
+    return downloadAttachment(this.server, cleaned, options);
   }
 
   private async handleEndSession(
ts/textsecure/Types.d.ts (3 changed lines, vendored)
@@ -4,7 +4,7 @@
 import type { SignalService as Proto } from '../protobuf';
 import type { IncomingWebSocketRequest } from './WebsocketResources';
 import type { ServiceIdString, AciString, PniString } from '../types/ServiceId';
-import type { TextAttachmentType } from '../types/Attachment';
+import type { AttachmentType, TextAttachmentType } from '../types/Attachment';
 import type { GiftBadgeStates } from '../components/conversation/Message';
 import type { MIMEType } from '../types/MIME';
 import type { DurationInSeconds } from '../util/durations';

@@ -117,6 +117,7 @@ export type ProcessedAttachment = {
   blurHash?: string;
   cdnNumber?: number;
   textAttachment?: Omit<TextAttachmentType, 'preview'>;
+  backupLocator?: AttachmentType['backupLocator'];
 };
 
 export type ProcessedGroupV2Context = {
@@ -23,10 +23,7 @@ import * as durations from '../util/durations';
 import type { ExplodePromiseResultType } from '../util/explodePromise';
 import { explodePromise } from '../util/explodePromise';
 import { getUserAgent } from '../util/getUserAgent';
-import {
-  getTimeoutStream,
-  getStreamWithTimeout,
-} from '../util/getStreamWithTimeout';
+import { getTimeoutStream } from '../util/getStreamWithTimeout';
 import { formatAcceptLanguageHeader } from '../util/userLanguages';
 import { toWebSafeBase64, fromWebSafeBase64 } from '../util/webSafeBase64';
 import { getBasicAuth } from '../util/getBasicAuth';
@@ -1154,22 +1151,25 @@ export type WebAPIType = {
     imageFiles: Array<string>
   ) => Promise<Array<Uint8Array>>;
   getArtAuth: () => Promise<ArtAuthType>;
-  getAttachment: (
-    cdnKey: string,
-    cdnNumber?: number,
-    options?: {
-      disableRetries?: boolean;
-      timeout?: number;
-    }
-  ) => Promise<Uint8Array>;
-  getAttachmentV2: (
-    cdnKey: string,
-    cdnNumber?: number,
-    options?: {
-      disableRetries?: boolean;
-      timeout?: number;
-    }
-  ) => Promise<Readable>;
+  getAttachmentFromBackupTier: (args: {
+    mediaId: string;
+    backupDir: string;
+    mediaDir: string;
+    cdnNumber: number;
+    headers: Record<string, string>;
+    options?: {
+      disableRetries?: boolean;
+      timeout?: number;
+    };
+  }) => Promise<Readable>;
+  getAttachment: (args: {
+    cdnKey: string;
+    cdnNumber?: number;
+    options?: {
+      disableRetries?: boolean;
+      timeout?: number;
+    };
+  }) => Promise<Readable>;
   getAvatar: (path: string) => Promise<Uint8Array>;
   getHasSubscription: (subscriberId: Uint8Array) => Promise<boolean>;
   getGroup: (options: GroupCredentialsType) => Promise<Proto.Group>;
@@ -1650,7 +1650,7 @@ export function initialize({
     getArtAuth,
     getArtProvisioningSocket,
     getAttachment,
-    getAttachmentV2,
+    getAttachmentFromBackupTier,
     getAvatar,
     getBackupCredentials,
     getBackupCDNCredentials,
@@ -3310,84 +3310,89 @@ export function initialize({
     return packId;
   }
 
-  async function getAttachment(
-    cdnKey: string,
-    cdnNumber?: number,
-    options?: {
-      disableRetries?: boolean;
-      timeout?: number;
-    }
-  ) {
-    const abortController = new AbortController();
-
-    const cdnUrl = isNumber(cdnNumber)
-      ? cdnUrlObject[cdnNumber] ?? cdnUrlObject['0']
-      : cdnUrlObject['0'];
-    // This is going to the CDN, not the service, so we use _outerAjax
-    const stream = await _outerAjax(`${cdnUrl}/attachments/${cdnKey}`, {
-      certificateAuthority,
-      disableRetries: options?.disableRetries,
-      proxyUrl,
-      responseType: 'stream',
-      timeout: options?.timeout || 0,
-      type: 'GET',
-      redactUrl: _createRedactor(cdnKey),
-      version,
-      abortSignal: abortController.signal,
-    });
-
-    const streamPromise = getStreamWithTimeout(stream, {
-      name: `getAttachment(${cdnKey})`,
-      timeout: GET_ATTACHMENT_CHUNK_TIMEOUT,
-      abortController,
-    });
-
-    // Add callback to central store that would reject a promise
-    const { promise: cancelPromise, reject } = explodePromise<Uint8Array>();
-    const inflightRequest = (error: Error) => {
-      reject(error);
-      abortController.abort();
-    };
-    registerInflightRequest(inflightRequest);
-
-    try {
-      return Promise.race([streamPromise, cancelPromise]);
-    } finally {
-      unregisterInFlightRequest(inflightRequest);
-    }
-  }
-
-  async function getAttachmentV2(
-    cdnKey: string,
-    cdnNumber?: number,
-    options?: {
-      disableRetries?: boolean;
-      timeout?: number;
-    }
-  ): Promise<Readable> {
-    const abortController = new AbortController();
-
-    const cdnUrl = isNumber(cdnNumber)
-      ? cdnUrlObject[cdnNumber] ?? cdnUrlObject['0']
-      : cdnUrlObject['0'];
-    // This is going to the CDN, not the service, so we use _outerAjax
-    const downloadStream = await _outerAjax(
-      `${cdnUrl}/attachments/${cdnKey}`,
-      {
-        certificateAuthority,
-        disableRetries: options?.disableRetries,
-        proxyUrl,
-        responseType: 'stream',
-        timeout: options?.timeout || 0,
-        type: 'GET',
-        redactUrl: _createRedactor(cdnKey),
-        version,
-        abortSignal: abortController.signal,
-      }
-    );
+  // The transit tier is the default place for normal (non-backup)
+  // attachments. Called "transit" because it is transitory.
+  async function getAttachment({
+    cdnKey,
+    cdnNumber,
+    options,
+  }: {
+    cdnKey: string;
+    cdnNumber?: number;
+    options?: {
+      disableRetries?: boolean;
+      timeout?: number;
+    };
+  }) {
+    return _getAttachment({
+      cdnPath: `/attachments/${cdnKey}`,
+      cdnNumber: cdnNumber ?? 0,
+      redactor: _createRedactor(cdnKey),
+      options,
+    });
+  }
+
+  async function getAttachmentFromBackupTier({
+    mediaId,
+    backupDir,
+    mediaDir,
+    cdnNumber,
+    headers,
+    options,
+  }: {
+    mediaId: string;
+    backupDir: string;
+    mediaDir: string;
+    cdnNumber: number;
+    headers: Record<string, string>;
+    options?: {
+      disableRetries?: boolean;
+      timeout?: number;
+    };
+  }) {
+    return _getAttachment({
+      cdnPath: `/backups/${backupDir}/${mediaDir}/${mediaId}`,
+      cdnNumber,
+      headers,
+      redactor: _createRedactor(backupDir, mediaDir, mediaId),
+      options,
+    });
+  }
+
+  async function _getAttachment({
+    cdnPath,
+    cdnNumber,
+    headers,
+    redactor,
+    options,
+  }: {
+    cdnPath: string;
+    cdnNumber: number;
+    headers?: Record<string, string>;
+    redactor: RedactUrl;
+    options?: {
+      disableRetries?: boolean;
+      timeout?: number;
+    };
+  }): Promise<Readable> {
+    const abortController = new AbortController();
+    const cdnUrl = cdnUrlObject[cdnNumber] ?? cdnUrlObject['0'];
+    // This is going to the CDN, not the service, so we use _outerAjax
+    const downloadStream = await _outerAjax(`${cdnUrl}${cdnPath}`, {
+      headers,
+      certificateAuthority,
+      disableRetries: options?.disableRetries,
+      proxyUrl,
+      responseType: 'stream',
+      timeout: options?.timeout || 0,
+      type: 'GET',
+      redactUrl: redactor,
+      version,
+      abortSignal: abortController.signal,
+    });
 
     const timeoutStream = getTimeoutStream({
-      name: `getAttachment(${cdnKey})`,
+      name: `getAttachment(${redactor(cdnPath)})`,
       timeout: GET_ATTACHMENT_CHUNK_TIMEOUT,
       abortController,
     });
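After the refactor both fetch paths go through _getAttachment, so the tiers differ only in URL shape and credential headers. The two request shapes, summarized from the code above (host selection via cdnUrlObject; paths as constructed in getAttachment and getAttachmentFromBackupTier):

// Transit tier: anonymous GET keyed by the upload's cdnKey
//   GET {cdnUrl}/attachments/{cdnKey}
//
// Backup (media) tier: credentialed GET keyed by the derived mediaId
//   GET {cdnUrl}/backups/{backupDir}/{mediaDir}/{mediaId}
//   with headers from BackupCredentials.getCDNReadCredentials(cdnNumber)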
@@ -10,110 +10,139 @@ import { ensureFile } from 'fs-extra';
 import * as log from '../logging/log';
 import * as Errors from '../types/errors';
 import { strictAssert } from '../util/assert';
-import { dropNull } from '../util/dropNull';
-import {
-  AttachmentSizeError,
-  type AttachmentType,
-  type DownloadedAttachmentType,
-} from '../types/Attachment';
+import { AttachmentSizeError, type AttachmentType } from '../types/Attachment';
 import * as MIME from '../types/MIME';
 import * as Bytes from '../Bytes';
-import { getFirstBytes, decryptAttachmentV1 } from '../Crypto';
+import {
+  deriveMediaIdFromMediaName,
+  deriveBackupMediaKeyMaterial,
+  type BackupMediaKeyMaterialType,
+} from '../Crypto';
 import {
   decryptAttachmentV2,
-  getAttachmentDownloadSize,
+  getAttachmentCiphertextLength,
   safeUnlinkSync,
+  splitKeys,
 } from '../AttachmentCrypto';
 import type { ProcessedAttachment } from './Types.d';
 import type { WebAPIType } from './WebAPI';
 import { createName, getRelativePath } from '../windows/attachments';
-import { redactCdnKey } from '../util/privacy';
+import { MediaTier } from '../types/AttachmentDownload';
+import { getBackupKey } from '../services/backups/crypto';
+import { backupsService } from '../services/backups';
 
-export function getCdn(attachment: ProcessedAttachment): string {
-  const { cdnId, cdnKey } = attachment;
-  const cdn = cdnId || cdnKey;
-  strictAssert(cdn, 'Attachment was missing cdnId or cdnKey');
-  return cdn;
-}
+const DEFAULT_BACKUP_CDN_NUMBER = 3;
+
+export function getCdnKey(attachment: ProcessedAttachment): string {
+  const cdnKey = attachment.cdnId || attachment.cdnKey;
+  strictAssert(cdnKey, 'Attachment was missing cdnId or cdnKey');
+  return cdnKey;
+}
 
-export async function downloadAttachmentV1(
-  server: WebAPIType,
-  attachment: ProcessedAttachment,
-  options?: {
-    disableRetries?: boolean;
-    timeout?: number;
-  }
-): Promise<DownloadedAttachmentType> {
-  const { cdnNumber, key, digest, size, contentType } = attachment;
-  const cdn = getCdn(attachment);
-
-  const encrypted = await server.getAttachment(
-    cdn,
-    dropNull(cdnNumber),
-    options
-  );
-
-  strictAssert(digest, 'Failure: Ask sender to update Signal and resend.');
-  strictAssert(key, 'attachment has no key');
-
-  const paddedData = decryptAttachmentV1(
-    encrypted,
-    Bytes.fromBase64(key),
-    Bytes.fromBase64(digest)
-  );
-
-  if (!isNumber(size)) {
-    throw new Error(
-      `downloadAttachment: Size was not provided, actual size was ${paddedData.byteLength}`
-    );
-  }
-
-  const data = getFirstBytes(paddedData, size);
-
-  return {
-    ...attachment,
-    size,
-    contentType: contentType
-      ? MIME.stringToMIMEType(contentType)
-      : MIME.APPLICATION_OCTET_STREAM,
-    data,
-  };
-}
+function getMediaIdBytes(attachment: ProcessedAttachment): Uint8Array {
+  const mediaName = attachment.backupLocator?.mediaName;
+  strictAssert(mediaName, 'Attachment was missing mediaName');
+  const backupKey = getBackupKey();
+  return deriveMediaIdFromMediaName(backupKey, mediaName);
+}
+
+function getMediaIdForBackupTier(attachment: ProcessedAttachment): string {
+  return Bytes.toBase64url(getMediaIdBytes(attachment));
+}
+
+function getBackupMediaKeyMaterial(
+  attachment: ProcessedAttachment
+): BackupMediaKeyMaterialType {
+  const mediaId = getMediaIdBytes(attachment);
+  const backupKey = getBackupKey();
+  return deriveBackupMediaKeyMaterial(backupKey, mediaId);
+}
+
+async function getCdnNumberForBackupTier(
+  attachment: ProcessedAttachment
+): Promise<number> {
+  strictAssert(
+    attachment.backupLocator,
+    'Attachment was missing backupLocator'
+  );
+  const backupCdnNumber = attachment.backupLocator.cdnNumber;
+  // TODO (DESKTOP-6983): get the backup CDN number by querying for all media
+  return backupCdnNumber || DEFAULT_BACKUP_CDN_NUMBER;
+}
 
-export async function downloadAttachmentV2(
+export async function downloadAttachment(
   server: WebAPIType,
   attachment: ProcessedAttachment,
   options?: {
     disableRetries?: boolean;
     timeout?: number;
+    onlyFromTransitTier?: boolean;
+    logPrefix?: string;
   }
 ): Promise<AttachmentType> {
-  const { cdnNumber, contentType, digest, key, size } = attachment;
-  const cdn = getCdn(attachment);
-  const logId = `downloadAttachmentV2(${redactCdnKey(cdn)}:`;
+  const logId = `${options?.logPrefix}/downloadAttachmentV2`;
+
+  const { digest, key, size, contentType } = attachment;
 
   strictAssert(digest, `${logId}: missing digest`);
   strictAssert(key, `${logId}: missing key`);
   strictAssert(isNumber(size), `${logId}: missing size`);
 
-  // TODO (DESKTOP-6845): download attachments differentially based on their
-  // media tier (i.e. transit tier or backup tier)
-  const downloadStream = await server.getAttachmentV2(
-    cdn,
-    dropNull(cdnNumber),
-    options
-  );
+  // TODO (DESKTOP-7043): allow downloading from the transit tier even if
+  // there is a backup locator (as a fallback)
+  const mediaTier = attachment.backupLocator
+    ? MediaTier.BACKUP
+    : MediaTier.STANDARD;
+
+  let downloadedPath: string;
+  if (mediaTier === MediaTier.STANDARD) {
+    const cdnKey = getCdnKey(attachment);
+    const { cdnNumber } = attachment;
+
+    const downloadStream = await server.getAttachment({
+      cdnKey,
+      cdnNumber,
+      options,
+    });
+    downloadedPath = await downloadToDisk({ downloadStream, size });
+  } else {
+    const mediaId = getMediaIdForBackupTier(attachment);
+    const cdnNumber = await getCdnNumberForBackupTier(attachment);
+    const cdnCredentials =
+      await backupsService.credentials.getCDNReadCredentials(cdnNumber);
+
+    const backupDir = await backupsService.api.getBackupDir();
+    const mediaDir = await backupsService.api.getMediaDir();
+
+    const downloadStream = await server.getAttachmentFromBackupTier({
+      mediaId,
+      backupDir,
+      mediaDir,
+      headers: cdnCredentials.headers,
+      cdnNumber,
+      options,
+    });
+    downloadedPath = await downloadToDisk({
+      downloadStream,
+      size: getAttachmentCiphertextLength(size),
+    });
+  }
 
-  const cipherTextRelativePath = await downloadToDisk({ downloadStream, size });
   const cipherTextAbsolutePath =
-    window.Signal.Migrations.getAbsoluteAttachmentPath(cipherTextRelativePath);
+    window.Signal.Migrations.getAbsoluteAttachmentPath(downloadedPath);
 
+  const { aesKey, macKey } = splitKeys(Bytes.fromBase64(key));
   const { path, plaintextHash } = await decryptAttachmentV2({
     ciphertextPath: cipherTextAbsolutePath,
-    id: cdn,
-    keys: Bytes.fromBase64(key),
+    idForLogging: logId,
+    aesKey,
+    macKey,
     size,
     theirDigest: Bytes.fromBase64(digest),
+    outerEncryption:
+      mediaTier === 'backup'
+        ? getBackupMediaKeyMaterial(attachment)
+        : undefined,
   });
 
   safeUnlinkSync(cipherTextAbsolutePath);
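Note the per-tier size arithmetic: the transit tier serves the inner ciphertext, while the backup tier serves that ciphertext re-encrypted once more, so the backup download is sized against getAttachmentCiphertextLength(size) instead of the plaintext size. Roughly, assuming IV_LENGTH = 16 and ATTACHMENT_MAC_LENGTH = 32 as the constant names suggest:

// plaintext of size bytes --pad--> logPadSize(size) bytes
//                         --AES--> 16 + getAesCbcCiphertextLength(padded) + 32  (inner ciphertext)
// inner ciphertext        --AES--> 16 + getAesCbcCiphertextLength(inner) + 32   (backup CDN object;
//                                   no logPadSize at this layer, and checkSize below
//                                   still allows 5% slack, see TODO DESKTOP-7046)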
@@ -141,7 +170,7 @@ async function downloadToDisk({
     window.Signal.Migrations.getAbsoluteAttachmentPath(relativeTargetPath);
   await ensureFile(absoluteTargetPath);
   const writeStream = createWriteStream(absoluteTargetPath);
-  const targetSize = getAttachmentDownloadSize(size);
+  const targetSize = getAttachmentCiphertextLength(size);
 
   try {
     await pipeline(downloadStream, checkSize(targetSize), writeStream);
@@ -164,17 +193,27 @@ async function downloadToDisk({
 // A simple transform that throws if it sees more than maxBytes on the stream.
 function checkSize(expectedBytes: number) {
   let totalBytes = 0;
+
+  // TODO (DESKTOP-7046): remove size buffer
+  const maximumSizeBeforeError = expectedBytes * 1.05;
   return new Transform({
     transform(chunk, encoding, callback) {
       totalBytes += chunk.byteLength;
-      if (totalBytes > expectedBytes) {
+      if (totalBytes > maximumSizeBeforeError) {
         callback(
           new AttachmentSizeError(
-            `checkSize: Received ${totalBytes} bytes, max is ${expectedBytes}, `
+            `checkSize: Received ${totalBytes} bytes, max is ${maximumSizeBeforeError}`
           )
         );
         return;
       }
+
+      if (totalBytes > expectedBytes) {
+        log.warn(
+          `checkSize: Received ${totalBytes} bytes, expected ${expectedBytes}`
+        );
+      }
+
       this.push(chunk, encoding);
       callback();
     },
@@ -78,6 +78,14 @@ export type AttachmentType = {
   textAttachment?: TextAttachmentType;
   wasTooBig?: boolean;
 
+  incrementalMac?: string;
+  incrementalMacChunkSize?: number;
+
+  backupLocator?: {
+    mediaName: string;
+    cdnNumber?: number;
+  };
+
   /** Legacy field. Used only for downloading old attachments */
   id?: number;
 
@@ -4,6 +4,11 @@ import { z } from 'zod';
 import { MIMETypeSchema, type MIMEType } from './MIME';
 import type { AttachmentType } from './Attachment';
 
+export enum MediaTier {
+  STANDARD = 'standard',
+  BACKUP = 'backup',
+}
+
 export const attachmentDownloadTypeSchema = z.enum([
   'long-message',
   'attachment',
@@ -89,3 +89,6 @@ export type JSONWithUnknownFields<Value> = Value extends Record<
   : Value extends Array<infer E>
   ? ReadonlyArray<JSONWithUnknownFields<E>>
   : Value;
+
+export type WithRequiredProperties<T, P extends keyof T> = Omit<T, P> &
+  Required<Pick<T, P>>;
@@ -2,6 +2,7 @@
 // SPDX-License-Identifier: AGPL-3.0-only
 
 import type { BackupLevel } from '@signalapp/libsignal-client/zkgroup';
+import type { GetBackupCDNCredentialsResponseType } from '../textsecure/WebAPI';
 
 export type BackupCredentialType = Readonly<{
   credential: string;

@@ -18,3 +19,9 @@ export type BackupSignedPresentationType = Readonly<{
   headers: BackupPresentationHeadersType;
   level: BackupLevel;
 }>;
+
+export type BackupCdnReadCredentialType = Readonly<{
+  credentials: Readonly<GetBackupCDNCredentialsResponseType>;
+  retrievedAtMs: number;
+  cdnNumber: number;
+}>;
@ -1,67 +0,0 @@
|
|||
// Copyright 2024 Signal Messenger, LLC
|
||||
// SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
import { getPlaintextHashForAttachmentOnDisk } from '../../AttachmentCrypto';
|
||||
import type { AttachmentType } from '../../types/Attachment';
|
||||
import { DAY } from '../durations';
|
||||
import * as log from '../../logging/log';
|
||||
import { isOlderThan } from '../timestamp';
|
||||
import { getCdn } from '../../textsecure/downloadAttachment';
|
||||
import * as Bytes from '../../Bytes';
|
||||
|
||||
const TIME_IN_ATTACHMENT_TIER = 30 * DAY;
|
||||
|
||||
// We store the plaintext hash as a hex string, but the mediaName should be
|
||||
// the base64 encoded version.
|
||||
function convertHexStringToBase64(hexString: string): string {
|
||||
return Bytes.toBase64(Bytes.fromHex(hexString));
|
||||
}
|
||||
|
||||
type GetMediaNameDependenciesType = {
|
||||
getPlaintextHashForAttachmentOnDisk: (
|
||||
path: string
|
||||
) => Promise<string | undefined>;
|
||||
};
|
||||
|
||||
export async function getMediaNameForBackup(
|
||||
attachment: AttachmentType,
|
||||
senderAci: string,
|
||||
messageTimestamp: number,
|
||||
// allow optional dependency injection for testing
|
||||
dependencies: GetMediaNameDependenciesType = {
|
||||
getPlaintextHashForAttachmentOnDisk,
|
||||
}
|
||||
): Promise<string | undefined> {
|
||||
if (attachment.plaintextHash) {
|
||||
return convertHexStringToBase64(attachment.plaintextHash);
|
||||
}
|
||||
|
||||
if (attachment.path) {
|
||||
const hashFromFileOnDisk =
|
||||
await dependencies.getPlaintextHashForAttachmentOnDisk(
|
||||
window.Signal.Migrations.getAbsoluteAttachmentPath(attachment.path)
|
||||
);
|
||||
if (!hashFromFileOnDisk) {
|
||||
log.error(
|
||||
'getMediaNameForBackup: no hash from attachment on disk (maybe it is empty?)'
|
||||
);
|
||||
return;
|
||||
}
|
||||
return convertHexStringToBase64(hashFromFileOnDisk);
|
||||
}
|
||||
|
||||
const cdnKey = getCdn(attachment);
|
||||
if (!cdnKey) {
|
||||
log.error('getMediaNameForBackup: attachment has no cdnKey');
|
||||
return;
|
||||
}
|
||||
|
||||
if (isOlderThan(messageTimestamp, TIME_IN_ATTACHMENT_TIER)) {
|
||||
log.error(
|
||||
"getMediaNameForBackup: attachment is not downloaded but is too old; it's no longer in attachment tier."
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
return `${senderAci}_${cdnKey}`;
|
||||
}
|
|
@@ -2,7 +2,7 @@
 // SPDX-License-Identifier: AGPL-3.0-only
 
 import type { AttachmentType } from '../types/Attachment';
-import { downloadAttachmentV2 as doDownloadAttachment } from '../textsecure/downloadAttachment';
+import { downloadAttachment as doDownloadAttachment } from '../textsecure/downloadAttachment';
 
 export class AttachmentNotFoundOnCdnError extends Error {}
 export async function downloadAttachment(