Attachments: support for incrementalMac and chunkSize

Co-authored-by: Scott Nonnenberg <scott@signal.org>
automated-signal 2024-10-09 15:31:32 -05:00 committed by GitHub
parent 4834e3ddc2
commit aaf9e1a418
19 changed files with 322 additions and 69 deletions


@ -297,8 +297,9 @@ export const writeNewAttachmentData = async ({
const keys = generateKeys();
const { plaintextHash, path } = await encryptAttachmentV2ToDisk({
plaintext: { data },
getAbsoluteAttachmentPath,
needIncrementalMac: false,
plaintext: { data },
keys,
});
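
For orientation, here is a minimal, hedged sketch (not part of the diff) of the @signalapp/libsignal-client incremental_mac helpers that the changes below lean on. The helper names and shapes are taken from the import added to AttachmentCrypto.ts; the ciphertext, key, and sizes are placeholders.

import { randomBytes } from 'crypto';
import {
  chunkSizeInBytes,
  everyNthByte,
  inferChunkSize,
  DigestingWritable,
} from '@signalapp/libsignal-client/dist/incremental_mac';

// Placeholder ciphertext and MAC key.
const ciphertext = randomBytes(1024 * 1024);
const macKey = randomBytes(32);

// Pick a chunk size suited to the ciphertext length, and get its byte count
// so it can be persisted alongside the attachment.
const choice = inferChunkSize(ciphertext.byteLength);
const chunkSize = chunkSizeInBytes(choice);

// Stream the ciphertext into a DigestingWritable to produce the incremental MAC.
const digester = new DigestingWritable(macKey, choice);
digester.end(ciphertext, () => {
  // incrementalMac and chunkSize are the two values this commit persists.
  const incrementalMac = digester.getFinalDigest();
  console.log(incrementalMac.byteLength, chunkSize);
});

// The receiver stores only the numeric chunk size; everyNthByte turns it back
// into a ChunkSizeChoice when validating a download.
const receiverChoice = everyNthByte(chunkSize);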


@ -2,12 +2,21 @@
// SPDX-License-Identifier: AGPL-3.0-only
import { createReadStream, createWriteStream } from 'fs';
import { open, unlink } from 'fs/promises';
import { open, unlink, stat } from 'fs/promises';
import { createCipheriv, createHash, createHmac, randomBytes } from 'crypto';
import type { Hash } from 'crypto';
import { PassThrough, Transform, type Writable, Readable } from 'stream';
import { pipeline } from 'stream/promises';
import { isNumber } from 'lodash';
import { ensureFile } from 'fs-extra';
import {
chunkSizeInBytes,
everyNthByte,
inferChunkSize,
} from '@signalapp/libsignal-client/dist/incremental_mac';
import type { ChunkSizeChoice } from '@signalapp/libsignal-client/dist/incremental_mac';
import * as log from './logging/log';
import {
HashType,
@ -31,6 +40,8 @@ import { isNotNil } from './util/isNotNil';
import { missingCaseError } from './util/missingCaseError';
import { getEnvironment, Environment } from './environment';
import { toBase64 } from './Bytes';
import { DigestingPassThrough } from './util/DigestingPassThrough';
import { ValidatingPassThrough } from './util/ValidatingPassThrough';
// This file was split from ts/Crypto.ts because it pulls things in from node, and
// too many things pull in Crypto.ts, so it broke storybook.
@ -53,7 +64,9 @@ export function generateAttachmentKeys(): Uint8Array {
}
export type EncryptedAttachmentV2 = {
chunkSize: number | undefined;
digest: Uint8Array;
incrementalMac: Uint8Array | undefined;
iv: Uint8Array;
plaintextHash: string;
ciphertextSize: number;
@ -83,7 +96,7 @@ export type DecryptedAttachmentV2 = {
export type PlaintextSourceType =
| { data: Uint8Array }
| { stream: Readable }
| { stream: Readable; size?: number }
| { absolutePath: string };
export type HardcodedIVForEncryptionType =
@ -98,11 +111,12 @@ export type HardcodedIVForEncryptionType =
};
type EncryptAttachmentV2PropsType = {
plaintext: PlaintextSourceType;
keys: Readonly<Uint8Array>;
dangerousIv?: HardcodedIVForEncryptionType;
dangerousTestOnlySkipPadding?: boolean;
getAbsoluteAttachmentPath: (relativePath: string) => string;
keys: Readonly<Uint8Array>;
needIncrementalMac: boolean;
plaintext: PlaintextSourceType;
};
export async function encryptAttachmentV2ToDisk(
@ -132,10 +146,11 @@ export async function encryptAttachmentV2ToDisk(
};
}
export async function encryptAttachmentV2({
keys,
plaintext,
dangerousIv,
dangerousTestOnlySkipPadding,
keys,
needIncrementalMac,
plaintext,
sink,
}: EncryptAttachmentV2PropsType & {
sink?: Writable;
@ -176,17 +191,42 @@ export async function encryptAttachmentV2({
let ciphertextSize: number | undefined;
let mac: Uint8Array | undefined;
let incrementalDigestCreator: DigestingPassThrough | undefined;
let chunkSizeChoice: ChunkSizeChoice | undefined;
try {
let source: Readable;
let size;
if ('data' in plaintext) {
source = Readable.from([Buffer.from(plaintext.data)]);
const { data } = plaintext;
source = Readable.from([Buffer.from(data)]);
size = data.byteLength;
} else if ('stream' in plaintext) {
source = plaintext.stream;
size = plaintext.size;
} else {
source = createReadStream(plaintext.absolutePath);
const { absolutePath } = plaintext;
if (needIncrementalMac) {
const fileData = await stat(absolutePath);
size = fileData.size;
}
source = createReadStream(absolutePath);
}
if (needIncrementalMac) {
strictAssert(
isNumber(size),
'Need size if we are to generate incrementalMac!'
);
}
chunkSizeChoice = isNumber(size)
? inferChunkSize(getAttachmentCiphertextLength(size))
: undefined;
incrementalDigestCreator =
needIncrementalMac && chunkSizeChoice
? new DigestingPassThrough(Buffer.from(macKey), chunkSizeChoice)
: undefined;
await pipeline(
[
source,
@ -198,8 +238,9 @@ export async function encryptAttachmentV2({
mac = macValue;
}),
peekAndUpdateHash(digest),
measureSize(size => {
ciphertextSize = size;
incrementalDigestCreator,
measureSize(finalSize => {
ciphertextSize = finalSize;
}),
sink ?? new PassThrough().resume(),
].filter(isNotNil)
@ -236,11 +277,18 @@ export async function encryptAttachmentV2({
}
}
const incrementalMac = incrementalDigestCreator?.getFinalDigest();
return {
chunkSize:
incrementalMac && chunkSizeChoice
? chunkSizeInBytes(chunkSizeChoice)
: undefined,
ciphertextSize,
digest: ourDigest,
incrementalMac,
iv,
plaintextHash: ourPlaintextHash,
ciphertextSize,
};
}
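
Caller-side, the new fields surface on the encrypt result. A hedged sketch, mirroring the test changes later in this diff; the file path and the use of the Migrations helper are placeholders.

const keys = generateAttachmentKeys();
const encrypted = await encryptAttachmentV2ToDisk({
  getAbsoluteAttachmentPath:
    window.Signal.Migrations.getAbsoluteAttachmentPath,
  keys,
  needIncrementalMac: true,
  plaintext: { absolutePath: '/path/to/video.mp4' },
});
// With needIncrementalMac: true (and a determinable plaintext size) both fields
// are populated; with false, or when no size is available, they are undefined.
const { chunkSize, incrementalMac } = encrypted;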
@ -257,6 +305,8 @@ type DecryptAttachmentToSinkOptionsType = Readonly<
| {
type: 'standard';
theirDigest: Readonly<Uint8Array>;
theirIncrementalMac: Readonly<Uint8Array> | undefined;
theirChunkSize: number | undefined;
}
| {
// No need to check integrity for locally reencrypted attachments, or for backup
@ -326,7 +376,7 @@ export async function decryptAttachmentV2ToSink(
options: DecryptAttachmentToSinkOptionsType,
sink: Writable
): Promise<Omit<DecryptedAttachmentV2, 'path'>> {
const { idForLogging, ciphertextPath, outerEncryption } = options;
const { ciphertextPath, idForLogging, outerEncryption } = options;
let aesKey: Uint8Array;
let macKey: Uint8Array;
@ -345,6 +395,18 @@ export async function decryptAttachmentV2ToSink(
const digest = createHash(HashType.size256);
const hmac = createHmac(HashType.size256, macKey);
const plaintextHash = createHash(HashType.size256);
const incrementalDigestValidator =
options.type === 'standard' &&
options.theirIncrementalMac &&
options.theirChunkSize
? new ValidatingPassThrough(
Buffer.from(macKey),
everyNthByte(options.theirChunkSize),
Buffer.from(options.theirIncrementalMac)
)
: undefined;
let theirMac: Uint8Array | undefined;
// When downloading from backup there is an outer encryption layer; in that case we
@ -380,6 +442,7 @@ export async function decryptAttachmentV2ToSink(
maybeOuterEncryptionGetMacAndUpdateMac,
maybeOuterEncryptionGetIvAndDecipher,
peekAndUpdateHash(digest),
incrementalDigestValidator,
getMacAndUpdateHmac(hmac, theirMacValue => {
theirMac = theirMacValue;
}),
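
On the decrypt side, callers now thread the sender-provided values through; when the incremental MAC does not match the ciphertext, decryption rejects (the new Crypto test expects /Corrupted/). A hedged caller-side sketch, with the option shapes borrowed from the test and downloadAttachment changes below; ciphertextPath, keys, size, theirDigest, theirIncrementalMac, and theirChunkSize are placeholders, and splitKeys is the test helper used in this diff.

const decrypted = await decryptAttachmentV2({
  type: 'standard',
  ciphertextPath,
  idForLogging: 'example',
  ...splitKeys(keys),
  size,
  theirDigest,
  theirIncrementalMac, // Uint8Array | undefined
  theirChunkSize, // number | undefined
  getAbsoluteAttachmentPath:
    window.Signal.Migrations.getAbsoluteAttachmentPath,
});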
@ -495,7 +558,6 @@ export async function decryptAndReencryptLocally(
options: DecryptAttachmentOptionsType
): Promise<ReencryptedAttachmentV2> {
const { idForLogging } = options;
const logId = `reencryptAttachmentV2(${idForLogging})`;
// Create random output file
@ -518,12 +580,13 @@ export async function decryptAndReencryptLocally(
const [result] = await Promise.all([
decryptAttachmentV2ToSink(options, passthrough),
await encryptAttachmentV2({
getAbsoluteAttachmentPath: options.getAbsoluteAttachmentPath,
keys,
needIncrementalMac: false,
plaintext: {
stream: passthrough,
},
sink: createWriteStream(absoluteTargetPath),
getAbsoluteAttachmentPath: options.getAbsoluteAttachmentPath,
}),
]);


@ -61,6 +61,8 @@ import {
} from '../util/GoogleChrome';
import { getLocalAttachmentUrl } from '../util/getLocalAttachmentUrl';
import { findRetryAfterTimeFromError } from './helpers/findRetryAfterTimeFromError';
import { supportsIncrementalMac } from '../types/MIME';
import type { MIMEType } from '../types/MIME';
const MAX_CONCURRENT_JOBS = 3;
const RETRY_CONFIG = {
@ -269,8 +271,17 @@ async function backupStandardAttachment(
) {
const jobIdForLogging = getJobIdForLogging(job);
const logId = `AttachmentBackupManager.backupStandardAttachment(${jobIdForLogging})`;
const { path, transitCdnInfo, iv, digest, keys, size, version, localKey } =
job.data;
const {
contentType,
digest,
iv,
keys,
localKey,
path,
size,
transitCdnInfo,
version,
} = job.data;
const mediaId = getMediaIdFromMediaName(job.mediaName);
const backupKeyMaterial = deriveBackupMediaKeyMaterial(
@ -326,14 +337,15 @@ async function backupStandardAttachment(
log.info(`${logId}: uploading to transit tier`);
const uploadResult = await uploadToTransitTier({
absolutePath,
version,
localKey,
size,
keys,
iv,
digest,
logPrefix: logId,
contentType,
dependencies,
digest,
iv,
keys,
localKey,
logPrefix: logId,
size,
version,
});
log.info(`${logId}: copying to backup tier`);
@ -386,11 +398,11 @@ async function backupThumbnailAttachment(
let thumbnail: CreatedThumbnailType;
const fullsizeUrl = getLocalAttachmentUrl({
contentType,
localKey,
path: fullsizePath,
size: fullsizeSize,
contentType,
version,
localKey,
});
if (isVideoTypeSupported(contentType)) {
@ -423,17 +435,17 @@ async function backupThumbnailAttachment(
log.info(`${logId}: uploading thumbnail to transit tier`);
const uploadResult = await uploadThumbnailToTransitTier({
data: thumbnail.data,
dependencies,
keys: toBase64(Buffer.concat([aesKey, macKey])),
logPrefix: logId,
dependencies,
});
log.info(`${logId}: copying thumbnail to backup tier`);
await copyToBackupTier({
cdnKey: uploadResult.cdnKey,
cdnNumber: uploadResult.cdnNumber,
size: thumbnail.data.byteLength,
mediaId: mediaId.string,
size: thumbnail.data.byteLength,
...backupKeyMaterial,
dependencies,
});
@ -441,17 +453,18 @@ async function backupThumbnailAttachment(
type UploadToTransitTierArgsType = {
absolutePath: string;
iv: string;
digest: string;
keys: string;
version?: AttachmentType['version'];
localKey?: string;
size: number;
logPrefix: string;
contentType: MIMEType;
dependencies: {
decryptAttachmentV2ToSink: typeof decryptAttachmentV2ToSink;
encryptAndUploadAttachment: typeof encryptAndUploadAttachment;
};
digest: string;
iv: string;
keys: string;
localKey?: string;
logPrefix: string;
size: number;
version?: AttachmentType['version'];
};
type UploadResponseType = {
@ -461,15 +474,18 @@ type UploadResponseType = {
};
async function uploadToTransitTier({
absolutePath,
keys,
version,
localKey,
size,
iv,
digest,
logPrefix,
contentType,
dependencies,
digest,
iv,
keys,
localKey,
logPrefix,
size,
version,
}: UploadToTransitTierArgsType): Promise<UploadResponseType> {
const needIncrementalMac = supportsIncrementalMac(contentType);
try {
if (version === 2) {
strictAssert(
@ -484,8 +500,8 @@ async function uploadToTransitTier({
const [, result] = await Promise.all([
dependencies.decryptAttachmentV2ToSink(
{
idForLogging: 'uploadToTransitTier',
ciphertextPath: absolutePath,
idForLogging: 'uploadToTransitTier',
keysBase64: localKey,
size,
type: 'local',
@ -493,13 +509,14 @@ async function uploadToTransitTier({
sink
),
dependencies.encryptAndUploadAttachment({
plaintext: { stream: sink },
keys: fromBase64(keys),
dangerousIv: {
reason: 'reencrypting-for-backup',
iv: fromBase64(iv),
digestToMatch: fromBase64(digest),
},
keys: fromBase64(keys),
needIncrementalMac,
plaintext: { stream: sink, size },
uploadType: 'backup',
}),
]);
@ -509,13 +526,14 @@ async function uploadToTransitTier({
// Legacy attachments
return dependencies.encryptAndUploadAttachment({
plaintext: { absolutePath },
keys: fromBase64(keys),
dangerousIv: {
reason: 'reencrypting-for-backup',
iv: fromBase64(iv),
digestToMatch: fromBase64(digest),
},
keys: fromBase64(keys),
needIncrementalMac,
plaintext: { absolutePath },
uploadType: 'backup',
});
} catch (error) {
@ -545,6 +563,7 @@ async function uploadThumbnailToTransitTier({
const uploadResult = await dependencies.encryptAndUploadAttachment({
plaintext: { data },
keys: fromBase64(keys),
needIncrementalMac: false,
uploadType: 'backup',
});
return uploadResult;


@ -27,6 +27,7 @@ const UNPROCESSED_ATTACHMENT: Proto.IAttachmentPointer = {
key: new Uint8Array([1, 2, 3]),
digest: new Uint8Array([4, 5, 6]),
contentType: IMAGE_GIF,
incrementalMac: new Uint8Array(),
size: 34,
};
@ -36,6 +37,7 @@ const PROCESSED_ATTACHMENT: ProcessedAttachment = {
key: 'AQID',
digest: 'BAUG',
contentType: IMAGE_GIF,
incrementalMac: undefined,
size: 34,
};
@ -84,6 +86,27 @@ describe('processDataMessage', () => {
]);
});
it('should process attachments with incrementalMac/chunkSize', () => {
const out = check({
attachments: [
{
...UNPROCESSED_ATTACHMENT,
incrementalMac: new Uint8Array([0, 0, 0]),
chunkSize: 2,
},
],
});
assert.deepStrictEqual(out.attachments, [
{
...PROCESSED_ATTACHMENT,
chunkSize: 2,
downloadPath: 'random-path',
incrementalMac: 'AAAA',
},
]);
});
it('should throw on too many attachments', () => {
const attachments: Array<Proto.IAttachmentPointer> = [];
for (let i = 0; i < ATTACHMENT_MAX + 1; i += 1) {


@ -44,10 +44,11 @@ describe('ContactsParser', () => {
const keys = generateKeys();
({ path } = await encryptAttachmentV2ToDisk({
plaintext: { data },
keys,
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
needIncrementalMac: false,
plaintext: { data },
}));
const contacts = await parseContactsV2({


@ -1,11 +1,13 @@
// Copyright 2015 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import { readFileSync, unlinkSync, writeFileSync } from 'fs';
import { join } from 'path';
import { createCipheriv } from 'crypto';
import { assert } from 'chai';
import { isNumber } from 'lodash';
import * as log from '../logging/log';
import * as Bytes from '../Bytes';
import * as Curve from '../Curve';
@ -584,6 +586,8 @@ describe('Crypto', () => {
...splitKeys(keys),
size: FILE_CONTENTS.byteLength,
theirDigest: encryptedAttachment.digest,
theirIncrementalMac: undefined,
theirChunkSize: undefined,
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
});
@ -611,6 +615,7 @@ describe('Crypto', () => {
plaintextHash,
encryptionKeys,
dangerousIv,
modifyIncrementalMac,
overrideSize,
}: {
path?: string;
@ -618,6 +623,7 @@ describe('Crypto', () => {
plaintextHash?: Uint8Array;
encryptionKeys?: Uint8Array;
dangerousIv?: HardcodedIVForEncryptionType;
modifyIncrementalMac?: boolean;
overrideSize?: number;
}): Promise<DecryptedAttachmentV2> {
let plaintextPath;
@ -631,12 +637,22 @@ describe('Crypto', () => {
dangerousIv,
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
needIncrementalMac: true,
});
ciphertextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
encryptedAttachment.path
);
const macLength = encryptedAttachment.incrementalMac?.length;
if (
modifyIncrementalMac &&
isNumber(macLength) &&
encryptedAttachment.incrementalMac
) {
encryptedAttachment.incrementalMac[macLength / 2] += 1;
}
const decryptedAttachment = await decryptAttachmentV2({
type: 'standard',
ciphertextPath,
@ -644,6 +660,8 @@ describe('Crypto', () => {
...splitKeys(keys),
size: overrideSize ?? data.byteLength,
theirDigest: encryptedAttachment.digest,
theirIncrementalMac: encryptedAttachment.incrementalMac,
theirChunkSize: encryptedAttachment.chunkSize,
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
});
@ -736,6 +754,25 @@ describe('Crypto', () => {
unlinkSync(sourcePath);
}
});
it('v2 fails decrypt for large disk file if incrementalMac is wrong', async () => {
const sourcePath = join(tempDir, 'random');
const data = getRandomBytes(5 * 1024 * 1024);
const plaintextHash = sha256(data);
writeFileSync(sourcePath, data);
try {
await assert.isRejected(
testV2RoundTripData({
path: sourcePath,
data,
plaintextHash,
modifyIncrementalMac: true,
}),
/Corrupted/
);
} finally {
unlinkSync(sourcePath);
}
});
it('v2 roundtrips large file from memory', async () => {
// Get sufficient large data to have more than 64kb of padding and
@ -785,6 +822,7 @@ describe('Crypto', () => {
plaintext: { data: FILE_CONTENTS },
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
needIncrementalMac: true,
});
await testV2RoundTripData({
@ -826,6 +864,7 @@ describe('Crypto', () => {
plaintext: { absolutePath: FILE_PATH },
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
needIncrementalMac: false,
});
ciphertextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
encryptedAttachment.path
@ -882,6 +921,7 @@ describe('Crypto', () => {
dangerousIv: { iv: dangerousTestOnlyIv, reason: 'test' },
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
needIncrementalMac: false,
});
ciphertextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
encryptedAttachmentV2.path
@ -918,6 +958,7 @@ describe('Crypto', () => {
plaintext: { absolutePath: plaintextAbsolutePath },
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
needIncrementalMac: true,
});
innerCiphertextPath =
window.Signal.Migrations.getAbsoluteAttachmentPath(
@ -931,6 +972,7 @@ describe('Crypto', () => {
dangerousTestOnlySkipPadding: true,
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
needIncrementalMac: false,
});
outerCiphertextPath =
@ -969,6 +1011,9 @@ describe('Crypto', () => {
...splitKeys(innerKeys),
size: FILE_CONTENTS.byteLength,
theirDigest: encryptResult.innerEncryptedAttachment.digest,
theirIncrementalMac:
encryptResult.innerEncryptedAttachment.incrementalMac,
theirChunkSize: encryptResult.innerEncryptedAttachment.chunkSize,
outerEncryption: splitKeys(outerKeys),
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
@ -1025,6 +1070,9 @@ describe('Crypto', () => {
...splitKeys(innerKeys),
size: data.byteLength,
theirDigest: encryptResult.innerEncryptedAttachment.digest,
theirIncrementalMac:
encryptResult.innerEncryptedAttachment.incrementalMac,
theirChunkSize: encryptResult.innerEncryptedAttachment.chunkSize,
outerEncryption: splitKeys(outerKeys),
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
@ -1075,6 +1123,9 @@ describe('Crypto', () => {
...splitKeys(innerKeys),
size: data.byteLength,
theirDigest: encryptResult.innerEncryptedAttachment.digest,
theirIncrementalMac:
encryptResult.innerEncryptedAttachment.incrementalMac,
theirChunkSize: encryptResult.innerEncryptedAttachment.chunkSize,
outerEncryption: {
aesKey: splitKeys(outerKeys).aesKey,
macKey: splitKeys(innerKeys).macKey, // wrong mac!


@ -107,6 +107,7 @@ describe('AttachmentBackupManager/JobManager', function attachmentBackupManager(
absolutePath: join(__dirname, '../../../fixtures/cat-gif.mp4'),
},
keys: Bytes.fromBase64(LOCAL_ENCRYPTION_KEYS),
needIncrementalMac: false,
sink: createWriteStream(absolutePath),
getAbsoluteAttachmentPath,
});


@ -40,6 +40,7 @@ describe('utils/ensureAttachmentIsReencryptable', async () => {
},
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
needIncrementalMac: false,
});
digest = encrypted.digest;
iv = encrypted.iv;


@ -161,6 +161,7 @@ describe('attachments', function (this: Mocha.Suite) {
},
getAbsoluteAttachmentPath: relativePath =>
bootstrap.getAbsoluteAttachmentPath(relativePath),
needIncrementalMac: false,
});
const ciphertextCatWithNonZeroPadding = readFileSync(


@ -16,6 +16,7 @@ import { dropNull } from '../util/dropNull';
import { decryptAttachmentV2ToSink } from '../AttachmentCrypto';
import Avatar = Proto.ContactDetails.IAvatar;
import { stringToMIMEType } from '../types/MIME';
const { Reader } = protobuf;
@ -152,9 +153,13 @@ export class ParseContactsTransform extends Transform {
// eslint-disable-next-line no-await-in-loop
await window.Signal.Migrations.writeNewAttachmentData(avatarData);
const contentType = this.activeContact.avatar?.contentType;
const prepared = prepareContact(this.activeContact, {
...this.activeContact.avatar,
...local,
contentType: contentType
? stringToMIMEType(contentType)
: undefined,
hash,
});
if (prepared) {


@ -120,6 +120,8 @@ export type ProcessedAttachment = {
textAttachment?: Omit<TextAttachmentType, 'preview'>;
backupLocator?: AttachmentType['backupLocator'];
downloadPath?: string;
incrementalMac?: string;
chunkSize?: number;
};
export type ProcessedGroupV2Context = {


@ -113,7 +113,7 @@ export async function downloadAttachment(
): Promise<ReencryptedAttachmentV2 & { size?: number }> {
const logId = `downloadAttachment/${options.logPrefix ?? ''}`;
const { digest, key, size } = attachment;
const { chunkSize, digest, incrementalMac, key, size } = attachment;
strictAssert(digest, `${logId}: missing digest`);
strictAssert(key, `${logId}: missing key`);
@ -232,6 +232,10 @@ export async function downloadAttachment(
macKey,
size,
theirDigest: Bytes.fromBase64(digest),
theirIncrementalMac: incrementalMac
? Bytes.fromBase64(incrementalMac)
: undefined,
theirChunkSize: chunkSize,
outerEncryption:
mediaTier === 'backup'
? getBackupMediaOuterEncryptionKeyMaterial(attachment)


@ -54,7 +54,8 @@ export function processAttachment(
const { cdnId } = attachment;
const hasCdnId = Long.isLong(cdnId) ? !cdnId.isZero() : Boolean(cdnId);
const { clientUuid, contentType, digest, key, size } = attachment;
const { clientUuid, contentType, digest, incrementalMac, key, size } =
attachment;
if (!isNumber(size)) {
throw new Error('Missing size on incoming attachment!');
}
@ -63,12 +64,17 @@ export function processAttachment(
...shallowDropNull(attachment),
cdnId: hasCdnId ? String(cdnId) : undefined,
clientUuid: clientUuid ? bytesToUuid(clientUuid) : undefined,
clientUuid: Bytes.isNotEmpty(clientUuid)
? bytesToUuid(clientUuid)
: undefined,
contentType: contentType
? stringToMIMEType(contentType)
: APPLICATION_OCTET_STREAM,
digest: digest ? Bytes.toBase64(digest) : undefined,
key: key ? Bytes.toBase64(key) : undefined,
digest: Bytes.isNotEmpty(digest) ? Bytes.toBase64(digest) : undefined,
incrementalMac: Bytes.isNotEmpty(incrementalMac)
? Bytes.toBase64(incrementalMac)
: undefined,
key: Bytes.isNotEmpty(key) ? Bytes.toBase64(key) : undefined,
size,
};
}


@ -130,6 +130,7 @@ export type AddressableAttachmentType = Readonly<{
path: string;
localKey?: string;
size?: number;
contentType: MIME.MIMEType;
// In-memory data, for outgoing attachments that are not saved to disk.
data?: Uint8Array;


@ -48,3 +48,6 @@ export const isAudio = (value: string): value is MIMEType =>
Boolean(value) && value.startsWith('audio/') && !value.endsWith('aiff');
export const isLongMessage = (value: unknown): value is MIMEType =>
value === LONG_MESSAGE;
export const supportsIncrementalMac = (value: unknown): boolean => {
return value === VIDEO_MP4;
};
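
Only video/mp4 returns true here today, so every other content type keeps passing needIncrementalMac: false. One caveat worth calling out: when the flag is true, a stream source must carry an explicit size (encryptAttachmentV2 asserts on it), which is why the upload paths above pass { stream, size }. A hedged sketch of the gate; contentType, keys, stream, and size are placeholders.

const needIncrementalMac = supportsIncrementalMac(contentType);
await encryptAndUploadAttachment({
  keys,
  needIncrementalMac,
  // Streams must have a known size when an incremental MAC is requested.
  plaintext: { stream, size },
  uploadType: 'standard',
});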


@ -0,0 +1,52 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { Transform } from 'stream';
import { DigestingWritable } from '@signalapp/libsignal-client/dist/incremental_mac';
import type { ChunkSizeChoice } from '@signalapp/libsignal-client/dist/incremental_mac';
type CallbackType = (error?: Error | null) => void;
export class DigestingPassThrough extends Transform {
private digester: DigestingWritable;
constructor(key: Buffer, sizeChoice: ChunkSizeChoice) {
super();
this.digester = new DigestingWritable(key, sizeChoice);
// We handle errors coming from write/end
this.digester.on('error', () => {
/* noop */
});
}
getFinalDigest(): Buffer {
return this.digester.getFinalDigest();
}
public override _transform(
data: Buffer,
enc: BufferEncoding,
callback: CallbackType
): void {
this.push(data);
this.digester.write(data, enc, err => {
if (err) {
return callback(err);
}
callback();
});
}
public override _final(callback: CallbackType): void {
this.digester.end((err?: Error) => {
if (err) {
return callback(err);
}
callback();
});
}
}
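
A hedged usage sketch for the new transform, mirroring how encryptAttachmentV2 wires it into its pipeline; the file paths, sizes, and the relative import path are placeholders (the module lives in ts/util in this diff).

import { createReadStream, createWriteStream } from 'fs';
import { pipeline } from 'stream/promises';
import { inferChunkSize } from '@signalapp/libsignal-client/dist/incremental_mac';
import { DigestingPassThrough } from './DigestingPassThrough';

async function digestFile(
  ciphertextPath: string,
  macKey: Buffer,
  ciphertextSize: number
): Promise<Buffer> {
  const digester = new DigestingPassThrough(
    macKey,
    inferChunkSize(ciphertextSize)
  );
  // Data flows through unchanged; the digester observes every chunk.
  await pipeline(
    createReadStream(ciphertextPath),
    digester,
    createWriteStream(`${ciphertextPath}.out`)
  );
  // Safe to read once the pipeline has finished.
  return digester.getFinalDigest();
}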


@ -81,11 +81,13 @@ export type CdnFieldsType = Pick<
| 'cdnId'
| 'cdnKey'
| 'cdnNumber'
| 'key'
| 'digest'
| 'iv'
| 'plaintextHash'
| 'incrementalMac'
| 'incrementalMacChunkSize'
| 'isReencryptableToSameDigest'
| 'iv'
| 'key'
| 'plaintextHash'
>;
export function copyCdnFields(
@ -98,10 +100,14 @@ export function copyCdnFields(
cdnId: dropNull(uploaded.cdnId)?.toString(),
cdnKey: uploaded.cdnKey,
cdnNumber: dropNull(uploaded.cdnNumber),
key: Bytes.toBase64(uploaded.key),
iv: Bytes.toBase64(uploaded.iv),
digest: Bytes.toBase64(uploaded.digest),
plaintextHash: uploaded.plaintextHash,
incrementalMac: uploaded.incrementalMac
? Bytes.toBase64(uploaded.incrementalMac)
: undefined,
incrementalMacChunkSize: dropNull(uploaded.chunkSize),
isReencryptableToSameDigest: uploaded.isReencryptableToSameDigest,
iv: Bytes.toBase64(uploaded.iv),
key: Bytes.toBase64(uploaded.key),
plaintextHash: uploaded.plaintextHash,
};
}


@ -94,6 +94,7 @@ export async function attemptToReencryptToOriginalDigest(
attachment.path
),
},
needIncrementalMac: false,
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
});
@ -117,6 +118,7 @@ export async function attemptToReencryptToOriginalDigest(
encryptAttachmentV2({
plaintext: {
stream: passthrough,
size: attachment.size,
},
keys: fromBase64(key),
dangerousIv: {
@ -124,6 +126,7 @@ export async function attemptToReencryptToOriginalDigest(
reason: 'reencrypting-for-backup',
digestToMatch: fromBase64(digest),
},
needIncrementalMac: false,
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
}),
@ -146,6 +149,7 @@ export async function generateNewEncryptionInfoForAttachment(
attachment.path
),
},
needIncrementalMac: false,
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
});
@ -170,7 +174,9 @@ export async function generateNewEncryptionInfoForAttachment(
keys: newKeys,
plaintext: {
stream: passthrough,
size: attachment.size,
},
needIncrementalMac: false,
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
}),


@ -5,7 +5,7 @@ import type {
AttachmentWithHydratedData,
UploadedAttachmentType,
} from '../types/Attachment';
import { MIMETypeToString } from '../types/MIME';
import { MIMETypeToString, supportsIncrementalMac } from '../types/MIME';
import { getRandomBytes } from '../Crypto';
import { strictAssert } from './assert';
import { backupsService } from '../services/backups';
@ -31,10 +31,12 @@ export async function uploadAttachment(
strictAssert(server, 'WebAPI must be initialized');
const keys = getRandomBytes(64);
const needIncrementalMac = supportsIncrementalMac(attachment.contentType);
const { cdnKey, cdnNumber, encrypted } = await encryptAndUploadAttachment({
plaintext: { data: attachment.data },
keys,
needIncrementalMac,
plaintext: { data: attachment.data },
uploadType: 'standard',
});
@ -50,6 +52,8 @@ export async function uploadAttachment(
size: attachment.data.byteLength,
digest: encrypted.digest,
plaintextHash: encrypted.plaintextHash,
incrementalMac: encrypted.incrementalMac,
chunkSize: encrypted.chunkSize,
contentType: MIMETypeToString(attachment.contentType),
fileName,
@ -63,14 +67,16 @@ export async function uploadAttachment(
}
export async function encryptAndUploadAttachment({
plaintext,
keys,
dangerousIv,
keys,
needIncrementalMac,
plaintext,
uploadType,
}: {
plaintext: PlaintextSourceType;
keys: Uint8Array;
dangerousIv?: HardcodedIVForEncryptionType;
keys: Uint8Array;
needIncrementalMac: boolean;
plaintext: PlaintextSourceType;
uploadType: 'standard' | 'backup';
}): Promise<{
cdnKey: string;
@ -98,11 +104,12 @@ export async function encryptAndUploadAttachment({
}
const encrypted = await encryptAttachmentV2ToDisk({
plaintext,
keys,
dangerousIv,
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
keys,
needIncrementalMac,
plaintext,
});
absoluteCiphertextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(