Attachments: support for incrementalMac and chunkSize

Co-authored-by: Scott Nonnenberg <scott@signal.org>
automated-signal 2024-10-09 15:31:32 -05:00 committed by GitHub
parent 4834e3ddc2
commit aaf9e1a418
GPG key ID: B5690EEEBB952194
19 changed files with 322 additions and 69 deletions
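
Summary: when an attachment's content type can be streamed before the download completes (currently only video/mp4), encryption now also produces an incremental MAC over the ciphertext, plus the chunk size it was computed with. Both values are stored next to digest/iv/plaintextHash, sent on the attachment pointer, and validated as the ciphertext streams in on download. A minimal sketch of the chunk-size handshake, using only the libsignal-client helpers this commit imports (the 5 MiB size and variable names are illustrative):

    import {
      chunkSizeInBytes,
      everyNthByte,
      inferChunkSize,
    } from '@signalapp/libsignal-client/dist/incremental_mac';

    // Sender: pick a chunk-size choice from the ciphertext length, then report it in bytes.
    const ciphertextLength = 5 * 1024 * 1024;
    const choice = inferChunkSize(ciphertextLength);
    const chunkSize = chunkSizeInBytes(choice); // stored/sent as `chunkSize`

    // Receiver: rebuild the equivalent choice from the advertised byte count
    // before validating the incremental MAC.
    const receiverChoice = everyNthByte(chunkSize);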


@ -297,8 +297,9 @@ export const writeNewAttachmentData = async ({
const keys = generateKeys();
const { plaintextHash, path } = await encryptAttachmentV2ToDisk({
- plaintext: { data },
getAbsoluteAttachmentPath,
+ needIncrementalMac: false,
+ plaintext: { data },
keys,
});


@ -2,12 +2,21 @@
// SPDX-License-Identifier: AGPL-3.0-only
import { createReadStream, createWriteStream } from 'fs';
- import { open, unlink } from 'fs/promises';
+ import { open, unlink, stat } from 'fs/promises';
import { createCipheriv, createHash, createHmac, randomBytes } from 'crypto';
import type { Hash } from 'crypto';
import { PassThrough, Transform, type Writable, Readable } from 'stream';
import { pipeline } from 'stream/promises';
+ import { isNumber } from 'lodash';
import { ensureFile } from 'fs-extra';
+ import {
+ chunkSizeInBytes,
+ everyNthByte,
+ inferChunkSize,
+ } from '@signalapp/libsignal-client/dist/incremental_mac';
+ import type { ChunkSizeChoice } from '@signalapp/libsignal-client/dist/incremental_mac';
import * as log from './logging/log';
import {
HashType,
@ -31,6 +40,8 @@ import { isNotNil } from './util/isNotNil';
import { missingCaseError } from './util/missingCaseError';
import { getEnvironment, Environment } from './environment';
import { toBase64 } from './Bytes';
+ import { DigestingPassThrough } from './util/DigestingPassThrough';
+ import { ValidatingPassThrough } from './util/ValidatingPassThrough';
// This file was split from ts/Crypto.ts because it pulls things in from node, and
// too many things pull in Crypto.ts, so it broke storybook.
@ -53,7 +64,9 @@ export function generateAttachmentKeys(): Uint8Array {
}
export type EncryptedAttachmentV2 = {
+ chunkSize: number | undefined;
digest: Uint8Array;
+ incrementalMac: Uint8Array | undefined;
iv: Uint8Array;
plaintextHash: string;
ciphertextSize: number;
@ -83,7 +96,7 @@ export type DecryptedAttachmentV2 = {
export type PlaintextSourceType =
| { data: Uint8Array }
- | { stream: Readable }
+ | { stream: Readable; size?: number }
| { absolutePath: string };
export type HardcodedIVForEncryptionType =
@ -98,11 +111,12 @@ export type HardcodedIVForEncryptionType =
};
type EncryptAttachmentV2PropsType = {
- plaintext: PlaintextSourceType;
- keys: Readonly<Uint8Array>;
dangerousIv?: HardcodedIVForEncryptionType;
dangerousTestOnlySkipPadding?: boolean;
getAbsoluteAttachmentPath: (relativePath: string) => string;
+ keys: Readonly<Uint8Array>;
+ needIncrementalMac: boolean;
+ plaintext: PlaintextSourceType;
};
export async function encryptAttachmentV2ToDisk(
@ -132,10 +146,11 @@ export async function encryptAttachmentV2ToDisk(
};
}
export async function encryptAttachmentV2({
- keys,
- plaintext,
dangerousIv,
dangerousTestOnlySkipPadding,
+ keys,
+ needIncrementalMac,
+ plaintext,
sink,
}: EncryptAttachmentV2PropsType & {
sink?: Writable;
@ -176,17 +191,42 @@ export async function encryptAttachmentV2({
let ciphertextSize: number | undefined;
let mac: Uint8Array | undefined;
+ let incrementalDigestCreator: DigestingPassThrough | undefined;
+ let chunkSizeChoice: ChunkSizeChoice | undefined;
try {
let source: Readable;
+ let size;
if ('data' in plaintext) {
- source = Readable.from([Buffer.from(plaintext.data)]);
+ const { data } = plaintext;
+ source = Readable.from([Buffer.from(data)]);
+ size = data.byteLength;
} else if ('stream' in plaintext) {
source = plaintext.stream;
+ size = plaintext.size;
} else {
- source = createReadStream(plaintext.absolutePath);
+ const { absolutePath } = plaintext;
+ if (needIncrementalMac) {
+ const fileData = await stat(absolutePath);
+ size = fileData.size;
+ }
+ source = createReadStream(absolutePath);
}
+ if (needIncrementalMac) {
+ strictAssert(
+ isNumber(size),
+ 'Need size if we are to generate incrementalMac!'
+ );
+ }
+ chunkSizeChoice = isNumber(size)
+ ? inferChunkSize(getAttachmentCiphertextLength(size))
+ : undefined;
+ incrementalDigestCreator =
+ needIncrementalMac && chunkSizeChoice
+ ? new DigestingPassThrough(Buffer.from(macKey), chunkSizeChoice)
+ : undefined;
await pipeline(
[
source,
@ -198,8 +238,9 @@ export async function encryptAttachmentV2({
mac = macValue;
}),
peekAndUpdateHash(digest),
+ incrementalDigestCreator,
- measureSize(size => {
- ciphertextSize = size;
+ measureSize(finalSize => {
+ ciphertextSize = finalSize;
}),
sink ?? new PassThrough().resume(),
].filter(isNotNil)
@ -236,11 +277,18 @@ export async function encryptAttachmentV2({
}
}
+ const incrementalMac = incrementalDigestCreator?.getFinalDigest();
return {
+ chunkSize:
+ incrementalMac && chunkSizeChoice
+ ? chunkSizeInBytes(chunkSizeChoice)
+ : undefined,
+ ciphertextSize,
digest: ourDigest,
+ incrementalMac,
iv,
plaintextHash: ourPlaintextHash,
- ciphertextSize,
};
}
@ -257,6 +305,8 @@ type DecryptAttachmentToSinkOptionsType = Readonly<
| {
type: 'standard';
theirDigest: Readonly<Uint8Array>;
+ theirIncrementalMac: Readonly<Uint8Array> | undefined;
+ theirChunkSize: number | undefined;
}
| {
// No need to check integrity for locally reencrypted attachments, or for backup
@ -326,7 +376,7 @@ export async function decryptAttachmentV2ToSink(
options: DecryptAttachmentToSinkOptionsType,
sink: Writable
): Promise<Omit<DecryptedAttachmentV2, 'path'>> {
- const { idForLogging, ciphertextPath, outerEncryption } = options;
+ const { ciphertextPath, idForLogging, outerEncryption } = options;
let aesKey: Uint8Array;
let macKey: Uint8Array;
@ -345,6 +395,18 @@ export async function decryptAttachmentV2ToSink(
const digest = createHash(HashType.size256);
const hmac = createHmac(HashType.size256, macKey);
const plaintextHash = createHash(HashType.size256);
+ const incrementalDigestValidator =
+ options.type === 'standard' &&
+ options.theirIncrementalMac &&
+ options.theirChunkSize
+ ? new ValidatingPassThrough(
+ Buffer.from(macKey),
+ everyNthByte(options.theirChunkSize),
+ Buffer.from(options.theirIncrementalMac)
+ )
+ : undefined;
let theirMac: Uint8Array | undefined;
// When downloading from backup there is an outer encryption layer; in that case we
@ -380,6 +442,7 @@ export async function decryptAttachmentV2ToSink(
maybeOuterEncryptionGetMacAndUpdateMac,
maybeOuterEncryptionGetIvAndDecipher,
peekAndUpdateHash(digest),
+ incrementalDigestValidator,
getMacAndUpdateHmac(hmac, theirMacValue => {
theirMac = theirMacValue;
}),
@ -495,7 +558,6 @@ export async function decryptAndReencryptLocally(
options: DecryptAttachmentOptionsType
): Promise<ReencryptedAttachmentV2> {
const { idForLogging } = options;
const logId = `reencryptAttachmentV2(${idForLogging})`;
// Create random output file
@ -518,12 +580,13 @@ export async function decryptAndReencryptLocally(
const [result] = await Promise.all([
decryptAttachmentV2ToSink(options, passthrough),
await encryptAttachmentV2({
+ getAbsoluteAttachmentPath: options.getAbsoluteAttachmentPath,
keys,
+ needIncrementalMac: false,
plaintext: {
stream: passthrough,
},
sink: createWriteStream(absoluteTargetPath),
- getAbsoluteAttachmentPath: options.getAbsoluteAttachmentPath,
}),
]);
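
For orientation, a rough round-trip sketch stitched together from the tests in this commit: encrypt with needIncrementalMac: true, then hand incrementalMac/chunkSize back to the decryptor as theirIncrementalMac/theirChunkSize. splitKeys is the aesKey/macKey helper the tests use (its import path here is assumed), and the 5 MiB buffer is illustrative.

    import { randomBytes } from 'crypto';
    import { splitKeys } from './Crypto'; // assumed location of the helper used by the tests
    import {
      decryptAttachmentV2,
      encryptAttachmentV2ToDisk,
      generateAttachmentKeys,
    } from './AttachmentCrypto';

    async function roundTrip(
      getAbsoluteAttachmentPath: (relativePath: string) => string
    ): Promise<void> {
      const data = randomBytes(5 * 1024 * 1024);
      const keys = generateAttachmentKeys();

      // needIncrementalMac: true makes the encryptor also emit incrementalMac + chunkSize.
      const encrypted = await encryptAttachmentV2ToDisk({
        getAbsoluteAttachmentPath,
        keys,
        needIncrementalMac: true,
        plaintext: { data },
      });

      // The decryptor then validates the ciphertext chunk-by-chunk as it streams.
      await decryptAttachmentV2({
        type: 'standard',
        ciphertextPath: getAbsoluteAttachmentPath(encrypted.path),
        idForLogging: 'roundTrip',
        ...splitKeys(keys),
        size: data.byteLength,
        theirDigest: encrypted.digest,
        theirIncrementalMac: encrypted.incrementalMac,
        theirChunkSize: encrypted.chunkSize,
        getAbsoluteAttachmentPath,
      });
    }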


@ -61,6 +61,8 @@ import {
} from '../util/GoogleChrome';
import { getLocalAttachmentUrl } from '../util/getLocalAttachmentUrl';
import { findRetryAfterTimeFromError } from './helpers/findRetryAfterTimeFromError';
+ import { supportsIncrementalMac } from '../types/MIME';
+ import type { MIMEType } from '../types/MIME';
const MAX_CONCURRENT_JOBS = 3;
const RETRY_CONFIG = {
@ -269,8 +271,17 @@ async function backupStandardAttachment(
) {
const jobIdForLogging = getJobIdForLogging(job);
const logId = `AttachmentBackupManager.backupStandardAttachment(${jobIdForLogging})`;
- const { path, transitCdnInfo, iv, digest, keys, size, version, localKey } =
- job.data;
+ const {
+ contentType,
+ digest,
+ iv,
+ keys,
+ localKey,
+ path,
+ size,
+ transitCdnInfo,
+ version,
+ } = job.data;
const mediaId = getMediaIdFromMediaName(job.mediaName);
const backupKeyMaterial = deriveBackupMediaKeyMaterial(
@ -326,14 +337,15 @@ async function backupStandardAttachment(
log.info(`${logId}: uploading to transit tier`);
const uploadResult = await uploadToTransitTier({
absolutePath,
- version,
- localKey,
- size,
- keys,
- iv,
- digest,
- logPrefix: logId,
+ contentType,
dependencies,
+ digest,
+ iv,
+ keys,
+ localKey,
+ logPrefix: logId,
+ size,
+ version,
});
log.info(`${logId}: copying to backup tier`);
@ -386,11 +398,11 @@ async function backupThumbnailAttachment(
let thumbnail: CreatedThumbnailType;
const fullsizeUrl = getLocalAttachmentUrl({
+ contentType,
+ localKey,
path: fullsizePath,
size: fullsizeSize,
- contentType,
version,
- localKey,
});
if (isVideoTypeSupported(contentType)) {
@ -423,17 +435,17 @@ async function backupThumbnailAttachment(
log.info(`${logId}: uploading thumbnail to transit tier`);
const uploadResult = await uploadThumbnailToTransitTier({
data: thumbnail.data,
+ dependencies,
keys: toBase64(Buffer.concat([aesKey, macKey])),
logPrefix: logId,
- dependencies,
});
log.info(`${logId}: copying thumbnail to backup tier`);
await copyToBackupTier({
cdnKey: uploadResult.cdnKey,
cdnNumber: uploadResult.cdnNumber,
- size: thumbnail.data.byteLength,
mediaId: mediaId.string,
+ size: thumbnail.data.byteLength,
...backupKeyMaterial,
dependencies,
});
@ -441,17 +453,18 @@ async function backupThumbnailAttachment(
type UploadToTransitTierArgsType = {
absolutePath: string;
- iv: string;
- digest: string;
- keys: string;
- version?: AttachmentType['version'];
- localKey?: string;
- size: number;
- logPrefix: string;
+ contentType: MIMEType;
dependencies: {
decryptAttachmentV2ToSink: typeof decryptAttachmentV2ToSink;
encryptAndUploadAttachment: typeof encryptAndUploadAttachment;
};
+ digest: string;
+ iv: string;
+ keys: string;
+ localKey?: string;
+ logPrefix: string;
+ size: number;
+ version?: AttachmentType['version'];
};
type UploadResponseType = {
@ -461,15 +474,18 @@ type UploadResponseType = {
};
async function uploadToTransitTier({
absolutePath,
- keys,
- version,
- localKey,
- size,
- iv,
- digest,
- logPrefix,
+ contentType,
dependencies,
+ digest,
+ iv,
+ keys,
+ localKey,
+ logPrefix,
+ size,
+ version,
}: UploadToTransitTierArgsType): Promise<UploadResponseType> {
+ const needIncrementalMac = supportsIncrementalMac(contentType);
try {
if (version === 2) {
strictAssert(
@ -484,8 +500,8 @@ async function uploadToTransitTier({
const [, result] = await Promise.all([
dependencies.decryptAttachmentV2ToSink(
{
- idForLogging: 'uploadToTransitTier',
ciphertextPath: absolutePath,
+ idForLogging: 'uploadToTransitTier',
keysBase64: localKey,
size,
type: 'local',
@ -493,13 +509,14 @@ async function uploadToTransitTier({
sink
),
dependencies.encryptAndUploadAttachment({
- plaintext: { stream: sink },
- keys: fromBase64(keys),
dangerousIv: {
reason: 'reencrypting-for-backup',
iv: fromBase64(iv),
digestToMatch: fromBase64(digest),
},
+ keys: fromBase64(keys),
+ needIncrementalMac,
+ plaintext: { stream: sink, size },
uploadType: 'backup',
}),
]);
@ -509,13 +526,14 @@ async function uploadToTransitTier({
// Legacy attachments
return dependencies.encryptAndUploadAttachment({
- plaintext: { absolutePath },
- keys: fromBase64(keys),
dangerousIv: {
reason: 'reencrypting-for-backup',
iv: fromBase64(iv),
digestToMatch: fromBase64(digest),
},
+ keys: fromBase64(keys),
+ needIncrementalMac,
+ plaintext: { absolutePath },
uploadType: 'backup',
});
} catch (error) {
@ -545,6 +563,7 @@ async function uploadThumbnailToTransitTier({
const uploadResult = await dependencies.encryptAndUploadAttachment({
plaintext: { data },
keys: fromBase64(keys),
+ needIncrementalMac: false,
uploadType: 'backup',
});
return uploadResult;


@ -27,6 +27,7 @@ const UNPROCESSED_ATTACHMENT: Proto.IAttachmentPointer = {
key: new Uint8Array([1, 2, 3]),
digest: new Uint8Array([4, 5, 6]),
contentType: IMAGE_GIF,
+ incrementalMac: new Uint8Array(),
size: 34,
};
@ -36,6 +37,7 @@ const PROCESSED_ATTACHMENT: ProcessedAttachment = {
key: 'AQID',
digest: 'BAUG',
contentType: IMAGE_GIF,
+ incrementalMac: undefined,
size: 34,
};
@ -84,6 +86,27 @@ describe('processDataMessage', () => {
]);
});
+ it('should process attachments with incrementalMac/chunkSize', () => {
+ const out = check({
+ attachments: [
+ {
+ ...UNPROCESSED_ATTACHMENT,
+ incrementalMac: new Uint8Array([0, 0, 0]),
+ chunkSize: 2,
+ },
+ ],
+ });
+ assert.deepStrictEqual(out.attachments, [
+ {
+ ...PROCESSED_ATTACHMENT,
+ chunkSize: 2,
+ downloadPath: 'random-path',
+ incrementalMac: 'AAAA',
+ },
+ ]);
+ });
it('should throw on too many attachments', () => {
const attachments: Array<Proto.IAttachmentPointer> = [];
for (let i = 0; i < ATTACHMENT_MAX + 1; i += 1) {


@ -44,10 +44,11 @@ describe('ContactsParser', () => {
const keys = generateKeys();
({ path } = await encryptAttachmentV2ToDisk({
- plaintext: { data },
keys,
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
+ needIncrementalMac: false,
+ plaintext: { data },
}));
const contacts = await parseContactsV2({


@ -1,11 +1,13 @@
// Copyright 2015 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
- import { assert } from 'chai';
import { readFileSync, unlinkSync, writeFileSync } from 'fs';
import { join } from 'path';
import { createCipheriv } from 'crypto';
+ import { assert } from 'chai';
+ import { isNumber } from 'lodash';
import * as log from '../logging/log';
import * as Bytes from '../Bytes';
import * as Curve from '../Curve';
@ -584,6 +586,8 @@ describe('Crypto', () => {
...splitKeys(keys),
size: FILE_CONTENTS.byteLength,
theirDigest: encryptedAttachment.digest,
+ theirIncrementalMac: undefined,
+ theirChunkSize: undefined,
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
});
@ -611,6 +615,7 @@ describe('Crypto', () => {
plaintextHash,
encryptionKeys,
dangerousIv,
+ modifyIncrementalMac,
overrideSize,
}: {
path?: string;
@ -618,6 +623,7 @@ describe('Crypto', () => {
plaintextHash?: Uint8Array;
encryptionKeys?: Uint8Array;
dangerousIv?: HardcodedIVForEncryptionType;
+ modifyIncrementalMac?: boolean;
overrideSize?: number;
}): Promise<DecryptedAttachmentV2> {
let plaintextPath;
@ -631,12 +637,22 @@ describe('Crypto', () => {
dangerousIv,
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
+ needIncrementalMac: true,
});
ciphertextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
encryptedAttachment.path
);
+ const macLength = encryptedAttachment.incrementalMac?.length;
+ if (
+ modifyIncrementalMac &&
+ isNumber(macLength) &&
+ encryptedAttachment.incrementalMac
+ ) {
+ encryptedAttachment.incrementalMac[macLength / 2] += 1;
+ }
const decryptedAttachment = await decryptAttachmentV2({
type: 'standard',
ciphertextPath,
@ -644,6 +660,8 @@ describe('Crypto', () => {
...splitKeys(keys),
size: overrideSize ?? data.byteLength,
theirDigest: encryptedAttachment.digest,
+ theirIncrementalMac: encryptedAttachment.incrementalMac,
+ theirChunkSize: encryptedAttachment.chunkSize,
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
});
@ -736,6 +754,25 @@ describe('Crypto', () => {
unlinkSync(sourcePath);
}
});
+ it('v2 fails decrypt for large disk file if incrementalMac is wrong', async () => {
+ const sourcePath = join(tempDir, 'random');
+ const data = getRandomBytes(5 * 1024 * 1024);
+ const plaintextHash = sha256(data);
+ writeFileSync(sourcePath, data);
+ try {
+ await assert.isRejected(
+ testV2RoundTripData({
+ path: sourcePath,
+ data,
+ plaintextHash,
+ modifyIncrementalMac: true,
+ }),
+ /Corrupted/
+ );
+ } finally {
+ unlinkSync(sourcePath);
+ }
+ });
it('v2 roundtrips large file from memory', async () => {
// Get sufficient large data to have more than 64kb of padding and
@ -785,6 +822,7 @@ describe('Crypto', () => {
plaintext: { data: FILE_CONTENTS },
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
+ needIncrementalMac: true,
});
await testV2RoundTripData({
@ -826,6 +864,7 @@ describe('Crypto', () => {
plaintext: { absolutePath: FILE_PATH },
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
+ needIncrementalMac: false,
});
ciphertextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
encryptedAttachment.path
@ -882,6 +921,7 @@ describe('Crypto', () => {
dangerousIv: { iv: dangerousTestOnlyIv, reason: 'test' },
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
+ needIncrementalMac: false,
});
ciphertextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
encryptedAttachmentV2.path
@ -918,6 +958,7 @@ describe('Crypto', () => {
plaintext: { absolutePath: plaintextAbsolutePath },
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
+ needIncrementalMac: true,
});
innerCiphertextPath =
window.Signal.Migrations.getAbsoluteAttachmentPath(
@ -931,6 +972,7 @@ describe('Crypto', () => {
dangerousTestOnlySkipPadding: true,
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
+ needIncrementalMac: false,
});
outerCiphertextPath =
@ -969,6 +1011,9 @@ describe('Crypto', () => {
...splitKeys(innerKeys),
size: FILE_CONTENTS.byteLength,
theirDigest: encryptResult.innerEncryptedAttachment.digest,
+ theirIncrementalMac:
+ encryptResult.innerEncryptedAttachment.incrementalMac,
+ theirChunkSize: encryptResult.innerEncryptedAttachment.chunkSize,
outerEncryption: splitKeys(outerKeys),
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
@ -1025,6 +1070,9 @@ describe('Crypto', () => {
...splitKeys(innerKeys),
size: data.byteLength,
theirDigest: encryptResult.innerEncryptedAttachment.digest,
+ theirIncrementalMac:
+ encryptResult.innerEncryptedAttachment.incrementalMac,
+ theirChunkSize: encryptResult.innerEncryptedAttachment.chunkSize,
outerEncryption: splitKeys(outerKeys),
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
@ -1075,6 +1123,9 @@ describe('Crypto', () => {
...splitKeys(innerKeys),
size: data.byteLength,
theirDigest: encryptResult.innerEncryptedAttachment.digest,
+ theirIncrementalMac:
+ encryptResult.innerEncryptedAttachment.incrementalMac,
+ theirChunkSize: encryptResult.innerEncryptedAttachment.chunkSize,
outerEncryption: {
aesKey: splitKeys(outerKeys).aesKey,
macKey: splitKeys(innerKeys).macKey, // wrong mac!


@ -107,6 +107,7 @@ describe('AttachmentBackupManager/JobManager', function attachmentBackupManager(
absolutePath: join(__dirname, '../../../fixtures/cat-gif.mp4'),
},
keys: Bytes.fromBase64(LOCAL_ENCRYPTION_KEYS),
+ needIncrementalMac: false,
sink: createWriteStream(absolutePath),
getAbsoluteAttachmentPath,
});


@ -40,6 +40,7 @@ describe('utils/ensureAttachmentIsReencryptable', async () => {
},
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
+ needIncrementalMac: false,
});
digest = encrypted.digest;
iv = encrypted.iv;


@ -161,6 +161,7 @@ describe('attachments', function (this: Mocha.Suite) {
},
getAbsoluteAttachmentPath: relativePath =>
bootstrap.getAbsoluteAttachmentPath(relativePath),
+ needIncrementalMac: false,
});
const ciphertextCatWithNonZeroPadding = readFileSync(


@ -16,6 +16,7 @@ import { dropNull } from '../util/dropNull';
import { decryptAttachmentV2ToSink } from '../AttachmentCrypto';
import Avatar = Proto.ContactDetails.IAvatar;
+ import { stringToMIMEType } from '../types/MIME';
const { Reader } = protobuf;
@ -152,9 +153,13 @@ export class ParseContactsTransform extends Transform {
// eslint-disable-next-line no-await-in-loop
await window.Signal.Migrations.writeNewAttachmentData(avatarData);
+ const contentType = this.activeContact.avatar?.contentType;
const prepared = prepareContact(this.activeContact, {
...this.activeContact.avatar,
...local,
+ contentType: contentType
+ ? stringToMIMEType(contentType)
+ : undefined,
hash,
});
if (prepared) {


@ -120,6 +120,8 @@ export type ProcessedAttachment = {
textAttachment?: Omit<TextAttachmentType, 'preview'>;
backupLocator?: AttachmentType['backupLocator'];
downloadPath?: string;
+ incrementalMac?: string;
+ chunkSize?: number;
};
export type ProcessedGroupV2Context = {


@ -113,7 +113,7 @@ export async function downloadAttachment(
): Promise<ReencryptedAttachmentV2 & { size?: number }> {
const logId = `downloadAttachment/${options.logPrefix ?? ''}`;
- const { digest, key, size } = attachment;
+ const { chunkSize, digest, incrementalMac, key, size } = attachment;
strictAssert(digest, `${logId}: missing digest`);
strictAssert(key, `${logId}: missing key`);
@ -232,6 +232,10 @@ export async function downloadAttachment(
macKey,
size,
theirDigest: Bytes.fromBase64(digest),
+ theirIncrementalMac: incrementalMac
+ ? Bytes.fromBase64(incrementalMac)
+ : undefined,
+ theirChunkSize: chunkSize,
outerEncryption:
mediaTier === 'backup'
? getBackupMediaOuterEncryptionKeyMaterial(attachment)


@ -54,7 +54,8 @@ export function processAttachment(
const { cdnId } = attachment;
const hasCdnId = Long.isLong(cdnId) ? !cdnId.isZero() : Boolean(cdnId);
- const { clientUuid, contentType, digest, key, size } = attachment;
+ const { clientUuid, contentType, digest, incrementalMac, key, size } =
+ attachment;
if (!isNumber(size)) {
throw new Error('Missing size on incoming attachment!');
}
@ -63,12 +64,17 @@ export function processAttachment(
...shallowDropNull(attachment),
cdnId: hasCdnId ? String(cdnId) : undefined,
- clientUuid: clientUuid ? bytesToUuid(clientUuid) : undefined,
+ clientUuid: Bytes.isNotEmpty(clientUuid)
+ ? bytesToUuid(clientUuid)
+ : undefined,
contentType: contentType
? stringToMIMEType(contentType)
: APPLICATION_OCTET_STREAM,
- digest: digest ? Bytes.toBase64(digest) : undefined,
+ digest: Bytes.isNotEmpty(digest) ? Bytes.toBase64(digest) : undefined,
- key: key ? Bytes.toBase64(key) : undefined,
+ incrementalMac: Bytes.isNotEmpty(incrementalMac)
+ ? Bytes.toBase64(incrementalMac)
+ : undefined,
+ key: Bytes.isNotEmpty(key) ? Bytes.toBase64(key) : undefined,
size,
};
}
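
Why the switch to Bytes.isNotEmpty: protobuf clients commonly decode an absent bytes field as a zero-length Uint8Array, so an empty incrementalMac (or key/digest) should be treated as "not present" rather than base64-encoded to an empty string. A small stand-in for that guard, mirroring the processDataMessage test above (the real helper lives in ts/Bytes.ts; names here are illustrative):

    function isNotEmpty(data?: Uint8Array | null): data is Uint8Array {
      return data != null && data.byteLength > 0;
    }

    const absent = new Uint8Array(); // what the test feeds in as incrementalMac
    const present = new Uint8Array([0, 0, 0]);

    // Mirrors the processed output asserted in the test:
    // absent -> undefined, present -> base64 string ('AAAA').
    const processedAbsent = isNotEmpty(absent)
      ? Buffer.from(absent).toString('base64')
      : undefined;
    const processedPresent = isNotEmpty(present)
      ? Buffer.from(present).toString('base64')
      : undefined;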


@ -130,6 +130,7 @@ export type AddressableAttachmentType = Readonly<{
path: string;
localKey?: string;
size?: number;
+ contentType: MIME.MIMEType;
// In-memory data, for outgoing attachments that are not saved to disk.
data?: Uint8Array;


@ -48,3 +48,6 @@ export const isAudio = (value: string): value is MIMEType =>
Boolean(value) && value.startsWith('audio/') && !value.endsWith('aiff');
export const isLongMessage = (value: unknown): value is MIMEType =>
value === LONG_MESSAGE;
+ export const supportsIncrementalMac = (value: unknown): boolean => {
+ return value === VIDEO_MP4;
+ };
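
Incremental MACs are only requested for content types that are streamed during playback, which today is just video/mp4. A quick illustration of the gate, assuming the VIDEO_MP4 and IMAGE_GIF constants exported from this module (the import path is illustrative):

    import { IMAGE_GIF, VIDEO_MP4, supportsIncrementalMac } from '../types/MIME';

    supportsIncrementalMac(VIDEO_MP4); // true  -> encryptor emits incrementalMac/chunkSize
    supportsIncrementalMac(IMAGE_GIF); // false -> both fields stay undefined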


@ -0,0 +1,52 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

import { Transform } from 'stream';
import { DigestingWritable } from '@signalapp/libsignal-client/dist/incremental_mac';
import type { ChunkSizeChoice } from '@signalapp/libsignal-client/dist/incremental_mac';

type CallbackType = (error?: Error | null) => void;

export class DigestingPassThrough extends Transform {
  private digester: DigestingWritable;

  constructor(key: Buffer, sizeChoice: ChunkSizeChoice) {
    super();
    this.digester = new DigestingWritable(key, sizeChoice);

    // We handle errors coming from write/end
    this.digester.on('error', () => {
      /* noop */
    });
  }

  getFinalDigest(): Buffer {
    return this.digester.getFinalDigest();
  }

  public override _transform(
    data: Buffer,
    enc: BufferEncoding,
    callback: CallbackType
  ): void {
    this.push(data);
    this.digester.write(data, enc, err => {
      if (err) {
        return callback(err);
      }
      callback();
    });
  }

  public override _final(callback: CallbackType): void {
    this.digester.end((err?: Error) => {
      if (err) {
        return callback(err);
      }
      callback();
    });
  }
}
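
For context, a minimal sketch of how this Transform is meant to sit in a pipeline (mirroring encryptAttachmentV2 above). The key and chunk-size choice are illustrative; in the app the key is the attachment's HMAC key and the size is the ciphertext length, and the final digest is only read after the pipeline has fully flushed.

    import { createReadStream, createWriteStream } from 'fs';
    import { randomBytes } from 'crypto';
    import { pipeline } from 'stream/promises';
    import { inferChunkSize } from '@signalapp/libsignal-client/dist/incremental_mac';
    import { DigestingPassThrough } from './DigestingPassThrough';

    async function digestWhileCopying(
      src: string,
      dest: string,
      size: number
    ): Promise<Buffer> {
      // Illustrative key; the real caller passes Buffer.from(macKey).
      const digester = new DigestingPassThrough(randomBytes(32), inferChunkSize(size));
      await pipeline(createReadStream(src), digester, createWriteStream(dest));
      // Only valid once the stream has finished.
      return digester.getFinalDigest();
    }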


@ -81,11 +81,13 @@ export type CdnFieldsType = Pick<
| 'cdnId'
| 'cdnKey'
| 'cdnNumber'
- | 'key'
| 'digest'
- | 'iv'
- | 'plaintextHash'
+ | 'incrementalMac'
+ | 'incrementalMacChunkSize'
| 'isReencryptableToSameDigest'
+ | 'iv'
+ | 'key'
+ | 'plaintextHash'
>;
export function copyCdnFields(
@ -98,10 +100,14 @@ export function copyCdnFields(
cdnId: dropNull(uploaded.cdnId)?.toString(),
cdnKey: uploaded.cdnKey,
cdnNumber: dropNull(uploaded.cdnNumber),
- key: Bytes.toBase64(uploaded.key),
- iv: Bytes.toBase64(uploaded.iv),
digest: Bytes.toBase64(uploaded.digest),
- plaintextHash: uploaded.plaintextHash,
+ incrementalMac: uploaded.incrementalMac
+ ? Bytes.toBase64(uploaded.incrementalMac)
+ : undefined,
+ incrementalMacChunkSize: dropNull(uploaded.chunkSize),
isReencryptableToSameDigest: uploaded.isReencryptableToSameDigest,
+ iv: Bytes.toBase64(uploaded.iv),
+ key: Bytes.toBase64(uploaded.key),
+ plaintextHash: uploaded.plaintextHash,
};
}


@ -94,6 +94,7 @@ export async function attemptToReencryptToOriginalDigest(
attachment.path
),
},
+ needIncrementalMac: false,
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
});
@ -117,6 +118,7 @@ export async function attemptToReencryptToOriginalDigest(
encryptAttachmentV2({
plaintext: {
stream: passthrough,
+ size: attachment.size,
},
keys: fromBase64(key),
dangerousIv: {
@ -124,6 +126,7 @@ export async function attemptToReencryptToOriginalDigest(
reason: 'reencrypting-for-backup',
digestToMatch: fromBase64(digest),
},
+ needIncrementalMac: false,
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
}),
@ -146,6 +149,7 @@ export async function generateNewEncryptionInfoForAttachment(
attachment.path
),
},
+ needIncrementalMac: false,
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
});
@ -170,7 +174,9 @@ export async function generateNewEncryptionInfoForAttachment(
keys: newKeys,
plaintext: {
stream: passthrough,
+ size: attachment.size,
},
+ needIncrementalMac: false,
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
}),


@ -5,7 +5,7 @@ import type {
AttachmentWithHydratedData,
UploadedAttachmentType,
} from '../types/Attachment';
- import { MIMETypeToString } from '../types/MIME';
+ import { MIMETypeToString, supportsIncrementalMac } from '../types/MIME';
import { getRandomBytes } from '../Crypto';
import { strictAssert } from './assert';
import { backupsService } from '../services/backups';
@ -31,10 +31,12 @@ export async function uploadAttachment(
strictAssert(server, 'WebAPI must be initialized');
const keys = getRandomBytes(64);
+ const needIncrementalMac = supportsIncrementalMac(attachment.contentType);
const { cdnKey, cdnNumber, encrypted } = await encryptAndUploadAttachment({
- plaintext: { data: attachment.data },
keys,
+ needIncrementalMac,
+ plaintext: { data: attachment.data },
uploadType: 'standard',
});
@ -50,6 +52,8 @@ export async function uploadAttachment(
size: attachment.data.byteLength,
digest: encrypted.digest,
plaintextHash: encrypted.plaintextHash,
+ incrementalMac: encrypted.incrementalMac,
+ chunkSize: encrypted.chunkSize,
contentType: MIMETypeToString(attachment.contentType),
fileName,
@ -63,14 +67,16 @@ export async function uploadAttachment(
}
export async function encryptAndUploadAttachment({
- plaintext,
- keys,
dangerousIv,
+ keys,
+ needIncrementalMac,
+ plaintext,
uploadType,
}: {
- plaintext: PlaintextSourceType;
- keys: Uint8Array;
dangerousIv?: HardcodedIVForEncryptionType;
+ keys: Uint8Array;
+ needIncrementalMac: boolean;
+ plaintext: PlaintextSourceType;
uploadType: 'standard' | 'backup';
}): Promise<{
cdnKey: string;
@ -98,11 +104,12 @@ export async function encryptAndUploadAttachment({
}
const encrypted = await encryptAttachmentV2ToDisk({
- plaintext,
- keys,
dangerousIv,
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
+ keys,
+ needIncrementalMac,
+ plaintext,
});
absoluteCiphertextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(