Enable downloading attachments from backup CDN

parent 2964006b79
commit 1e8047cf73

21 changed files with 989 additions and 385 deletions
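Reader's note on the test hunks below: downloads from the backup CDN carry a second, unpadded AES-CBC + HMAC layer (see the "We (and the server!) don't pad the second layer" comment in the new tests), so decryptAttachmentV2 now takes the inner key halves plus an optional outerEncryption pair. A hedged sketch of the option shape implied by the updated call sites — the real type lives in AttachmentCrypto and may differ:

// Hedged sketch only: field names are copied from the test call sites in
// this diff; the grouping into one options type is an assumption.
type DecryptAttachmentV2OptionsSketch = {
  ciphertextPath: string;
  idForLogging: string;
  aesKey: Uint8Array; // inner layer, from splitKeys(innerKeys)
  macKey: Uint8Array; // inner layer, from splitKeys(innerKeys)
  size: number;
  theirDigest: Uint8Array;
  outerEncryption?: { aesKey: Uint8Array; macKey: Uint8Array };
};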
@@ -5,7 +5,7 @@ import { assert } from 'chai';
 import { readFileSync, unlinkSync, writeFileSync } from 'fs';
 import { join } from 'path';
 
-import { randomBytes } from 'crypto';
+import { createCipheriv, randomBytes } from 'crypto';
 import * as log from '../logging/log';
 import * as Bytes from '../Bytes';
 import * as Curve from '../Curve';
@@ -34,12 +34,15 @@ import {
   encryptAttachment,
   decryptAttachmentV1,
   padAndEncryptAttachment,
+  CipherType,
 } from '../Crypto';
 import {
   KEY_SET_LENGTH,
   _generateAttachmentIv,
   decryptAttachmentV2,
   encryptAttachmentV2,
+  getAesCbcCiphertextLength,
+  splitKeys,
 } from '../AttachmentCrypto';
 import { createTempDir, deleteTempDir } from '../updater/common';
 import { uuidToBytes, bytesToUuid } from '../util/uuidToBytes';
@@ -576,8 +579,8 @@ describe('Crypto', () => {
 
       const decryptedAttachment = await decryptAttachmentV2({
         ciphertextPath,
-        id: 'test',
-        keys,
+        idForLogging: 'test',
+        ...splitKeys(keys),
         size: FILE_CONTENTS.byteLength,
         theirDigest: encryptedAttachment.digest,
       });
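The changed call sites now spread splitKeys(keys) into the options instead of passing the whole key set. A minimal sketch of the assumed split, inferred from the KEY_SET_LENGTH import and the { aesKey, macKey } pair used for outerEncryption later in this diff — not the actual AttachmentCrypto implementation:

// Hedged sketch: assumes a 64-byte key set, 32 bytes of AES-256 key
// followed by 32 bytes of HMAC-SHA256 key, matching the { aesKey, macKey }
// fields the tests use.
const KEY_SET_LENGTH_SKETCH = 64;

function splitKeysSketch(keys: Uint8Array): {
  aesKey: Uint8Array;
  macKey: Uint8Array;
} {
  if (keys.byteLength !== KEY_SET_LENGTH_SKETCH) {
    throw new Error(`expected ${KEY_SET_LENGTH_SKETCH}-byte key set`);
  }
  return {
    aesKey: keys.subarray(0, 32), // encryption half
    macKey: keys.subarray(32, 64), // authentication half
  };
}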
@@ -613,8 +616,8 @@ describe('Crypto', () => {
       );
       const decryptedAttachment = await decryptAttachmentV2({
         ciphertextPath,
-        id: 'test',
-        keys,
+        idForLogging: 'test',
+        ...splitKeys(keys),
         size: FILE_CONTENTS.byteLength,
         theirDigest: encryptedAttachment.digest,
       });
@@ -661,8 +664,8 @@ describe('Crypto', () => {
       );
       const decryptedAttachment = await decryptAttachmentV2({
         ciphertextPath,
-        id: 'test',
-        keys,
+        idForLogging: 'test',
+        ...splitKeys(keys),
         size: data.byteLength,
         theirDigest: encryptedAttachment.digest,
       });
@@ -770,5 +773,205 @@ describe('Crypto', () => {
         }
       }
     });
+
+    describe('decryptAttachmentV2 with outer layer of encryption', () => {
+      async function doubleEncrypt({
+        plaintextAbsolutePath,
+        innerKeys,
+        outerKeys,
+      }: {
+        plaintextAbsolutePath: string;
+        innerKeys: Uint8Array;
+        outerKeys: Uint8Array;
+      }) {
+        let innerCiphertextPath;
+        let outerCiphertextPath;
+        let innerEncryptedAttachment;
+        try {
+          innerEncryptedAttachment = await encryptAttachmentV2({
+            keys: innerKeys,
+            plaintextAbsolutePath,
+          });
+          innerCiphertextPath =
+            window.Signal.Migrations.getAbsoluteAttachmentPath(
+              innerEncryptedAttachment.path
+            );
+
+          const outerEncryptedAttachment = await encryptAttachmentV2({
+            keys: outerKeys,
+            plaintextAbsolutePath: innerCiphertextPath,
+            // We (and the server!) don't pad the second layer
+            dangerousTestOnlySkipPadding: true,
+          });
+
+          outerCiphertextPath =
+            window.Signal.Migrations.getAbsoluteAttachmentPath(
+              outerEncryptedAttachment.path
+            );
+        } finally {
+          if (innerCiphertextPath) {
+            unlinkSync(innerCiphertextPath);
+          }
+        }
+        return {
+          outerCiphertextPath,
+          innerEncryptedAttachment,
+        };
+      }
+
+      it('v2 roundtrips smaller file (all on disk)', async () => {
+        const outerKeys = generateAttachmentKeys();
+        const innerKeys = generateAttachmentKeys();
+        let plaintextPath;
+        let outerCiphertextPath;
+
+        try {
+          const encryptResult = await doubleEncrypt({
+            plaintextAbsolutePath: FILE_PATH,
+            innerKeys,
+            outerKeys,
+          });
+          outerCiphertextPath = encryptResult.outerCiphertextPath;
+
+          const decryptedAttachment = await decryptAttachmentV2({
+            ciphertextPath: outerCiphertextPath,
+            idForLogging: 'test',
+            ...splitKeys(innerKeys),
+            size: FILE_CONTENTS.byteLength,
+            theirDigest: encryptResult.innerEncryptedAttachment.digest,
+            outerEncryption: splitKeys(outerKeys),
+          });
+
+          plaintextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
+            decryptedAttachment.path
+          );
+          const plaintext = readFileSync(plaintextPath);
+          assert.isTrue(constantTimeEqual(FILE_CONTENTS, plaintext));
+          assert.strictEqual(
+            encryptResult.innerEncryptedAttachment.plaintextHash,
+            GHOST_KITTY_HASH
+          );
+          assert.strictEqual(
+            decryptedAttachment.plaintextHash,
+            encryptResult.innerEncryptedAttachment.plaintextHash
+          );
+        } finally {
+          if (plaintextPath) {
+            unlinkSync(plaintextPath);
+          }
+          if (outerCiphertextPath) {
+            unlinkSync(outerCiphertextPath);
+          }
+        }
+      });
+
+      it('v2 roundtrips random data (all on disk)', async () => {
+        const sourcePath = join(tempDir, 'random');
+        // Get a sufficiently large file to have more than 64kb of padding and
+        // trigger push back on the streams.
+        const data = getRandomBytes(5 * 1024 * 1024);
+
+        writeFileSync(sourcePath, data);
+
+        const outerKeys = generateAttachmentKeys();
+        const innerKeys = generateAttachmentKeys();
+        let plaintextPath;
+        let outerCiphertextPath;
+
+        try {
+          const encryptResult = await doubleEncrypt({
+            plaintextAbsolutePath: sourcePath,
+            innerKeys,
+            outerKeys,
+          });
+          outerCiphertextPath = encryptResult.outerCiphertextPath;
+
+          const decryptedAttachment = await decryptAttachmentV2({
+            ciphertextPath: outerCiphertextPath,
+            idForLogging: 'test',
+            ...splitKeys(innerKeys),
+            size: data.byteLength,
+            theirDigest: encryptResult.innerEncryptedAttachment.digest,
+            outerEncryption: splitKeys(outerKeys),
+          });
+          plaintextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
+            decryptedAttachment.path
+          );
+          const plaintext = readFileSync(plaintextPath);
+          assert.isTrue(constantTimeEqual(data, plaintext));
+        } finally {
+          if (sourcePath) {
+            unlinkSync(sourcePath);
+          }
+          if (plaintextPath) {
+            unlinkSync(plaintextPath);
+          }
+          if (outerCiphertextPath) {
+            unlinkSync(outerCiphertextPath);
+          }
+        }
+      });
+
+      it('v2 fails if outer encryption mac is wrong', async () => {
+        const sourcePath = join(tempDir, 'random');
+        // Get a sufficiently large file to have more than 64kb of padding and
+        // trigger push back on the streams.
+        const data = getRandomBytes(5 * 1024 * 1024);
+
+        writeFileSync(sourcePath, data);
+
+        const outerKeys = generateAttachmentKeys();
+        const innerKeys = generateAttachmentKeys();
+        let outerCiphertextPath;
+
+        try {
+          const encryptResult = await doubleEncrypt({
+            plaintextAbsolutePath: sourcePath,
+            innerKeys,
+            outerKeys,
+          });
+          outerCiphertextPath = encryptResult.outerCiphertextPath;
+
+          await assert.isRejected(
+            decryptAttachmentV2({
+              ciphertextPath: outerCiphertextPath,
+              idForLogging: 'test',
+              ...splitKeys(innerKeys),
+              size: data.byteLength,
+              theirDigest: encryptResult.innerEncryptedAttachment.digest,
+              outerEncryption: {
+                aesKey: splitKeys(outerKeys).aesKey,
+                macKey: splitKeys(innerKeys).macKey, // wrong mac!
+              },
+            }),
+            /Bad outer encryption MAC/
+          );
+        } finally {
+          if (sourcePath) {
+            unlinkSync(sourcePath);
+          }
+          if (outerCiphertextPath) {
+            unlinkSync(outerCiphertextPath);
+          }
+        }
+      });
+    });
   });
+
+  describe('getAesCbcCiphertextLength', () => {
+    function encrypt(length: number) {
+      const cipher = createCipheriv(
+        CipherType.AES256CBC,
+        getRandomBytes(32),
+        getRandomBytes(16)
+      );
+      const encrypted = cipher.update(Buffer.alloc(length));
+      return Buffer.concat([encrypted, cipher.final()]);
+    }
+    it('calculates cipherTextLength correctly', () => {
+      for (let i = 0; i < 128; i += 1) {
+        assert.strictEqual(getAesCbcCiphertextLength(i), encrypt(i).length);
+      }
+    });
+  });
 });
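The new getAesCbcCiphertextLength test checks the helper against real createCipheriv output for every length from 0 to 127. The property it pins down is plain PKCS#7 arithmetic; a sketch under that assumption (name is hypothetical, not the AttachmentCrypto export):

// Hedged sketch: AES-256-CBC with PKCS#7 padding always rounds up to the
// next whole 16-byte block, and adds a full padding block when the
// plaintext is already block-aligned.
function aesCbcCiphertextLengthSketch(plaintextLength: number): number {
  const AES_BLOCK_SIZE = 16;
  return (Math.floor(plaintextLength / AES_BLOCK_SIZE) + 1) * AES_BLOCK_SIZE;
}

For example, plaintext lengths 0, 15, and 16 map to 16, 16, and 32 bytes, matching encrypt(i).length in the loop above.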
@@ -1,102 +0,0 @@
-// Copyright 2024 Signal Messenger, LLC
-// SPDX-License-Identifier: AGPL-3.0-only
-
-import { assert } from 'chai';
-import * as sinon from 'sinon';
-
-import { getMediaNameForBackup } from '../../util/attachments/getMediaNameForBackup';
-import { IMAGE_PNG } from '../../types/MIME';
-import { sha256 } from '../../Crypto';
-import { DAY } from '../../util/durations';
-
-describe('getMediaNameForBackup', () => {
-  const TEST_HASH = sha256(Buffer.from('testattachmentdata'));
-  const TEST_HASH_BASE_64 =
-    // calculated as Buffer.from(TEST_HASH).toString('base64')
-    'ds5/U14lB2ziO90B7MldFTJUQdyw4qQ9y6Gnt9fmHL0=';
-
-  afterEach(function (this: Mocha.Context) {
-    sinon.restore();
-  });
-
-  it("should return base64 encoded plaintextHash if it's already been calculated", async () => {
-    assert.strictEqual(
-      await getMediaNameForBackup(
-        {
-          contentType: IMAGE_PNG,
-          size: 100,
-          plaintextHash: Buffer.from(TEST_HASH).toString('hex'),
-        },
-        'senderAci',
-        Date.now()
-      ),
-      TEST_HASH_BASE_64
-    );
-  });
-
-  it('should calculate hash from file on disk if plaintextHash has not yet been calculated', async () => {
-    const stubbedGetHashFromDisk = sinon
-      .stub()
-      .callsFake(async (_path: string) =>
-        Buffer.from(TEST_HASH).toString('hex')
-      );
-
-    const mediaName = await getMediaNameForBackup(
-      {
-        contentType: IMAGE_PNG,
-        size: 100,
-        path: 'path/to/file',
-      },
-      'senderAci',
-      Date.now(),
-      { getPlaintextHashForAttachmentOnDisk: stubbedGetHashFromDisk }
-    );
-
-    assert.strictEqual(stubbedGetHashFromDisk.callCount, 1);
-    assert.strictEqual(mediaName, TEST_HASH_BASE_64);
-  });
-
-  it('should return temporary identifier if attachment is undownloaded but in attachment tier', async () => {
-    const mediaName = await getMediaNameForBackup(
-      {
-        contentType: IMAGE_PNG,
-        size: 100,
-        cdnKey: 'cdnKey',
-      },
-      'senderAci',
-      Date.now()
-    );
-
-    assert.strictEqual(mediaName, 'senderAci_cdnKey');
-  });
-
-  it('should return temporary identifier if undownloaded attachment has temporary error', async () => {
-    const mediaName = await getMediaNameForBackup(
-      {
-        contentType: IMAGE_PNG,
-        size: 100,
-        cdnKey: 'cdnKey',
-        error: true,
-        key: 'attachmentkey',
-      },
-      'senderAci',
-      Date.now()
-    );
-
-    assert.strictEqual(mediaName, 'senderAci_cdnKey');
-  });
-
-  it('should return undefined if attachment is too old to be in attachment tier', async () => {
-    const mediaName = await getMediaNameForBackup(
-      {
-        contentType: IMAGE_PNG,
-        size: 100,
-        cdnKey: 'cdnKey',
-      },
-      'senderAci',
-      Date.now() - 31 * DAY
-    );
-
-    assert.strictEqual(mediaName, undefined);
-  });
-});
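The deleted test file above documented the old getMediaNameForBackup contract. For reference, a hedged reconstruction of that behavior, inferred only from the assertions (the ~30-day attachment-tier window comes from the 31 * DAY case; all names here are hypothetical, since the commit removes the real helper):

// Hedged sketch of the removed contract, not remaining source.
type BackupAttachmentSketch = {
  contentType: string;
  size: number;
  plaintextHash?: string; // hex-encoded hash of the plaintext
  path?: string;
  cdnKey?: string;
};

const ATTACHMENT_TIER_WINDOW = 30 * 24 * 60 * 60 * 1000; // assumed ~30 days

async function getMediaNameSketch(
  attachment: BackupAttachmentSketch,
  senderAci: string,
  messageTimestamp: number,
  getHashFromDisk?: (path: string) => Promise<string>
): Promise<string | undefined> {
  // Prefer the cached plaintext hash, re-encoded from hex to base64.
  if (attachment.plaintextHash) {
    return Buffer.from(attachment.plaintextHash, 'hex').toString('base64');
  }
  // Otherwise hash the file on disk, if one exists.
  if (attachment.path && getHashFromDisk) {
    const hex = await getHashFromDisk(attachment.path);
    return Buffer.from(hex, 'hex').toString('base64');
  }
  // Undownloaded attachments still on the attachment tier get a temporary
  // identifier; attachments past the window get nothing.
  if (
    attachment.cdnKey &&
    Date.now() - messageTimestamp <= ATTACHMENT_TIER_WINDOW
  ) {
    return `${senderAci}_${attachment.cdnKey}`;
  }
  return undefined;
}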