Use TUS for attachment uploads
Co-authored-by: trevor-signal <131492920+trevor-signal@users.noreply.github.com>
Parent: 379b2e1c90
Commit: 73ebb9797d
9 changed files with 312 additions and 169 deletions
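
Taken together, the hunks below wire attachment uploads through a v3 upload form and, when the destination CDN supports it, the resumable TUS protocol, with a plain PUT as the fallback. The condensed sketch that follows uses only names that appear in the diff (getAttachmentUploadForm, encryptAttachmentV2ToDisk, tusUpload, defaultFileReader, putEncryptedAttachment, safeUnlinkSync); the wrapper function, import paths, and error handling are illustrative, not the actual implementation.

import { createReadStream } from 'fs';
// Import paths are assumptions for the sketch; the real code lives in the hunks below.
import { encryptAttachmentV2ToDisk, safeUnlinkSync } from './AttachmentCrypto';
import { tusUpload } from './util/uploads/tusProtocol';
import { defaultFileReader } from './util/uploads/uploads';
import type { WebAPIType } from './textsecure/WebAPI';

const CDNS_SUPPORTING_TUS = new Set([3]);

async function uploadCiphertextForAttachment(
  server: WebAPIType,
  keys: Uint8Array,
  plaintextAbsolutePath: string
): Promise<string> {
  // 1. Ask the service for a v3 upload form (signed URL, CDN number, key, headers).
  const uploadForm = await server.getAttachmentUploadForm();

  // 2. Encrypt to a temporary ciphertext file; the new ciphertextSize field is
  //    what gets reported to the upload endpoint as the file size.
  const encrypted = await encryptAttachmentV2ToDisk({
    keys,
    plaintext: { absolutePath: plaintextAbsolutePath },
  });
  const ciphertextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
    encrypted.path
  );

  try {
    if (CDNS_SUPPORTING_TUS.has(uploadForm.cdn)) {
      // 3a. Resumable TUS upload; the fetch wrapper carries the form's headers.
      await tusUpload({
        endpoint: uploadForm.signedUploadLocation,
        headers: {},
        fileName: uploadForm.key,
        filePath: ciphertextPath,
        fileSize: encrypted.ciphertextSize,
        reader: defaultFileReader,
        fetchFn: server.createFetchForAttachmentUpload(uploadForm),
      });
    } else {
      // 3b. Fallback: a single PUT of the ciphertext stream.
      await server.putEncryptedAttachment(
        createReadStream(ciphertextPath),
        uploadForm
      );
    }
    return uploadForm.key;
  } finally {
    safeUnlinkSync(ciphertextPath);
  }
}
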
@@ -1,7 +1,7 @@
 // Copyright 2020 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
 
-import { unlinkSync } from 'fs';
+import { unlinkSync, createReadStream, createWriteStream } from 'fs';
 import { open } from 'fs/promises';
 import {
   createDecipheriv,
@@ -11,7 +11,7 @@ import {
   randomBytes,
 } from 'crypto';
 import type { Decipher, Hash, Hmac } from 'crypto';
-import { PassThrough, Transform, type Writable } from 'stream';
+import { PassThrough, Transform, type Writable, Readable } from 'stream';
 import { pipeline } from 'stream/promises';
 import { ensureFile } from 'fs-extra';
 import * as log from './logging/log';
@@ -47,6 +47,7 @@ export function _generateAttachmentIv(): Uint8Array {
 export type EncryptedAttachmentV2 = {
   digest: Uint8Array;
   plaintextHash: string;
+  ciphertextSize: number;
 };
 
 export type DecryptedAttachmentV2 = {
@@ -54,9 +55,13 @@ export type DecryptedAttachmentV2 = {
   plaintextHash: string;
 };
 
+export type PlaintextSourceType =
+  | { data: Uint8Array }
+  | { absolutePath: string };
+
 type EncryptAttachmentV2PropsType = {
+  plaintext: PlaintextSourceType;
   keys: Readonly<Uint8Array>;
-  plaintextAbsolutePath: string;
   dangerousTestOnlyIv?: Readonly<Uint8Array>;
   dangerousTestOnlySkipPadding?: boolean;
 };
@@ -69,21 +74,18 @@ export async function encryptAttachmentV2ToDisk(
   const absoluteTargetPath =
     window.Signal.Migrations.getAbsoluteAttachmentPath(relativeTargetPath);
 
-  let writeFd;
+  await ensureFile(absoluteTargetPath);
 
   let encryptResult: EncryptedAttachmentV2;
 
   try {
-    await ensureFile(absoluteTargetPath);
-    writeFd = await open(absoluteTargetPath, 'w');
     encryptResult = await encryptAttachmentV2({
       ...args,
-      sink: writeFd.createWriteStream(),
+      sink: createWriteStream(absoluteTargetPath),
     });
   } catch (error) {
     safeUnlinkSync(absoluteTargetPath);
     throw error;
-  } finally {
-    await writeFd?.close();
   }
 
   return {
@@ -91,12 +93,11 @@ export async function encryptAttachmentV2ToDisk(
     path: relativeTargetPath,
   };
 }
 
 export async function encryptAttachmentV2({
   keys,
-  plaintextAbsolutePath,
+  plaintext,
   dangerousTestOnlyIv,
-  dangerousTestOnlySkipPadding = false,
+  dangerousTestOnlySkipPadding,
   sink,
 }: EncryptAttachmentV2PropsType & {
   sink?: Writable;
@@ -117,27 +118,29 @@ export async function encryptAttachmentV2({
     );
   }
   const iv = dangerousTestOnlyIv || _generateAttachmentIv();
 
   const plaintextHash = createHash(HashType.size256);
   const digest = createHash(HashType.size256);
 
-  let readFd;
+  let ciphertextSize: number | undefined;
 
   try {
-    try {
-      readFd = await open(plaintextAbsolutePath, 'r');
-    } catch (cause) {
-      throw new Error(`${logId}: Read path doesn't exist`, { cause });
-    }
+    const source =
+      'data' in plaintext
+        ? Readable.from(plaintext.data)
+        : createReadStream(plaintext.absolutePath);
 
     await pipeline(
       [
-        readFd.createReadStream(),
+        source,
         peekAndUpdateHash(plaintextHash),
         dangerousTestOnlySkipPadding ? undefined : appendPaddingStream(),
         createCipheriv(CipherType.AES256CBC, aesKey, iv),
         prependIv(iv),
         appendMacStream(macKey),
         peekAndUpdateHash(digest),
+        measureSize(size => {
+          ciphertextSize = size;
+        }),
        sink ?? new PassThrough().resume(),
       ].filter(isNotNil)
     );
@@ -147,8 +150,6 @@ export async function encryptAttachmentV2({
       Errors.toLogFormat(error)
     );
     throw error;
-  } finally {
-    await readFd?.close();
   }
 
   const ourPlaintextHash = plaintextHash.digest('hex');
@@ -164,9 +165,12 @@ export async function encryptAttachmentV2({
     `${logId}: Failed to generate ourDigest!`
   );
 
+  strictAssert(ciphertextSize != null, 'Failed to measure ciphertext size!');
+
   return {
     digest: ourDigest,
     plaintextHash: ourPlaintextHash,
+    ciphertextSize,
   };
 }
 
@@ -464,6 +468,18 @@ function trimPadding(size: number) {
   });
 }
 
+export function measureSize(onComplete: (size: number) => void): Transform {
+  let totalBytes = 0;
+  const passthrough = new PassThrough();
+  passthrough.on('data', chunk => {
+    totalBytes += chunk.length;
+  });
+  passthrough.on('end', () => {
+    onComplete(totalBytes);
+  });
+  return passthrough;
+}
+
 export function getAttachmentCiphertextLength(plaintextLength: number): number {
   const paddedPlaintextSize = logPadSize(plaintextLength);
 
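
The new measureSize helper above replaces the hand-rolled byte-counting PassThrough used by the backup service (see the hunks further down). A minimal usage sketch, assuming the import path and using illustrative file paths:

import { createReadStream, createWriteStream } from 'fs';
import { createGzip } from 'zlib';
import { pipeline } from 'stream/promises';
import { measureSize } from './AttachmentCrypto'; // import path assumed

// Compress a file and learn how many compressed bytes were written, without a
// hand-rolled 'data' listener; file paths are illustrative.
async function gzipAndMeasure(input: string, output: string): Promise<number> {
  let totalBytes = 0;
  await pipeline(
    createReadStream(input),
    createGzip(),
    // measureSize() taps the stream with a PassThrough and reports the total
    // byte count once the stream ends.
    measureSize(size => {
      totalBytes = size;
    }),
    createWriteStream(output)
  );
  return totalBytes;
}
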
@@ -2,8 +2,6 @@
 // SPDX-License-Identifier: AGPL-3.0-only
 
 import { strictAssert } from '../../util/assert';
-import { tusUpload } from '../../util/uploads/tusProtocol';
-import { defaultFileReader } from '../../util/uploads/uploads';
 import type {
   WebAPIType,
   AttachmentV3ResponseType,
@@ -13,6 +11,7 @@ import type {
   BackupListMediaResponseType,
 } from '../../textsecure/WebAPI';
 import type { BackupCredentials } from './credentials';
+import { uploadFile } from '../../util/uploadAttachment';
 
 export class BackupAPI {
   private cachedBackupInfo: GetBackupInfoResponseType | undefined;
@@ -60,16 +59,10 @@ export class BackupAPI {
       await this.credentials.getHeadersForToday()
     );
 
-    const fetchFn = this.server.createFetchForAttachmentUpload(form);
-
-    await tusUpload({
-      endpoint: form.signedUploadLocation,
-      headers: {},
-      fileName: form.key,
-      filePath,
-      fileSize,
-      reader: defaultFileReader,
-      fetchFn,
+    await uploadFile({
+      absoluteCiphertextPath: filePath,
+      ciphertextFileSize: fileSize,
+      uploadForm: form,
     });
   }
 
@@ -24,7 +24,11 @@ import { HOUR } from '../../util/durations';
 import { CipherType, HashType } from '../../types/Crypto';
 import * as Errors from '../../types/errors';
 import { constantTimeEqual } from '../../Crypto';
-import { getIvAndDecipher, getMacAndUpdateHmac } from '../../AttachmentCrypto';
+import {
+  getIvAndDecipher,
+  getMacAndUpdateHmac,
+  measureSize,
+} from '../../AttachmentCrypto';
 import { BackupExportStream } from './export';
 import { BackupImportStream } from './import';
 import { getKeyMaterial } from './crypto';
@@ -200,17 +204,8 @@ export class BackupsService {
 
     const iv = randomBytes(IV_LENGTH);
 
-    const pass = new PassThrough();
-
     let totalBytes = 0;
 
-    // Pause the flow first so that the we respect backpressure. The
-    // `pipeline` call below will control the flow anyway.
-    pass.pause();
-    pass.on('data', chunk => {
-      totalBytes += chunk.length;
-    });
-
     await pipeline(
       recordStream,
       createGzip(),
@@ -218,7 +213,9 @@ export class BackupsService {
       createCipheriv(CipherType.AES256CBC, aesKey, iv),
       prependStream(iv),
       appendMacStream(macKey),
-      pass,
+      measureSize(size => {
+        totalBytes = size;
+      }),
       sink
     );
 
@@ -11,10 +11,10 @@ import {
   type AttachmentType,
   isDownloadableFromTransitTier,
   isDownloadableFromBackupTier,
-  isDownloadedToLocalFile,
+  isAttachmentLocallySaved,
   type AttachmentDownloadableFromTransitTier,
   type AttachmentDownloadableFromBackupTier,
-  type DownloadedAttachment,
+  type LocallySavedAttachment,
   type AttachmentReadyForBackup,
 } from '../../../types/Attachment';
 import { Backups } from '../../../protobuf';
@@ -117,7 +117,7 @@ export function convertFilePointerToAttachment(
  * along with the new keys.
  */
 async function fixupAttachmentForBackup(
-  attachment: DownloadedAttachment
+  attachment: LocallySavedAttachment
 ): Promise<AttachmentReadyForBackup> {
   const fixedUpAttachment = { ...attachment };
   const keyToUse = attachment.key ?? Bytes.toBase64(getRandomBytes(64));
@@ -133,9 +133,11 @@ async function fixupAttachmentForBackup(
     // encrypt this file in memory in order to calculate the digest
     const { digest } = await encryptAttachmentV2({
       keys: Bytes.fromBase64(keyToUse),
-      plaintextAbsolutePath: window.Signal.Migrations.getAbsoluteAttachmentPath(
-        attachment.path
-      ),
+      plaintext: {
+        absolutePath: window.Signal.Migrations.getAbsoluteAttachmentPath(
+          attachment.path
+        ),
+      },
     });
 
     digestToUse = Bytes.toBase64(digest);
@@ -175,7 +177,7 @@ export async function convertAttachmentToFilePointer({
     blurHash: attachment.blurHash,
   });
 
-  if (!isDownloadedToLocalFile(attachment)) {
+  if (!isAttachmentLocallySaved(attachment)) {
     // 1. If the attachment is undownloaded, we cannot trust its digest / mediaName. Thus,
     //    we only include a BackupLocator if this attachment already had one (e.g. we
     //    restored it from a backup and it had a BackupLocator then, which means we have at
@@ -212,7 +214,7 @@ export async function convertAttachmentToFilePointer({
     });
   }
 
-  if (!isDownloadedToLocalFile(attachment)) {
+  if (!isAttachmentLocallySaved(attachment)) {
     return new Backups.FilePointer({
       ...filePointerRootProps,
       invalidAttachmentLocator: getInvalidAttachmentLocator(),
@@ -42,6 +42,7 @@ import {
   decryptAttachmentV2,
   encryptAttachmentV2ToDisk,
   getAesCbcCiphertextLength,
+  getAttachmentCiphertextLength,
   splitKeys,
 } from '../AttachmentCrypto';
 import { createTempDir, deleteTempDir } from '../updater/common';
@@ -531,6 +532,7 @@ describe('Crypto', () => {
   describe('attachments', () => {
     const FILE_PATH = join(__dirname, '../../fixtures/ghost-kitty.mp4');
     const FILE_CONTENTS = readFileSync(FILE_PATH);
+    const FILE_HASH = sha256(FILE_CONTENTS);
     let tempDir: string;
 
     function generateAttachmentKeys(): Uint8Array {
@@ -601,98 +603,108 @@ describe('Crypto', () => {
       }
     });
 
-    it('v2 roundtrips smaller file (all on disk)', async () => {
-      const keys = generateAttachmentKeys();
-      let plaintextPath;
-      let ciphertextPath;
-
-      try {
-        const encryptedAttachment = await encryptAttachmentV2ToDisk({
-          keys,
-          plaintextAbsolutePath: FILE_PATH,
-        });
-        ciphertextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
-          encryptedAttachment.path
-        );
-        const decryptedAttachment = await decryptAttachmentV2({
-          ciphertextPath,
-          idForLogging: 'test',
-          ...splitKeys(keys),
-          size: FILE_CONTENTS.byteLength,
-          theirDigest: encryptedAttachment.digest,
-        });
-        plaintextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
-          decryptedAttachment.path
-        );
-        const plaintext = readFileSync(plaintextPath);
-        assert.isTrue(constantTimeEqual(FILE_CONTENTS, plaintext));
-        assert.strictEqual(encryptedAttachment.plaintextHash, GHOST_KITTY_HASH);
-        assert.strictEqual(
-          decryptedAttachment.plaintextHash,
-          encryptedAttachment.plaintextHash
-        );
-      } finally {
-        if (plaintextPath) {
-          unlinkSync(plaintextPath);
-        }
-        if (ciphertextPath) {
-          unlinkSync(ciphertextPath);
-        }
-      }
-    });
-
-    it('v2 roundtrips random data (all on disk)', async () => {
-      const sourcePath = join(tempDir, 'random');
-      // Get sufficient large file to have more than 64kb of padding and
-      // trigger push back on the streams.
-      const data = getRandomBytes(5 * 1024 * 1024);
-      const digest = sha256(data);
-
-      writeFileSync(sourcePath, data);
-
-      const keys = generateAttachmentKeys();
-      let plaintextPath;
-      let ciphertextPath;
-
-      try {
-        const encryptedAttachment = await encryptAttachmentV2ToDisk({
-          keys,
-          plaintextAbsolutePath: sourcePath,
-        });
-        ciphertextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
-          encryptedAttachment.path
-        );
-        const decryptedAttachment = await decryptAttachmentV2({
-          ciphertextPath,
-          idForLogging: 'test',
-          ...splitKeys(keys),
-          size: data.byteLength,
-          theirDigest: encryptedAttachment.digest,
-        });
-        plaintextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
-          decryptedAttachment.path
-        );
-        const plaintext = readFileSync(plaintextPath);
-        assert.isTrue(constantTimeEqual(data, plaintext));
-        assert.strictEqual(
-          encryptedAttachment.plaintextHash,
-          Bytes.toHex(digest)
-        );
-        assert.strictEqual(
-          decryptedAttachment.plaintextHash,
-          encryptedAttachment.plaintextHash
-        );
-      } finally {
-        if (sourcePath) {
-          unlinkSync(sourcePath);
-        }
-        if (plaintextPath) {
-          unlinkSync(plaintextPath);
-        }
-        if (ciphertextPath) {
-          unlinkSync(ciphertextPath);
-        }
-      }
+    describe('v2 roundtrips', () => {
+      async function testV2RoundTripData({
+        path,
+        data,
+        plaintextHash,
+      }: {
+        path?: string;
+        data: Uint8Array;
+        plaintextHash: Uint8Array;
+      }): Promise<void> {
+        let plaintextPath;
+        let ciphertextPath;
+        const keys = generateAttachmentKeys();
+
+        try {
+          const encryptedAttachment = await encryptAttachmentV2ToDisk({
+            keys,
+            plaintext: path ? { absolutePath: path } : { data },
+          });
+
+          ciphertextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
+            encryptedAttachment.path
+          );
+
+          const decryptedAttachment = await decryptAttachmentV2({
+            ciphertextPath,
+            idForLogging: 'test',
+            ...splitKeys(keys),
+            size: data.byteLength,
+            theirDigest: encryptedAttachment.digest,
+          });
+          plaintextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
+            decryptedAttachment.path
+          );
+
+          const plaintext = readFileSync(plaintextPath);
+          assert.isTrue(constantTimeEqual(data, plaintext));
+          assert.strictEqual(
+            encryptedAttachment.ciphertextSize,
+            getAttachmentCiphertextLength(data.byteLength)
+          );
+          assert.strictEqual(
+            encryptedAttachment.plaintextHash,
+            Bytes.toHex(plaintextHash)
+          );
+          assert.strictEqual(
+            decryptedAttachment.plaintextHash,
+            encryptedAttachment.plaintextHash
+          );
+        } finally {
+          if (plaintextPath) {
+            unlinkSync(plaintextPath);
+          }
+          if (ciphertextPath) {
+            unlinkSync(ciphertextPath);
+          }
+        }
+      }
+
+      it('v2 roundtrips smaller file from disk', async () => {
+        await testV2RoundTripData({
+          path: FILE_PATH,
+          data: FILE_CONTENTS,
+          plaintextHash: FILE_HASH,
+        });
+      });
+
+      it('v2 roundtrips smaller file from memory', async () => {
+        await testV2RoundTripData({
+          data: FILE_CONTENTS,
+          plaintextHash: FILE_HASH,
+        });
+      });
+
+      it('v2 roundtrips large file from disk', async () => {
+        const sourcePath = join(tempDir, 'random');
+        // Get sufficient large file to have more than 64kb of padding and
+        // trigger push back on the streams.
+        const data = getRandomBytes(5 * 1024 * 1024);
+        const plaintextHash = sha256(data);
+        writeFileSync(sourcePath, data);
+        try {
+          await testV2RoundTripData({
+            path: sourcePath,
+            data,
+            plaintextHash,
+          });
+        } finally {
+          unlinkSync(sourcePath);
+        }
+      });
+
+      it('v2 roundtrips large file from memory', async () => {
+        // Get sufficient large data to have more than 64kb of padding and
+        // trigger push back on the streams.
+        const data = getRandomBytes(5 * 1024 * 1024);
+        const plaintextHash = sha256(data);
+        await testV2RoundTripData({
+          data,
+          plaintextHash,
+        });
+      });
     });
 
     it('v2 -> v1 (disk -> memory)', async () => {
@@ -702,7 +714,7 @@ describe('Crypto', () => {
       try {
         const encryptedAttachment = await encryptAttachmentV2ToDisk({
           keys,
-          plaintextAbsolutePath: FILE_PATH,
+          plaintext: { absolutePath: FILE_PATH },
         });
         ciphertextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
           encryptedAttachment.path
@@ -755,7 +767,7 @@ describe('Crypto', () => {
 
       const encryptedAttachmentV2 = await encryptAttachmentV2ToDisk({
         keys,
-        plaintextAbsolutePath: FILE_PATH,
+        plaintext: { absolutePath: FILE_PATH },
         dangerousTestOnlyIv,
       });
       ciphertextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
@@ -790,7 +802,7 @@ describe('Crypto', () => {
       try {
         innerEncryptedAttachment = await encryptAttachmentV2ToDisk({
           keys: innerKeys,
-          plaintextAbsolutePath,
+          plaintext: { absolutePath: plaintextAbsolutePath },
         });
         innerCiphertextPath =
           window.Signal.Migrations.getAbsoluteAttachmentPath(
@@ -799,7 +811,7 @@ describe('Crypto', () => {
 
       const outerEncryptedAttachment = await encryptAttachmentV2ToDisk({
         keys: outerKeys,
-        plaintextAbsolutePath: innerCiphertextPath,
+        plaintext: { absolutePath: innerCiphertextPath },
         // We (and the server!) don't pad the second layer
         dangerousTestOnlySkipPadding: true,
       });
@@ -539,7 +539,7 @@ function makeHTTPError(
 
 const URL_CALLS = {
   accountExistence: 'v1/accounts/account',
-  attachmentId: 'v3/attachments/form/upload',
+  attachmentUploadForm: 'v3/attachments/form/upload',
   attestation: 'v1/attestation',
   batchIdentityCheck: 'v1/profile/identity_check/batch',
   challenge: 'v1/challenge',
@@ -604,7 +604,7 @@ const WEBSOCKET_CALLS = new Set<keyof typeof URL_CALLS>([
   'profile',
 
   // AttachmentControllerV3
-  'attachmentId',
+  'attachmentUploadForm',
 
   // RemoteConfigController
   'config',
@@ -1192,6 +1192,7 @@ export type WebAPIType = {
       timeout?: number;
     };
   }) => Promise<Readable>;
+  getAttachmentUploadForm: () => Promise<AttachmentV3ResponseType>;
   getAvatar: (path: string) => Promise<Uint8Array>;
   getHasSubscription: (subscriberId: Uint8Array) => Promise<boolean>;
   getGroup: (options: GroupCredentialsType) => Promise<Proto.IGroupResponse>;
@@ -1279,7 +1280,10 @@ export type WebAPIType = {
   postBatchIdentityCheck: (
     elements: VerifyServiceIdRequestType
   ) => Promise<VerifyServiceIdResponseType>;
-  putEncryptedAttachment: (encryptedBin: Uint8Array) => Promise<string>;
+  putEncryptedAttachment: (
+    encryptedBin: Uint8Array | Readable,
+    uploadForm: AttachmentV3ResponseType
+  ) => Promise<void>;
   putProfile: (
     jsonData: ProfileRequestDataType
   ) => Promise<UploadAvatarHeadersType | undefined>;
@@ -1669,6 +1673,7 @@ export function initialize({
     getAccountForUsername,
     getAttachment,
     getAttachmentFromBackupTier,
+    getAttachmentUploadForm,
     getAvatar,
     getBackupCredentials,
     getBackupCDNCredentials,
@@ -3472,16 +3477,21 @@ export function initialize({
     return combinedStream;
   }
 
-  async function putEncryptedAttachment(encryptedBin: Uint8Array) {
-    const response = attachmentV3Response.parse(
+  async function getAttachmentUploadForm() {
+    return attachmentV3Response.parse(
      await _ajax({
-        call: 'attachmentId',
+        call: 'attachmentUploadForm',
         httpType: 'GET',
         responseType: 'json',
       })
     );
+  }
 
-    const { signedUploadLocation, key: cdnKey, headers } = response;
+  async function putEncryptedAttachment(
+    encryptedBin: Uint8Array | Readable,
+    uploadForm: AttachmentV3ResponseType
+  ) {
+    const { signedUploadLocation, headers } = uploadForm;
 
     // This is going to the CDN, not the service, so we use _outerAjax
     const { response: uploadResponse } = await _outerAjax(
@@ -3524,8 +3534,6 @@ export function initialize({
         return `${tmp}[REDACTED]`;
       },
     });
-
-    return cdnKey;
   }
 
   function getHeaderPadding() {
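
For callers, the WebAPI changes above mean the upload form is now fetched explicitly with getAttachmentUploadForm and passed into putEncryptedAttachment, which accepts a stream and no longer returns the CDN key. A small sketch of the new call shape (the import path is an assumption):

import type { WebAPIType } from './textsecure/WebAPI'; // import path assumed

// Illustrative only: the names and signatures come from the hunks above.
async function putWithExplicitForm(
  server: WebAPIType,
  ciphertext: Uint8Array
): Promise<string> {
  // The upload form is now fetched as its own call...
  const uploadForm = await server.getAttachmentUploadForm();
  // ...and handed to putEncryptedAttachment, which no longer returns the CDN
  // key; the key comes from the form instead.
  await server.putEncryptedAttachment(ciphertext, uploadForm);
  return uploadForm.key;
}
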
@@ -1012,13 +1012,13 @@ export type AttachmentDownloadableFromBackupTier = WithRequiredProperties<
   'backupLocator'
 >;
 
-export type DownloadedAttachment = WithRequiredProperties<
+export type LocallySavedAttachment = WithRequiredProperties<
   AttachmentType,
   'path'
 >;
 
 export type AttachmentReadyForBackup = WithRequiredProperties<
-  DownloadedAttachment,
+  LocallySavedAttachment,
   RequiredPropertiesForDecryption
 >;
 
@@ -1052,8 +1052,8 @@ export function isDownloadableFromBackupTier(
   return false;
 }
 
-export function isDownloadedToLocalFile(
+export function isAttachmentLocallySaved(
   attachment: AttachmentType
-): attachment is DownloadedAttachment {
+): attachment is LocallySavedAttachment {
   return Boolean(attachment.path);
 }
@@ -1,34 +1,46 @@
 // Copyright 2023 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
+import { createReadStream } from 'fs';
 import type {
   AttachmentWithHydratedData,
   UploadedAttachmentType,
 } from '../types/Attachment';
 import { MIMETypeToString } from '../types/MIME';
-import { padAndEncryptAttachment, getRandomBytes } from '../Crypto';
+import { getRandomBytes } from '../Crypto';
 import { strictAssert } from './assert';
+import { backupsService } from '../services/backups';
+import { tusUpload } from './uploads/tusProtocol';
+import { defaultFileReader } from './uploads/uploads';
+import type { AttachmentV3ResponseType } from '../textsecure/WebAPI';
+import {
+  type EncryptedAttachmentV2,
+  encryptAttachmentV2ToDisk,
+  safeUnlinkSync,
+  type PlaintextSourceType,
+} from '../AttachmentCrypto';
+import { missingCaseError } from './missingCaseError';
 
+const CDNS_SUPPORTING_TUS = new Set([3]);
+
 export async function uploadAttachment(
   attachment: AttachmentWithHydratedData
 ): Promise<UploadedAttachmentType> {
-  const keys = getRandomBytes(64);
-  const encrypted = padAndEncryptAttachment({
-    plaintext: attachment.data,
-    keys,
-  });
-
   const { server } = window.textsecure;
   strictAssert(server, 'WebAPI must be initialized');
 
-  const cdnKey = await server.putEncryptedAttachment(encrypted.ciphertext);
-  const size = attachment.data.byteLength;
+  const keys = getRandomBytes(64);
+  const { cdnKey, cdnNumber, encrypted } = await encryptAndUploadAttachment({
+    plaintext: { data: attachment.data },
+    keys,
+    uploadType: 'standard',
+  });
 
   return {
     cdnKey,
-    cdnNumber: 2,
+    cdnNumber,
     key: keys,
-    size,
+    size: attachment.data.byteLength,
     digest: encrypted.digest,
     plaintextHash: encrypted.plaintextHash,
@@ -41,3 +53,91 @@ export async function uploadAttachment(
     blurHash: attachment.blurHash,
   };
 }
+
+export async function encryptAndUploadAttachment({
+  plaintext,
+  keys,
+  uploadType,
+}: {
+  plaintext: PlaintextSourceType;
+  keys: Uint8Array;
+  uploadType: 'standard' | 'backup';
+}): Promise<{
+  cdnKey: string;
+  cdnNumber: number;
+  encrypted: EncryptedAttachmentV2;
+}> {
+  const { server } = window.textsecure;
+  strictAssert(server, 'WebAPI must be initialized');
+
+  let uploadForm: AttachmentV3ResponseType;
+  let absoluteCiphertextPath: string | undefined;
+
+  try {
+    switch (uploadType) {
+      case 'standard':
+        uploadForm = await server.getAttachmentUploadForm();
+        break;
+      case 'backup':
+        uploadForm = await server.getBackupMediaUploadForm(
+          await backupsService.credentials.getHeadersForToday()
+        );
+        break;
+      default:
+        throw missingCaseError(uploadType);
+    }
+
+    const encrypted = await encryptAttachmentV2ToDisk({
+      plaintext,
+      keys,
+    });
+
+    absoluteCiphertextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
+      encrypted.path
+    );
+
+    await uploadFile({
+      absoluteCiphertextPath,
+      ciphertextFileSize: encrypted.ciphertextSize,
+      uploadForm,
+    });
+
+    return { cdnKey: uploadForm.key, cdnNumber: uploadForm.cdn, encrypted };
+  } finally {
+    if (absoluteCiphertextPath) {
+      safeUnlinkSync(absoluteCiphertextPath);
+    }
+  }
+}
+
+export async function uploadFile({
+  absoluteCiphertextPath,
+  ciphertextFileSize,
+  uploadForm,
+}: {
+  absoluteCiphertextPath: string;
+  ciphertextFileSize: number;
+  uploadForm: AttachmentV3ResponseType;
+}): Promise<void> {
+  const { server } = window.textsecure;
+  strictAssert(server, 'WebAPI must be initialized');
+
+  if (CDNS_SUPPORTING_TUS.has(uploadForm.cdn)) {
+    const fetchFn = server.createFetchForAttachmentUpload(uploadForm);
+    await tusUpload({
+      endpoint: uploadForm.signedUploadLocation,
+      // the upload form headers are already included in the created fetch function
+      headers: {},
+      fileName: uploadForm.key,
+      filePath: absoluteCiphertextPath,
+      fileSize: ciphertextFileSize,
+      reader: defaultFileReader,
+      fetchFn,
+    });
+  } else {
+    await server.putEncryptedAttachment(
+      createReadStream(absoluteCiphertextPath),
+      uploadForm
+    );
+  }
+}
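
A hypothetical caller of the encryptAndUploadAttachment helper added above, using the 'backup' upload type; per the switch in the diff, that variant fetches the backup-media upload form with backup credentials, while the encrypt-to-disk-then-upload flow stays the same. Import paths are assumptions:

import { getRandomBytes } from '../Crypto'; // import paths assumed
import { encryptAndUploadAttachment } from './uploadAttachment';

// Hypothetical caller: with uploadType 'backup' the helper fetches the
// backup-media upload form (using backup credentials) instead of the standard
// attachment form; everything else is shared with the 'standard' path.
async function encryptAndUploadForBackup(plaintextAbsolutePath: string) {
  const keys = getRandomBytes(64);
  const { cdnKey, cdnNumber, encrypted } = await encryptAndUploadAttachment({
    plaintext: { absolutePath: plaintextAbsolutePath },
    keys,
    uploadType: 'backup',
  });
  return { cdnKey, cdnNumber, digest: encrypted.digest };
}
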
@@ -20,6 +20,13 @@ function toLogId(input: string) {
   return Buffer.from(input).toString('base64').slice(0, 3);
 }
 
+function redactedUrl(endpoint: string) {
+  const redacted = new URL(endpoint);
+  redacted.search = '';
+  redacted.pathname = '';
+  return `${redacted}[REDACTED]`;
+}
+
 /**
  * This file is a standalone implementation of the TUS protocol.
  * Signal specific logic is in uploads.ts
@@ -94,7 +101,10 @@ export async function _tusCreateWithUploadRequest({
   signal?: AbortSignal;
   fetchFn?: FetchFunctionType;
 }): Promise<boolean> {
-  const logId = `tusProtocol: CreateWithUpload(${toLogId(fileName)})`;
+  const logId = `tusProtocol: CreateWithUpload(${toLogId(
+    fileName
+  )}): POST ${redactedUrl(endpoint)}`;
+
   if (onProgress != null) {
     addProgressHandler(readable, onProgress);
   }
@@ -160,7 +170,10 @@ export async function _tusGetCurrentOffsetRequest({
   signal?: AbortSignal;
   fetchFn?: FetchFunctionType;
 }): Promise<number> {
-  const logId = `tusProtocol: GetCurrentOffsetRequest(${toLogId(fileName)})`;
+  const logId = `tusProtocol: GetCurrentOffsetRequest(${toLogId(
+    fileName
+  )}): HEAD ${redactedUrl(endpoint)}`;
+
   log.info(`${logId} init`);
 
   const response = await fetchFn(`${endpoint}/${fileName}`, {
@@ -219,7 +232,9 @@ export async function _tusResumeUploadRequest({
   signal?: AbortSignal;
   fetchFn?: FetchFunctionType;
 }): Promise<boolean> {
-  const logId = `tusProtocol: ResumeUploadRequest(${toLogId(fileName)})`;
+  const logId = `tusProtocol: ResumeUploadRequest(${toLogId(
+    fileName
+  )}): PATCH ${redactedUrl(endpoint)}`;
   if (onProgress != null) {
     addProgressHandler(readable, onProgress);
   }
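
For orientation, the three request helpers above map onto the core TUS 1.0.0 exchange: a POST that creates the upload and may carry the first bytes (creation-with-upload), a HEAD that reports the server's current Upload-Offset, and a PATCH that resumes from that offset. The sketch below illustrates that exchange with plain fetch; it follows the TUS spec rather than this project's code, and the endpoint, file name, and the `${endpoint}/${fileName}` URL shape are placeholders modeled on the helpers above.

// Placeholder endpoint, file name, and payload; header names follow TUS 1.0.0.
async function tusRoundTrip(
  endpoint: string,
  fileName: string,
  body: Uint8Array
): Promise<void> {
  // 1. Create the upload and send the first bytes ("creation-with-upload", POST).
  await fetch(endpoint, {
    method: 'POST',
    headers: {
      'Tus-Resumable': '1.0.0',
      'Upload-Length': String(body.byteLength),
      'Upload-Metadata': `filename ${Buffer.from(fileName).toString('base64')}`,
      'Content-Type': 'application/offset+octet-stream',
    },
    body,
  });

  // 2. After an interruption, ask the server how many bytes it already has (HEAD).
  const head = await fetch(`${endpoint}/${fileName}`, {
    method: 'HEAD',
    headers: { 'Tus-Resumable': '1.0.0' },
  });
  const offset = Number(head.headers.get('Upload-Offset') ?? 0);

  // 3. Resume by PATCHing the remaining bytes from that offset.
  await fetch(`${endpoint}/${fileName}`, {
    method: 'PATCH',
    headers: {
      'Tus-Resumable': '1.0.0',
      'Upload-Offset': String(offset),
      'Content-Type': 'application/offset+octet-stream',
    },
    body: body.subarray(offset),
  });
}
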