Use TUS for attachment uploads

parent 4253bed0bd
commit a992546cda

9 changed files with 312 additions and 169 deletions
ts/AttachmentCrypto.ts
@@ -1,7 +1,7 @@
 // Copyright 2020 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
 
-import { unlinkSync } from 'fs';
+import { unlinkSync, createReadStream, createWriteStream } from 'fs';
 import { open } from 'fs/promises';
 import {
   createDecipheriv,
@@ -11,7 +11,7 @@ import {
   randomBytes,
 } from 'crypto';
 import type { Decipher, Hash, Hmac } from 'crypto';
-import { PassThrough, Transform, type Writable } from 'stream';
+import { PassThrough, Transform, type Writable, Readable } from 'stream';
 import { pipeline } from 'stream/promises';
 import { ensureFile } from 'fs-extra';
 import * as log from './logging/log';
@@ -47,6 +47,7 @@ export function _generateAttachmentIv(): Uint8Array {
 export type EncryptedAttachmentV2 = {
   digest: Uint8Array;
   plaintextHash: string;
+  ciphertextSize: number;
 };
 
 export type DecryptedAttachmentV2 = {
@@ -54,9 +55,13 @@ export type DecryptedAttachmentV2 = {
   plaintextHash: string;
 };
 
+export type PlaintextSourceType =
+  | { data: Uint8Array }
+  | { absolutePath: string };
+
 type EncryptAttachmentV2PropsType = {
+  plaintext: PlaintextSourceType;
   keys: Readonly<Uint8Array>;
-  plaintextAbsolutePath: string;
   dangerousTestOnlyIv?: Readonly<Uint8Array>;
   dangerousTestOnlySkipPadding?: boolean;
 };
@@ -69,21 +74,18 @@ export async function encryptAttachmentV2ToDisk(
   const absoluteTargetPath =
     window.Signal.Migrations.getAbsoluteAttachmentPath(relativeTargetPath);
 
-  let writeFd;
+  await ensureFile(absoluteTargetPath);
+
   let encryptResult: EncryptedAttachmentV2;
 
   try {
-    await ensureFile(absoluteTargetPath);
-    writeFd = await open(absoluteTargetPath, 'w');
     encryptResult = await encryptAttachmentV2({
       ...args,
-      sink: writeFd.createWriteStream(),
+      sink: createWriteStream(absoluteTargetPath),
     });
   } catch (error) {
     safeUnlinkSync(absoluteTargetPath);
     throw error;
-  } finally {
-    await writeFd?.close();
   }
 
   return {
@@ -91,12 +93,11 @@ export async function encryptAttachmentV2ToDisk(
     path: relativeTargetPath,
   };
 }
 
 export async function encryptAttachmentV2({
   keys,
-  plaintextAbsolutePath,
+  plaintext,
   dangerousTestOnlyIv,
-  dangerousTestOnlySkipPadding = false,
+  dangerousTestOnlySkipPadding,
   sink,
 }: EncryptAttachmentV2PropsType & {
   sink?: Writable;
@@ -117,27 +118,29 @@ export async function encryptAttachmentV2({
     );
   }
   const iv = dangerousTestOnlyIv || _generateAttachmentIv();
 
   const plaintextHash = createHash(HashType.size256);
   const digest = createHash(HashType.size256);
 
-  let readFd;
+  let ciphertextSize: number | undefined;
 
   try {
-    try {
-      readFd = await open(plaintextAbsolutePath, 'r');
-    } catch (cause) {
-      throw new Error(`${logId}: Read path doesn't exist`, { cause });
-    }
+    const source =
+      'data' in plaintext
+        ? Readable.from(plaintext.data)
+        : createReadStream(plaintext.absolutePath);
+
     await pipeline(
       [
-        readFd.createReadStream(),
+        source,
         peekAndUpdateHash(plaintextHash),
         dangerousTestOnlySkipPadding ? undefined : appendPaddingStream(),
         createCipheriv(CipherType.AES256CBC, aesKey, iv),
         prependIv(iv),
         appendMacStream(macKey),
         peekAndUpdateHash(digest),
+        measureSize(size => {
+          ciphertextSize = size;
+        }),
        sink ?? new PassThrough().resume(),
      ].filter(isNotNil)
    );
@@ -147,8 +150,6 @@ export async function encryptAttachmentV2({
       Errors.toLogFormat(error)
     );
     throw error;
-  } finally {
-    await readFd?.close();
   }
 
   const ourPlaintextHash = plaintextHash.digest('hex');
@@ -164,9 +165,12 @@ export async function encryptAttachmentV2({
     `${logId}: Failed to generate ourDigest!`
   );
 
+  strictAssert(ciphertextSize != null, 'Failed to measure ciphertext size!');
+
   return {
     digest: ourDigest,
     plaintextHash: ourPlaintextHash,
+    ciphertextSize,
   };
 }
 
@@ -464,6 +468,18 @@ function trimPadding(size: number) {
   });
 }
 
+export function measureSize(onComplete: (size: number) => void): Transform {
+  let totalBytes = 0;
+  const passthrough = new PassThrough();
+  passthrough.on('data', chunk => {
+    totalBytes += chunk.length;
+  });
+  passthrough.on('end', () => {
+    onComplete(totalBytes);
+  });
+  return passthrough;
+}
+
 export function getAttachmentCiphertextLength(plaintextLength: number): number {
   const paddedPlaintextSize = logPadSize(plaintextLength);
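Note: a minimal standalone sketch of how the measureSize transform added above behaves in a pipeline (plain Node streams; the buffer sizes and the throwaway sink are illustrative, not from the commit):

import { PassThrough, Readable, type Transform } from 'stream';
import { pipeline } from 'stream/promises';

// Same shape as measureSize above: count bytes as they flow through,
// report the total once the readable side ends, never buffer the payload.
function measureSize(onComplete: (size: number) => void): Transform {
  let totalBytes = 0;
  const passthrough = new PassThrough();
  passthrough.on('data', chunk => {
    totalBytes += chunk.length;
  });
  passthrough.on('end', () => {
    onComplete(totalBytes);
  });
  return passthrough;
}

async function demo(): Promise<void> {
  let size = 0;
  await pipeline(
    Readable.from([Buffer.alloc(700), Buffer.alloc(324)]),
    measureSize(n => {
      size = n;
    }),
    // Discarding sink, mirroring `sink ?? new PassThrough().resume()` above.
    new PassThrough().resume()
  );
  console.log(size); // 1024
}

void demo();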
ts/services/backups/api.ts
@@ -2,8 +2,6 @@
 // SPDX-License-Identifier: AGPL-3.0-only
 
 import { strictAssert } from '../../util/assert';
-import { tusUpload } from '../../util/uploads/tusProtocol';
-import { defaultFileReader } from '../../util/uploads/uploads';
 import type {
   WebAPIType,
   AttachmentV3ResponseType,
@@ -13,6 +11,7 @@ import type {
   BackupListMediaResponseType,
 } from '../../textsecure/WebAPI';
 import type { BackupCredentials } from './credentials';
+import { uploadFile } from '../../util/uploadAttachment';
 
 export class BackupAPI {
   private cachedBackupInfo: GetBackupInfoResponseType | undefined;
@@ -60,16 +59,10 @@ export class BackupAPI {
       await this.credentials.getHeadersForToday()
     );
 
-    const fetchFn = this.server.createFetchForAttachmentUpload(form);
-
-    await tusUpload({
-      endpoint: form.signedUploadLocation,
-      headers: {},
-      fileName: form.key,
-      filePath,
-      fileSize,
-      reader: defaultFileReader,
-      fetchFn,
+    await uploadFile({
+      absoluteCiphertextPath: filePath,
+      ciphertextFileSize: fileSize,
+      uploadForm: form,
     });
   }
ts/services/backups/index.ts
@@ -24,7 +24,11 @@ import { HOUR } from '../../util/durations';
 import { CipherType, HashType } from '../../types/Crypto';
 import * as Errors from '../../types/errors';
 import { constantTimeEqual } from '../../Crypto';
-import { getIvAndDecipher, getMacAndUpdateHmac } from '../../AttachmentCrypto';
+import {
+  getIvAndDecipher,
+  getMacAndUpdateHmac,
+  measureSize,
+} from '../../AttachmentCrypto';
 import { BackupExportStream } from './export';
 import { BackupImportStream } from './import';
 import { getKeyMaterial } from './crypto';
@@ -200,17 +204,8 @@ export class BackupsService {
 
     const iv = randomBytes(IV_LENGTH);
 
-    const pass = new PassThrough();
-
     let totalBytes = 0;
 
-    // Pause the flow first so that the we respect backpressure. The
-    // `pipeline` call below will control the flow anyway.
-    pass.pause();
-    pass.on('data', chunk => {
-      totalBytes += chunk.length;
-    });
-
     await pipeline(
       recordStream,
       createGzip(),
@@ -218,7 +213,9 @@ export class BackupsService {
       createCipheriv(CipherType.AES256CBC, aesKey, iv),
       prependStream(iv),
       appendMacStream(macKey),
-      pass,
+      measureSize(size => {
+        totalBytes = size;
+      }),
       sink
     );
ts/services/backups/util/filePointers.ts
@@ -11,10 +11,10 @@ import {
   type AttachmentType,
   isDownloadableFromTransitTier,
   isDownloadableFromBackupTier,
-  isDownloadedToLocalFile,
+  isAttachmentLocallySaved,
   type AttachmentDownloadableFromTransitTier,
   type AttachmentDownloadableFromBackupTier,
-  type DownloadedAttachment,
+  type LocallySavedAttachment,
   type AttachmentReadyForBackup,
 } from '../../../types/Attachment';
 import { Backups } from '../../../protobuf';
@@ -117,7 +117,7 @@ export function convertFilePointerToAttachment(
  * along with the new keys.
  */
 async function fixupAttachmentForBackup(
-  attachment: DownloadedAttachment
+  attachment: LocallySavedAttachment
 ): Promise<AttachmentReadyForBackup> {
   const fixedUpAttachment = { ...attachment };
   const keyToUse = attachment.key ?? Bytes.toBase64(getRandomBytes(64));
@@ -133,9 +133,11 @@ async function fixupAttachmentForBackup(
     // encrypt this file in memory in order to calculate the digest
     const { digest } = await encryptAttachmentV2({
       keys: Bytes.fromBase64(keyToUse),
-      plaintextAbsolutePath: window.Signal.Migrations.getAbsoluteAttachmentPath(
-        attachment.path
-      ),
+      plaintext: {
+        absolutePath: window.Signal.Migrations.getAbsoluteAttachmentPath(
+          attachment.path
+        ),
+      },
     });
 
     digestToUse = Bytes.toBase64(digest);
@@ -175,7 +177,7 @@ export async function convertAttachmentToFilePointer({
     blurHash: attachment.blurHash,
   });
 
-  if (!isDownloadedToLocalFile(attachment)) {
+  if (!isAttachmentLocallySaved(attachment)) {
    // 1. If the attachment is undownloaded, we cannot trust its digest / mediaName. Thus,
    //    we only include a BackupLocator if this attachment already had one (e.g. we
    //    restored it from a backup and it had a BackupLocator then, which means we have at
@@ -212,7 +214,7 @@ export async function convertAttachmentToFilePointer({
    });
  }
 
-  if (!isDownloadedToLocalFile(attachment)) {
+  if (!isAttachmentLocallySaved(attachment)) {
     return new Backups.FilePointer({
       ...filePointerRootProps,
       invalidAttachmentLocator: getInvalidAttachmentLocator(),
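Note: the call-site change above follows from the new PlaintextSourceType union in ts/AttachmentCrypto.ts. A hedged sketch of the two accepted shapes (the import paths and the literal path value are illustrative assumptions):

import { encryptAttachmentV2ToDisk } from '../../../AttachmentCrypto'; // assumed relative path
import { getRandomBytes } from '../../../Crypto'; // assumed relative path

async function demo(): Promise<void> {
  const keys = getRandomBytes(64);

  // Source on disk, as fixupAttachmentForBackup now passes it:
  await encryptAttachmentV2ToDisk({
    keys,
    plaintext: { absolutePath: '/tmp/example-plaintext' }, // placeholder path
  });

  // Source already in memory:
  await encryptAttachmentV2ToDisk({
    keys,
    plaintext: { data: getRandomBytes(1024) },
  });
}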
ts/test-electron/Crypto_test.ts
@@ -42,6 +42,7 @@ import {
   decryptAttachmentV2,
   encryptAttachmentV2ToDisk,
   getAesCbcCiphertextLength,
+  getAttachmentCiphertextLength,
   splitKeys,
 } from '../AttachmentCrypto';
 import { createTempDir, deleteTempDir } from '../updater/common';
@@ -531,6 +532,7 @@ describe('Crypto', () => {
   describe('attachments', () => {
     const FILE_PATH = join(__dirname, '../../fixtures/ghost-kitty.mp4');
     const FILE_CONTENTS = readFileSync(FILE_PATH);
+    const FILE_HASH = sha256(FILE_CONTENTS);
     let tempDir: string;
 
     function generateAttachmentKeys(): Uint8Array {
@@ -601,98 +603,108 @@ describe('Crypto', () => {
       }
     });
 
-    it('v2 roundtrips smaller file (all on disk)', async () => {
-      const keys = generateAttachmentKeys();
-      let plaintextPath;
-      let ciphertextPath;
-
-      try {
-        const encryptedAttachment = await encryptAttachmentV2ToDisk({
-          keys,
-          plaintextAbsolutePath: FILE_PATH,
-        });
-        ciphertextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
-          encryptedAttachment.path
-        );
-        const decryptedAttachment = await decryptAttachmentV2({
-          ciphertextPath,
-          idForLogging: 'test',
-          ...splitKeys(keys),
-          size: FILE_CONTENTS.byteLength,
-          theirDigest: encryptedAttachment.digest,
-        });
-        plaintextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
-          decryptedAttachment.path
-        );
-        const plaintext = readFileSync(plaintextPath);
-        assert.isTrue(constantTimeEqual(FILE_CONTENTS, plaintext));
-        assert.strictEqual(encryptedAttachment.plaintextHash, GHOST_KITTY_HASH);
-        assert.strictEqual(
-          decryptedAttachment.plaintextHash,
-          encryptedAttachment.plaintextHash
-        );
-      } finally {
-        if (plaintextPath) {
-          unlinkSync(plaintextPath);
-        }
-        if (ciphertextPath) {
-          unlinkSync(ciphertextPath);
-        }
-      }
-    });
-
-    it('v2 roundtrips random data (all on disk)', async () => {
-      const sourcePath = join(tempDir, 'random');
-      // Get sufficient large file to have more than 64kb of padding and
-      // trigger push back on the streams.
-      const data = getRandomBytes(5 * 1024 * 1024);
-      const digest = sha256(data);
-
-      writeFileSync(sourcePath, data);
-
-      const keys = generateAttachmentKeys();
-      let plaintextPath;
-      let ciphertextPath;
-
-      try {
-        const encryptedAttachment = await encryptAttachmentV2ToDisk({
-          keys,
-          plaintextAbsolutePath: sourcePath,
-        });
-        ciphertextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
-          encryptedAttachment.path
-        );
-        const decryptedAttachment = await decryptAttachmentV2({
-          ciphertextPath,
-          idForLogging: 'test',
-          ...splitKeys(keys),
-          size: data.byteLength,
-          theirDigest: encryptedAttachment.digest,
-        });
-        plaintextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
-          decryptedAttachment.path
-        );
-        const plaintext = readFileSync(plaintextPath);
-        assert.isTrue(constantTimeEqual(data, plaintext));
-        assert.strictEqual(
-          encryptedAttachment.plaintextHash,
-          Bytes.toHex(digest)
-        );
-        assert.strictEqual(
-          decryptedAttachment.plaintextHash,
-          encryptedAttachment.plaintextHash
-        );
-      } finally {
-        if (sourcePath) {
-          unlinkSync(sourcePath);
-        }
-        if (plaintextPath) {
-          unlinkSync(plaintextPath);
-        }
-        if (ciphertextPath) {
-          unlinkSync(ciphertextPath);
-        }
-      }
-    });
+    describe('v2 roundtrips', () => {
+      async function testV2RoundTripData({
+        path,
+        data,
+        plaintextHash,
+      }: {
+        path?: string;
+        data: Uint8Array;
+        plaintextHash: Uint8Array;
+      }): Promise<void> {
+        let plaintextPath;
+        let ciphertextPath;
+        const keys = generateAttachmentKeys();
+
+        try {
+          const encryptedAttachment = await encryptAttachmentV2ToDisk({
+            keys,
+            plaintext: path ? { absolutePath: path } : { data },
+          });
+
+          ciphertextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
+            encryptedAttachment.path
+          );
+
+          const decryptedAttachment = await decryptAttachmentV2({
+            ciphertextPath,
+            idForLogging: 'test',
+            ...splitKeys(keys),
+            size: data.byteLength,
+            theirDigest: encryptedAttachment.digest,
+          });
+          plaintextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
+            decryptedAttachment.path
+          );
+
+          const plaintext = readFileSync(plaintextPath);
+          assert.isTrue(constantTimeEqual(data, plaintext));
+          assert.strictEqual(
+            encryptedAttachment.ciphertextSize,
+            getAttachmentCiphertextLength(data.byteLength)
+          );
+          assert.strictEqual(
+            encryptedAttachment.plaintextHash,
+            Bytes.toHex(plaintextHash)
+          );
+          assert.strictEqual(
+            decryptedAttachment.plaintextHash,
+            encryptedAttachment.plaintextHash
+          );
+        } finally {
+          if (plaintextPath) {
+            unlinkSync(plaintextPath);
+          }
+          if (ciphertextPath) {
+            unlinkSync(ciphertextPath);
+          }
+        }
+      }
+
+      it('v2 roundtrips smaller file from disk', async () => {
+        await testV2RoundTripData({
+          path: FILE_PATH,
+          data: FILE_CONTENTS,
+          plaintextHash: FILE_HASH,
+        });
+      });
+
+      it('v2 roundtrips smaller file from memory', async () => {
+        await testV2RoundTripData({
+          data: FILE_CONTENTS,
+          plaintextHash: FILE_HASH,
+        });
+      });
+
+      it('v2 roundtrips large file from disk', async () => {
+        const sourcePath = join(tempDir, 'random');
+        // Get sufficient large file to have more than 64kb of padding and
+        // trigger push back on the streams.
+        const data = getRandomBytes(5 * 1024 * 1024);
+        const plaintextHash = sha256(data);
+        writeFileSync(sourcePath, data);
+        try {
+          await testV2RoundTripData({
+            path: sourcePath,
+            data,
+            plaintextHash,
+          });
+        } finally {
+          unlinkSync(sourcePath);
+        }
+      });
+
+      it('v2 roundtrips large file from memory', async () => {
+        // Get sufficient large data to have more than 64kb of padding and
+        // trigger push back on the streams.
+        const data = getRandomBytes(5 * 1024 * 1024);
+        const plaintextHash = sha256(data);
+        await testV2RoundTripData({
+          data,
+          plaintextHash,
+        });
+      });
+    });
 
     it('v2 -> v1 (disk -> memory)', async () => {
@@ -702,7 +714,7 @@ describe('Crypto', () => {
       try {
         const encryptedAttachment = await encryptAttachmentV2ToDisk({
           keys,
-          plaintextAbsolutePath: FILE_PATH,
+          plaintext: { absolutePath: FILE_PATH },
         });
         ciphertextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
           encryptedAttachment.path
@@ -755,7 +767,7 @@ describe('Crypto', () => {
 
       const encryptedAttachmentV2 = await encryptAttachmentV2ToDisk({
         keys,
-        plaintextAbsolutePath: FILE_PATH,
+        plaintext: { absolutePath: FILE_PATH },
         dangerousTestOnlyIv,
       });
       ciphertextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
@@ -790,7 +802,7 @@ describe('Crypto', () => {
       try {
         innerEncryptedAttachment = await encryptAttachmentV2ToDisk({
           keys: innerKeys,
-          plaintextAbsolutePath,
+          plaintext: { absolutePath: plaintextAbsolutePath },
         });
         innerCiphertextPath =
           window.Signal.Migrations.getAbsoluteAttachmentPath(
@@ -799,7 +811,7 @@ describe('Crypto', () => {
 
       const outerEncryptedAttachment = await encryptAttachmentV2ToDisk({
         keys: outerKeys,
-        plaintextAbsolutePath: innerCiphertextPath,
+        plaintext: { absolutePath: innerCiphertextPath },
         // We (and the server!) don't pad the second layer
         dangerousTestOnlySkipPadding: true,
       });
ts/textsecure/WebAPI.ts
@@ -539,7 +539,7 @@ function makeHTTPError(
 
 const URL_CALLS = {
   accountExistence: 'v1/accounts/account',
-  attachmentId: 'v3/attachments/form/upload',
+  attachmentUploadForm: 'v3/attachments/form/upload',
   attestation: 'v1/attestation',
   batchIdentityCheck: 'v1/profile/identity_check/batch',
   challenge: 'v1/challenge',
@@ -604,7 +604,7 @@ const WEBSOCKET_CALLS = new Set<keyof typeof URL_CALLS>([
   'profile',
 
   // AttachmentControllerV3
-  'attachmentId',
+  'attachmentUploadForm',
 
   // RemoteConfigController
   'config',
@@ -1200,6 +1200,7 @@ export type WebAPIType = {
       timeout?: number;
     };
   }) => Promise<Readable>;
+  getAttachmentUploadForm: () => Promise<AttachmentV3ResponseType>;
   getAvatar: (path: string) => Promise<Uint8Array>;
   getHasSubscription: (subscriberId: Uint8Array) => Promise<boolean>;
   getGroup: (options: GroupCredentialsType) => Promise<Proto.IGroupResponse>;
@@ -1287,7 +1288,10 @@ export type WebAPIType = {
   postBatchIdentityCheck: (
     elements: VerifyServiceIdRequestType
   ) => Promise<VerifyServiceIdResponseType>;
-  putEncryptedAttachment: (encryptedBin: Uint8Array) => Promise<string>;
+  putEncryptedAttachment: (
+    encryptedBin: Uint8Array | Readable,
+    uploadForm: AttachmentV3ResponseType
+  ) => Promise<void>;
   putProfile: (
     jsonData: ProfileRequestDataType
   ) => Promise<UploadAvatarHeadersType | undefined>;
@@ -1677,6 +1681,7 @@ export function initialize({
     getAccountForUsername,
     getAttachment,
     getAttachmentFromBackupTier,
+    getAttachmentUploadForm,
     getAvatar,
     getBackupCredentials,
     getBackupCDNCredentials,
@@ -3480,16 +3485,21 @@ export function initialize({
     return combinedStream;
   }
 
-  async function putEncryptedAttachment(encryptedBin: Uint8Array) {
-    const response = attachmentV3Response.parse(
+  async function getAttachmentUploadForm() {
+    return attachmentV3Response.parse(
       await _ajax({
-        call: 'attachmentId',
+        call: 'attachmentUploadForm',
         httpType: 'GET',
         responseType: 'json',
       })
     );
+  }
 
-    const { signedUploadLocation, key: cdnKey, headers } = response;
+  async function putEncryptedAttachment(
+    encryptedBin: Uint8Array | Readable,
+    uploadForm: AttachmentV3ResponseType
+  ) {
+    const { signedUploadLocation, headers } = uploadForm;
 
     // This is going to the CDN, not the service, so we use _outerAjax
     const { response: uploadResponse } = await _outerAjax(
@@ -3532,8 +3542,6 @@ export function initialize({
         return `${tmp}[REDACTED]`;
       },
     });
-
-    return cdnKey;
   }
 
   function getHeaderPadding() {
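Note: a sketch of the new call shape from a caller's perspective (the local names are placeholders; `uploadForm.key` stands in for the CDN key the old API used to return):

import { createReadStream } from 'fs';
import { strictAssert } from '../util/assert';

async function demo(absoluteCiphertextPath: string): Promise<string> {
  const { server } = window.textsecure;
  strictAssert(server, 'WebAPI must be initialized');

  // Before: const cdnKey = await server.putEncryptedAttachment(bytes);
  // After: the caller fetches the upload form first, so the key is known
  // up front and putEncryptedAttachment can resolve to void.
  const uploadForm = await server.getAttachmentUploadForm();
  await server.putEncryptedAttachment(
    createReadStream(absoluteCiphertextPath),
    uploadForm
  );
  return uploadForm.key;
}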
ts/types/Attachment.ts
@@ -1012,13 +1012,13 @@ export type AttachmentDownloadableFromBackupTier = WithRequiredProperties<
   'backupLocator'
 >;
 
-export type DownloadedAttachment = WithRequiredProperties<
+export type LocallySavedAttachment = WithRequiredProperties<
   AttachmentType,
   'path'
 >;
 
 export type AttachmentReadyForBackup = WithRequiredProperties<
-  DownloadedAttachment,
+  LocallySavedAttachment,
   RequiredPropertiesForDecryption
 >;
 
@@ -1052,8 +1052,8 @@ export function isDownloadableFromBackupTier(
   return false;
 }
 
-export function isDownloadedToLocalFile(
+export function isAttachmentLocallySaved(
   attachment: AttachmentType
-): attachment is DownloadedAttachment {
+): attachment is LocallySavedAttachment {
   return Boolean(attachment.path);
 }
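Note: the rename is behavior-preserving; the guard still narrows on `path`. A reduced sketch of the narrowing, with the types simplified for illustration:

// Simplified stand-ins for the real types (illustration only).
type AttachmentType = { path?: string; size: number };
type LocallySavedAttachment = AttachmentType & { path: string };

function isAttachmentLocallySaved(
  attachment: AttachmentType
): attachment is LocallySavedAttachment {
  return Boolean(attachment.path);
}

function demo(attachment: AttachmentType): void {
  if (isAttachmentLocallySaved(attachment)) {
    // Narrowed: `path` is `string` here, no undefined check needed.
    console.log(attachment.path.length);
  }
}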
ts/util/uploadAttachment.ts
@@ -1,34 +1,46 @@
 // Copyright 2023 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
 
+import { createReadStream } from 'fs';
 import type {
   AttachmentWithHydratedData,
   UploadedAttachmentType,
 } from '../types/Attachment';
 import { MIMETypeToString } from '../types/MIME';
-import { padAndEncryptAttachment, getRandomBytes } from '../Crypto';
+import { getRandomBytes } from '../Crypto';
 import { strictAssert } from './assert';
+import { backupsService } from '../services/backups';
+import { tusUpload } from './uploads/tusProtocol';
+import { defaultFileReader } from './uploads/uploads';
+import type { AttachmentV3ResponseType } from '../textsecure/WebAPI';
+import {
+  type EncryptedAttachmentV2,
+  encryptAttachmentV2ToDisk,
+  safeUnlinkSync,
+  type PlaintextSourceType,
+} from '../AttachmentCrypto';
+import { missingCaseError } from './missingCaseError';
+
+const CDNS_SUPPORTING_TUS = new Set([3]);
 
 export async function uploadAttachment(
   attachment: AttachmentWithHydratedData
 ): Promise<UploadedAttachmentType> {
-  const keys = getRandomBytes(64);
-  const encrypted = padAndEncryptAttachment({
-    plaintext: attachment.data,
-    keys,
-  });
-
   const { server } = window.textsecure;
   strictAssert(server, 'WebAPI must be initialized');
 
-  const cdnKey = await server.putEncryptedAttachment(encrypted.ciphertext);
-  const size = attachment.data.byteLength;
+  const keys = getRandomBytes(64);
+
+  const { cdnKey, cdnNumber, encrypted } = await encryptAndUploadAttachment({
+    plaintext: { data: attachment.data },
+    keys,
+    uploadType: 'standard',
+  });
 
   return {
     cdnKey,
-    cdnNumber: 2,
+    cdnNumber,
     key: keys,
-    size,
+    size: attachment.data.byteLength,
     digest: encrypted.digest,
     plaintextHash: encrypted.plaintextHash,
@@ -41,3 +53,91 @@ export async function uploadAttachment(
     blurHash: attachment.blurHash,
   };
 }
+
+export async function encryptAndUploadAttachment({
+  plaintext,
+  keys,
+  uploadType,
+}: {
+  plaintext: PlaintextSourceType;
+  keys: Uint8Array;
+  uploadType: 'standard' | 'backup';
+}): Promise<{
+  cdnKey: string;
+  cdnNumber: number;
+  encrypted: EncryptedAttachmentV2;
+}> {
+  const { server } = window.textsecure;
+  strictAssert(server, 'WebAPI must be initialized');
+
+  let uploadForm: AttachmentV3ResponseType;
+  let absoluteCiphertextPath: string | undefined;
+
+  try {
+    switch (uploadType) {
+      case 'standard':
+        uploadForm = await server.getAttachmentUploadForm();
+        break;
+      case 'backup':
+        uploadForm = await server.getBackupMediaUploadForm(
+          await backupsService.credentials.getHeadersForToday()
+        );
+        break;
+      default:
+        throw missingCaseError(uploadType);
+    }
+
+    const encrypted = await encryptAttachmentV2ToDisk({
+      plaintext,
+      keys,
+    });
+
+    absoluteCiphertextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
+      encrypted.path
+    );
+
+    await uploadFile({
+      absoluteCiphertextPath,
+      ciphertextFileSize: encrypted.ciphertextSize,
+      uploadForm,
+    });
+
+    return { cdnKey: uploadForm.key, cdnNumber: uploadForm.cdn, encrypted };
+  } finally {
+    if (absoluteCiphertextPath) {
+      safeUnlinkSync(absoluteCiphertextPath);
+    }
+  }
+}
+
+export async function uploadFile({
+  absoluteCiphertextPath,
+  ciphertextFileSize,
+  uploadForm,
+}: {
+  absoluteCiphertextPath: string;
+  ciphertextFileSize: number;
+  uploadForm: AttachmentV3ResponseType;
+}): Promise<void> {
+  const { server } = window.textsecure;
+  strictAssert(server, 'WebAPI must be initialized');
+
+  if (CDNS_SUPPORTING_TUS.has(uploadForm.cdn)) {
+    const fetchFn = server.createFetchForAttachmentUpload(uploadForm);
+    await tusUpload({
+      endpoint: uploadForm.signedUploadLocation,
+      // the upload form headers are already included in the created fetch function
+      headers: {},
+      fileName: uploadForm.key,
+      filePath: absoluteCiphertextPath,
+      fileSize: ciphertextFileSize,
+      reader: defaultFileReader,
+      fetchFn,
+    });
+  } else {
+    await server.putEncryptedAttachment(
+      createReadStream(absoluteCiphertextPath),
+      uploadForm
+    );
+  }
+}
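Note: a usage sketch of the new helper (assumes window.textsecure and window.Signal.Migrations are initialized; the payload is illustrative):

import { getRandomBytes } from '../Crypto';
import { encryptAndUploadAttachment } from './uploadAttachment';

async function demo(data: Uint8Array): Promise<void> {
  const keys = getRandomBytes(64);

  // Encrypts to a temp file on disk, uploads via TUS when the form's CDN
  // supports it (otherwise a plain PUT), and always unlinks the ciphertext.
  const { cdnKey, cdnNumber, encrypted } = await encryptAndUploadAttachment({
    plaintext: { data },
    keys,
    uploadType: 'standard',
  });

  console.log(cdnKey, cdnNumber, encrypted.ciphertextSize);
}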
ts/util/uploads/tusProtocol.ts
@@ -20,6 +20,13 @@ function toLogId(input: string) {
   return Buffer.from(input).toString('base64').slice(0, 3);
 }
 
+function redactedUrl(endpoint: string) {
+  const redacted = new URL(endpoint);
+  redacted.search = '';
+  redacted.pathname = '';
+  return `${redacted}[REDACTED]`;
+}
+
 /**
  * This file is a standalone implementation of the TUS protocol.
  * Signal specific logic is in uploads.ts
@@ -94,7 +101,10 @@ export async function _tusCreateWithUploadRequest({
   signal?: AbortSignal;
   fetchFn?: FetchFunctionType;
 }): Promise<boolean> {
-  const logId = `tusProtocol: CreateWithUpload(${toLogId(fileName)})`;
+  const logId = `tusProtocol: CreateWithUpload(${toLogId(
+    fileName
+  )}): POST ${redactedUrl(endpoint)}`;
 
   if (onProgress != null) {
     addProgressHandler(readable, onProgress);
   }
@@ -160,7 +170,10 @@ export async function _tusGetCurrentOffsetRequest({
   signal?: AbortSignal;
   fetchFn?: FetchFunctionType;
 }): Promise<number> {
-  const logId = `tusProtocol: GetCurrentOffsetRequest(${toLogId(fileName)})`;
+  const logId = `tusProtocol: GetCurrentOffsetRequest(${toLogId(
+    fileName
+  )}): HEAD ${redactedUrl(endpoint)}`;
 
   log.info(`${logId} init`);
 
   const response = await fetchFn(`${endpoint}/${fileName}`, {
@@ -219,7 +232,9 @@ export async function _tusResumeUploadRequest({
   signal?: AbortSignal;
   fetchFn?: FetchFunctionType;
 }): Promise<boolean> {
-  const logId = `tusProtocol: ResumeUploadRequest(${toLogId(fileName)})`;
+  const logId = `tusProtocol: ResumeUploadRequest(${toLogId(
+    fileName
+  )}): PATCH ${redactedUrl(endpoint)}`;
   if (onProgress != null) {
     addProgressHandler(readable, onProgress);
   }
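Note: what redactedUrl yields for a typical endpoint (the URL below is made up):

// Query string and path are dropped; only the origin survives in logs.
function redactedUrl(endpoint: string): string {
  const redacted = new URL(endpoint);
  redacted.search = '';
  redacted.pathname = '';
  return `${redacted}[REDACTED]`;
}

console.log(redactedUrl('https://cdn3.example.com/upload/abc123?token=secret'));
// => https://cdn3.example.com/[REDACTED]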