Use streams to download attachments directly to disk

Co-authored-by: trevor-signal <131492920+trevor-signal@users.noreply.github.com>
Scott Nonnenberg 2023-10-30 09:24:28 -07:00 committed by GitHub
parent 2da49456c6
commit 99b2bc304e
48 changed files with 2297 additions and 356 deletions


@ -2571,6 +2571,14 @@
"messageformat": "This message was deleted.",
"description": "Shown in a message's bubble when the message has been deleted for everyone."
},
"icu:message--attachmentTooBig--one": {
"messageformat": "Attachment too large to display.",
"description": "Shown in a message bubble if no attachments are left on message when too-large attachments are dropped"
},
"icu:message--attachmentTooBig--multiple": {
"messageformat": "Some attachments are too large to display.",
"description": "Shown in a message bubble if any attachments are left on message when too-large attachments are dropped"
},
"icu:donation--missing": {
"messageformat": "Unable to fetch donation details",
"description": "Aria label for donation when we can't fetch the details."


@ -558,6 +558,60 @@ $message-padding-horizontal: 12px;
}
}
.module-message__attachment-too-big {
user-select: none;
margin-inline: -$message-padding-horizontal;
margin-top: -$message-padding-vertical;
margin-bottom: -$message-padding-vertical;
padding-top: $message-padding-vertical;
padding-bottom: $message-padding-vertical;
padding-inline: $message-padding-horizontal;
border-radius: 18px;
@include font-body-1-italic;
@include light-theme {
color: $color-gray-90;
border: 1px solid $color-gray-05;
background-color: $color-white;
background-image: none;
}
@include dark-theme {
color: $color-gray-05;
border: 1px solid $color-gray-75;
background-color: $color-gray-95;
background-image: none;
}
}
.module-message__attachment-too-big--content-above {
border-top-left-radius: 0;
border-top-right-radius: 0;
}
.module-message__attachment-too-big--content-below {
border-bottom-left-radius: 0;
border-bottom-right-radius: 0;
border-bottom: none;
margin-bottom: 7px;
}
.module-message__attachment-too-big--collapse-above--incoming {
border-top-left-radius: 4px;
}
.module-message__attachment-too-big--collapse-above--outgoing {
border-top-right-radius: 4px;
}
.module-message__attachment-too-big--collapse-below--incoming {
border-bottom-left-radius: 4px;
}
.module-message__attachment-too-big--collapse-below--outgoing {
border-bottom-right-radius: 4px;
}
.module-message__tap-to-view {
margin-top: 2px;
display: flex;
@ -1165,7 +1219,7 @@ $message-padding-horizontal: 12px;
pointer-events: none;
}
.module-message__metadata--deleted-for-everyone {
.module-message__metadata--outline-only-bubble {
@include light-theme {
color: $color-gray-60;
}
@ -1207,7 +1261,7 @@ $message-padding-horizontal: 12px;
color: $color-white-alpha-80;
}
}
.module-message__metadata__date--deleted-for-everyone {
.module-message__metadata__date--outline-only-bubble {
@include light-theme {
color: $color-gray-60;
}
@ -1319,7 +1373,7 @@ $message-padding-horizontal: 12px;
}
}
.module-message__metadata__status-icon--deleted-for-everyone {
.module-message__metadata__status-icon--outline-only-bubble {
@include light-theme {
background-color: $color-gray-60;
}
@ -1916,7 +1970,7 @@ $timer-icons: '55', '50', '45', '40', '35', '30', '25', '20', '15', '10', '05',
}
}
.module-expire-timer--deleted-for-everyone {
.module-expire-timer--outline-only-bubble {
@include light-theme {
background-color: $color-gray-60;
}
@ -2662,7 +2716,7 @@ button.ConversationDetails__action-button {
.module-image__border-overlay--with-border {
@include light-theme {
box-shadow: inset 0px 0px 0px 1px $color-black-alpha-20;
box-shadow: inset 0px 0px 0px 1px $color-black-alpha-085;
}
@include dark-theme {
box-shadow: inset 0px 0px 0px 1px $color-white-alpha-20;


@ -48,6 +48,8 @@ $color-white-alpha-90: rgba($color-white, 0.9);
$color-black-alpha-05: rgba($color-black, 0.05);
$color-black-alpha-06: rgba($color-black, 0.06);
$color-black-alpha-08: rgba($color-black, 0.08);
// Equivalent to gray-05 on a white background
$color-black-alpha-085: rgba($color-black, 0.085);
$color-black-alpha-12: rgba($color-black, 0.12);
$color-black-alpha-16: rgba($color-black, 0.16);
$color-black-alpha-20: rgba($color-black, 0.2);
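A quick check of the comment above (not part of the commit): compositing pure black at 8.5% opacity over a white background lands on the same channel value as Signal's lightest gray. The #e9e9e9 value assumed here for $color-gray-05 is not shown in this diff.

// Per-channel source-over compositing: fg at the given alpha over an opaque bg
const composite = (fg: number, bg: number, alpha: number): number =>
  Math.round(fg * alpha + bg * (1 - alpha));

composite(0, 255, 0.085); // 233 = 0xe9, i.e. #e9e9e9 (assumed $color-gray-05)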

ts/AttachmentCrypto.ts (new file, 784 lines)

@ -0,0 +1,784 @@
// Copyright 2020 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
/* eslint-disable max-classes-per-file */
import {
existsSync,
createReadStream,
createWriteStream,
unlinkSync,
} from 'fs';
import {
createDecipheriv,
createCipheriv,
createHash,
createHmac,
} from 'crypto';
import type { Cipher, Decipher, Hash, Hmac } from 'crypto';
import { ensureFile } from 'fs-extra';
import { Transform } from 'stream';
import { pipeline } from 'stream/promises';
import * as log from './logging/log';
import * as Errors from './types/errors';
import { HashType, CipherType } from './types/Crypto';
import { createName, getRelativePath } from './windows/attachments';
import {
constantTimeEqual,
getAttachmentSizeBucket,
getRandomBytes,
getZeroes,
} from './Crypto';
import { Environment } from './environment';
// This file was split from ts/Crypto.ts because it pulls things in from node, and
// too many things pull in Crypto.ts, so it broke storybook.
export const IV_LENGTH = 16;
export const KEY_LENGTH = 32;
export const ATTACHMENT_MAC_LENGTH = 32;
export type EncryptedAttachmentV2 = {
path: string;
digest: Uint8Array;
};
export async function encryptAttachmentV2({
keys,
plaintextAbsolutePath,
size,
dangerousTestOnlyIv,
}: {
keys: Readonly<Uint8Array>;
plaintextAbsolutePath: string;
size: number;
dangerousTestOnlyIv?: Readonly<Uint8Array>;
}): Promise<EncryptedAttachmentV2> {
const logId = 'encryptAttachmentV2';
if (keys.byteLength !== KEY_LENGTH * 2) {
throw new Error(`${logId}: Got invalid length attachment keys`);
}
if (!existsSync(plaintextAbsolutePath)) {
throw new Error(`${logId}: Target path doesn't exist!`);
}
// Create random output file
const relativeTargetPath = getRelativePath(createName());
const absoluteTargetPath =
window.Signal.Migrations.getAbsoluteAttachmentPath(relativeTargetPath);
await ensureFile(absoluteTargetPath);
// Create start and end streams
const readStream = createReadStream(plaintextAbsolutePath);
const writeStream = createWriteStream(absoluteTargetPath);
const aesKey = keys.slice(0, KEY_LENGTH);
const macKey = keys.slice(KEY_LENGTH, KEY_LENGTH * 2);
if (dangerousTestOnlyIv && window.getEnvironment() !== Environment.Test) {
throw new Error(`${logId}: Used dangerousTestOnlyIv outside tests!`);
}
const iv = dangerousTestOnlyIv || getRandomBytes(16);
const addPaddingTransform = new AddPaddingTransform(size);
const cipherTransform = new CipherTransform(iv, aesKey);
const addIvTransform = new AddIvTransform(iv);
const addMacTransform = new AddMacTransform(macKey);
const digestTransform = new DigestTransform();
try {
await pipeline(
readStream,
addPaddingTransform,
cipherTransform,
addIvTransform,
addMacTransform,
digestTransform,
writeStream
);
} catch (error) {
try {
readStream.close();
writeStream.close();
} catch (cleanupError) {
log.error(
`${logId}: Failed to clean up after error`,
Errors.toLogFormat(cleanupError)
);
}
if (existsSync(absoluteTargetPath)) {
unlinkSync(absoluteTargetPath);
}
throw error;
}
const { ourDigest } = digestTransform;
if (!ourDigest || !ourDigest.byteLength) {
throw new Error(`${logId}: Failed to generate ourDigest!`);
}
writeStream.close();
readStream.close();
return {
path: relativeTargetPath,
digest: ourDigest,
};
}
export async function decryptAttachmentV2({
ciphertextPath,
id,
keys,
size,
theirDigest,
}: {
ciphertextPath: string;
id: string;
keys: Readonly<Uint8Array>;
size: number;
theirDigest: Readonly<Uint8Array>;
}): Promise<string> {
const logId = `decryptAttachmentV2(${id})`;
if (keys.byteLength !== KEY_LENGTH * 2) {
throw new Error(`${logId}: Got invalid length attachment keys`);
}
if (!existsSync(ciphertextPath)) {
throw new Error(`${logId}: Target path doesn't exist!`);
}
// Create random output file
const relativeTargetPath = getRelativePath(createName());
const absoluteTargetPath =
window.Signal.Migrations.getAbsoluteAttachmentPath(relativeTargetPath);
await ensureFile(absoluteTargetPath);
// Create start and end streams
const readStream = createReadStream(ciphertextPath);
const writeStream = createWriteStream(absoluteTargetPath);
const aesKey = keys.slice(0, KEY_LENGTH);
const macKey = keys.slice(KEY_LENGTH, KEY_LENGTH * 2);
const digestTransform = new DigestTransform();
const macTransform = new MacTransform(macKey);
const decipherTransform = new DecipherTransform(aesKey);
const coreDecryptionTransform = new CoreDecryptionTransform(
decipherTransform
);
const limitLengthTransform = new LimitLengthTransform(size);
try {
await pipeline(
readStream,
digestTransform,
macTransform,
coreDecryptionTransform,
decipherTransform,
limitLengthTransform,
writeStream
);
} catch (error) {
try {
readStream.close();
writeStream.close();
} catch (cleanupError) {
log.error(
`${logId}: Failed to clean up after error`,
Errors.toLogFormat(cleanupError)
);
}
if (existsSync(absoluteTargetPath)) {
unlinkSync(absoluteTargetPath);
}
throw error;
}
const { ourMac } = macTransform;
const { theirMac } = coreDecryptionTransform;
if (!ourMac || !ourMac.byteLength) {
throw new Error(`${logId}: Failed to generate ourMac!`);
}
if (!theirMac || !theirMac.byteLength) {
throw new Error(`${logId}: Failed to find theirMac!`);
}
if (!constantTimeEqual(ourMac, theirMac)) {
throw new Error(`${logId}: Bad MAC`);
}
const { ourDigest } = digestTransform;
if (!ourDigest || !ourDigest.byteLength) {
throw new Error(`${logId}: Failed to generate ourDigest!`);
}
if (!constantTimeEqual(ourDigest, theirDigest)) {
throw new Error(`${logId}: Bad digest`);
}
writeStream.close();
readStream.close();
return relativeTargetPath;
}
// A very simple transform that doesn't modify the stream, but does calculate a digest
// across all data it gets.
class DigestTransform extends Transform {
private digestBuilder: Hash;
public ourDigest: Uint8Array | undefined;
constructor() {
super();
this.digestBuilder = createHash(HashType.size256);
}
override _flush(done: (error?: Error) => void) {
try {
this.ourDigest = this.digestBuilder.digest();
} catch (error) {
done(error);
return;
}
done();
}
override _transform(
chunk: Buffer | undefined,
_encoding: string,
done: (error?: Error) => void
) {
if (!chunk || chunk.byteLength === 0) {
done();
return;
}
try {
this.digestBuilder.update(chunk);
this.push(chunk);
} catch (error) {
done(error);
return;
}
done();
}
}
// A more complex transform that also doesn't modify the stream, calculating an HMAC
// across everything but the last bytes of the stream.
class MacTransform extends Transform {
public ourMac: Uint8Array | undefined;
private macBuilder: Hmac;
private lastBytes: Uint8Array | undefined;
constructor(macKey: Uint8Array) {
super();
if (macKey.byteLength !== KEY_LENGTH) {
throw new Error(
`MacTransform: macKey should be ${KEY_LENGTH} bytes, got ${macKey.byteLength} bytes`
);
}
this.macBuilder = createHmac('sha256', Buffer.from(macKey));
}
override _flush(done: (error?: Error) => void) {
try {
this.ourMac = this.macBuilder.digest();
} catch (error) {
done(error);
return;
}
done();
}
override _transform(
chunk: Buffer | undefined,
_encoding: string,
done: (error?: Error) => void
) {
if (!chunk || chunk.byteLength === 0) {
done();
return;
}
try {
// We'll continue building up data if our chunk sizes are too small to fit the MAC
const data = this.lastBytes
? Buffer.concat([this.lastBytes, chunk])
: chunk;
// Compute new last bytes from this chunk
const lastBytesIndex = Math.max(
0,
data.byteLength - ATTACHMENT_MAC_LENGTH
);
this.lastBytes = data.subarray(lastBytesIndex);
// Update hmac with data we know is not the last bytes
if (lastBytesIndex > 0) {
this.macBuilder.update(data.subarray(0, lastBytesIndex));
}
this.push(chunk);
} catch (error) {
done(error);
return;
}
done();
}
}
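// Editor's note (illustrative only, not part of the commit): the tail buffering above
// works across arbitrarily small chunks. With ATTACHMENT_MAC_LENGTH = 32 and incoming
// chunks of 20, 20, and 40 bytes (80 bytes total, the last 32 being the appended MAC):
//   chunk 1: data = bytes 0-19,  HMAC gets nothing yet,  lastBytes = bytes 0-19
//   chunk 2: data = bytes 0-39,  HMAC gets bytes 0-7,    lastBytes = bytes 8-39
//   chunk 3: data = bytes 8-79,  HMAC gets bytes 8-47,   lastBytes = bytes 48-79
// By _flush the HMAC has covered bytes 0-47, i.e. everything except the trailing MAC,
// matching what the sender's AddMacTransform covered before appending it.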
// The core of the decryption algorithm - it grabs the iv and initializes the
// DecipherTransform provided to it. It also modifies the stream, only passing on the
// data between the iv and the mac at the end.
class CoreDecryptionTransform extends Transform {
private lastBytes: Uint8Array | undefined;
public iv: Uint8Array | undefined;
public theirMac: Uint8Array | undefined;
constructor(private decipherTransform: DecipherTransform) {
super();
}
override _flush(done: (error?: Error) => void) {
try {
if (
!this.lastBytes ||
this.lastBytes.byteLength !== ATTACHMENT_MAC_LENGTH
) {
throw new Error(
`CoreDecryptionTransform: didn't get expected ${ATTACHMENT_MAC_LENGTH} bytes for mac, got ${this.lastBytes?.byteLength}!`
);
}
this.theirMac = this.lastBytes;
} catch (error) {
done(error);
return;
}
done();
}
override _transform(
chunk: Buffer | undefined,
_encoding: string,
done: (error?: Error) => void
) {
if (!chunk || chunk.byteLength === 0) {
done();
return;
}
try {
let data = chunk;
// Grab the first bytes from data if we haven't already
if (!this.iv) {
this.iv = chunk.subarray(0, IV_LENGTH);
data = chunk.subarray(IV_LENGTH);
if (this.iv.byteLength !== IV_LENGTH) {
throw new Error(
`CoreDecryptionTransform: didn't get expected ${IV_LENGTH} bytes for iv, got ${this.iv.byteLength}!`
);
}
this.decipherTransform.initializeDecipher(this.iv);
}
// Add previous last bytes to this new chunk
if (this.lastBytes) {
data = Buffer.concat([this.lastBytes, data]);
}
// Compute new last bytes from this chunk - if this chunk doesn't fit the MAC, we
// build across multiple chunks to get there.
const macIndex = Math.max(0, data.byteLength - ATTACHMENT_MAC_LENGTH);
this.lastBytes = data.subarray(macIndex);
if (macIndex > 0) {
this.push(data.subarray(0, macIndex));
}
} catch (error) {
done(error);
return;
}
done();
}
}
// The transform that does the actual deciphering. It doesn't have enough information to
// start working until the first chunk is processed upstream, hence its public
// initializeDecipher() function.
class DecipherTransform extends Transform {
private decipher: Decipher | undefined;
constructor(private aesKey: Uint8Array) {
super();
if (aesKey.byteLength !== KEY_LENGTH) {
throw new Error(
`DecipherTransform: aesKey should be ${KEY_LENGTH} bytes, got ${aesKey.byteLength} bytes`
);
}
}
public initializeDecipher(iv: Uint8Array) {
if (iv.byteLength !== IV_LENGTH) {
throw new Error(
`DecipherTransform: iv should be ${IV_LENGTH} bytes, got ${iv.byteLength} bytes`
);
}
this.decipher = createDecipheriv(
CipherType.AES256CBC,
Buffer.from(this.aesKey),
Buffer.from(iv)
);
}
override _flush(done: (error?: Error) => void) {
if (!this.decipher) {
done(
new Error(
"DecipherTransform: _flush called, but decipher isn't initialized"
)
);
return;
}
try {
this.push(this.decipher.final());
} catch (error) {
done(error);
return;
}
done();
}
override _transform(
chunk: Buffer | undefined,
_encoding: string,
done: (error?: Error) => void
) {
if (!this.decipher) {
done(
new Error(
"DecipherTransform: got a chunk, but decipher isn't initialized"
)
);
return;
}
if (!chunk || chunk.byteLength === 0) {
done();
return;
}
try {
this.push(this.decipher.update(chunk));
} catch (error) {
done(error);
return;
}
done();
}
}
// A simple transform that limits the provided data to `size` bytes. We use this to
// discard the padding on the incoming plaintext data.
class LimitLengthTransform extends Transform {
private bytesWritten = 0;
constructor(private size: number) {
super();
}
override _transform(
chunk: Buffer | undefined,
_encoding: string,
done: (error?: Error) => void
) {
if (!chunk || chunk.byteLength === 0) {
done();
return;
}
try {
const chunkLength = chunk.byteLength;
const sizeLeft = this.size - this.bytesWritten;
if (sizeLeft >= chunkLength) {
this.bytesWritten += chunkLength;
this.push(chunk);
} else if (sizeLeft > 0) {
this.bytesWritten += sizeLeft;
this.push(chunk.subarray(0, sizeLeft));
}
} catch (error) {
done(error);
return;
}
done();
}
}
// This is an unusual transform, in that it can produce quite a bit more data than it is
// provided. That's because it computes a bucket size for the provided size, which may
// be quite a bit bigger than the attachment, and then needs to provide those zeroes
// at the end of the stream.
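// Editor's note (illustrative only, not part of the commit): padding is pushed in
// 64 KiB chunks. Using the fixture from the tests below, ghost-kitty.mp4 needs
// 126,066 bytes of padding to reach its size bucket, so paddingChunksToWrite becomes
// one 65,536-byte chunk of zeroes followed by a 60,530-byte remainder chunk.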
const PADDING_CHUNK_SIZE = 64 * 1024;
class AddPaddingTransform extends Transform {
private bytesWritten = 0;
private targetLength: number;
private paddingChunksToWrite: Array<number> = [];
private paddingCallback: ((error?: Error) => void) | undefined;
constructor(private size: number) {
super();
this.targetLength = getAttachmentSizeBucket(size);
}
override _read(size: number): void {
if (this.paddingChunksToWrite.length > 0) {
// Restart our efforts to push padding downstream
this.pushPaddingChunks();
} else {
Transform.prototype._read.call(this, size);
}
}
public pushPaddingChunks(): boolean {
while (this.paddingChunksToWrite.length > 0) {
const [first, ...rest] = this.paddingChunksToWrite;
this.paddingChunksToWrite = rest;
const zeroes = getZeroes(first);
if (!this.push(zeroes)) {
// We shouldn't push any more; if we have more to push, we'll do it after a read()
break;
}
}
if (this.paddingChunksToWrite.length > 0) {
return false;
}
this.paddingCallback?.();
return true;
}
override _transform(
chunk: Buffer | undefined,
_encoding: string,
done: (error?: Error) => void
) {
if (!chunk || chunk.byteLength === 0) {
done();
return;
}
try {
const chunkLength = chunk.byteLength;
const contentsStillNeeded = this.size - this.bytesWritten;
if (contentsStillNeeded >= chunkLength) {
this.push(chunk);
this.bytesWritten += chunkLength;
} else if (contentsStillNeeded > 0) {
throw new Error(
`AddPaddingTransform: chunk length was ${chunkLength} but only ${contentsStillNeeded} bytes needed to get to size ${this.size}`
);
}
if (this.bytesWritten === this.size) {
const paddingNeeded = this.targetLength - this.size;
const chunks = Math.floor(paddingNeeded / PADDING_CHUNK_SIZE);
const remainder = paddingNeeded % PADDING_CHUNK_SIZE;
for (let i = 0; i < chunks; i += 1) {
this.paddingChunksToWrite.push(PADDING_CHUNK_SIZE);
}
if (remainder > 0) {
this.paddingChunksToWrite.push(remainder);
}
if (!this.pushPaddingChunks()) {
// If we didn't push all chunks, we shouldn't call done - we'll keep it around
// to call when we're actually done.
this.paddingCallback = done;
return;
}
}
} catch (error) {
done(error);
return;
}
done();
}
}
// The transform that does the actual ciphering; quite simple in that it applies the
// cipher to all incoming data, and can initialize itself fully in its constructor.
class CipherTransform extends Transform {
private cipher: Cipher;
constructor(private iv: Uint8Array, private aesKey: Uint8Array) {
super();
if (aesKey.byteLength !== KEY_LENGTH) {
throw new Error(
`CipherTransform: aesKey should be ${KEY_LENGTH} bytes, got ${aesKey.byteLength} bytes`
);
}
if (iv.byteLength !== IV_LENGTH) {
throw new Error(
`CipherTransform: iv should be ${IV_LENGTH} bytes, got ${iv.byteLength} bytes`
);
}
this.cipher = createCipheriv(
CipherType.AES256CBC,
Buffer.from(this.aesKey),
Buffer.from(this.iv)
);
}
override _flush(done: (error?: Error) => void) {
try {
this.push(this.cipher.final());
} catch (error) {
done(error);
return;
}
done();
}
override _transform(
chunk: Buffer | undefined,
_encoding: string,
done: (error?: Error) => void
) {
if (!chunk || chunk.byteLength === 0) {
done();
return;
}
try {
this.push(this.cipher.update(chunk));
} catch (error) {
done(error);
return;
}
done();
}
}
// This very simple transform adds the provided iv data to the beginning of the stream.
class AddIvTransform extends Transform {
public haveAddedIv = false;
constructor(private iv: Uint8Array) {
super();
if (iv.byteLength !== IV_LENGTH) {
throw new Error(
`AddIvTransform: iv should be ${IV_LENGTH} bytes, got ${iv.byteLength} bytes`
);
}
}
override _transform(
chunk: Buffer | undefined,
_encoding: string,
done: (error?: Error) => void
) {
if (!chunk || chunk.byteLength === 0) {
done();
return;
}
try {
if (!this.haveAddedIv) {
this.push(this.iv);
this.haveAddedIv = true;
}
this.push(chunk);
} catch (error) {
done(error);
return;
}
done();
}
}
// This transform both calculates the mac and adds it to the end of the stream.
class AddMacTransform extends Transform {
public ourMac: Uint8Array | undefined;
private macBuilder: Hmac;
constructor(macKey: Uint8Array) {
super();
if (macKey.byteLength !== KEY_LENGTH) {
throw new Error(
`AddMacTransform: macKey should be ${KEY_LENGTH} bytes, got ${macKey.byteLength} bytes`
);
}
this.macBuilder = createHmac('sha256', Buffer.from(macKey));
}
override _flush(done: (error?: Error) => void) {
try {
this.ourMac = this.macBuilder.digest();
this.push(this.ourMac);
} catch (error) {
done(error);
return;
}
done();
}
override _transform(
chunk: Buffer | undefined,
_encoding: string,
done: (error?: Error) => void
) {
if (!chunk || chunk.byteLength === 0) {
done();
return;
}
try {
this.macBuilder.update(chunk);
this.push(chunk);
} catch (error) {
done(error);
return;
}
done();
}
}
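A minimal usage sketch for the new streaming API above (mirroring the v2 round-trip test added further down in this commit; the attachment-path helper and the 64-byte key layout are the ones used there):

import { encryptAttachmentV2, decryptAttachmentV2 } from './AttachmentCrypto';
import { getRandomBytes } from './Crypto';

async function roundTrip(plaintextAbsolutePath: string, size: number) {
  // 32 bytes of AES key followed by 32 bytes of MAC key
  const keys = getRandomBytes(64);

  // Encrypts straight from disk; returns a path relative to the attachments folder
  const { path, digest } = await encryptAttachmentV2({
    keys,
    plaintextAbsolutePath,
    size,
  });

  // Decrypts to another on-disk file, verifying the MAC and digest along the way
  return decryptAttachmentV2({
    ciphertextPath: window.Signal.Migrations.getAbsoluteAttachmentPath(path),
    id: 'example',
    keys,
    size,
    theirDigest: digest,
  });
}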


@ -10,6 +10,7 @@ import { calculateAgreement, generateKeyPair } from './Curve';
import { HashType, CipherType } from './types/Crypto';
import { ProfileDecryptError } from './types/errors';
import { getBytesSubarray } from './util/uuidToBytes';
import { Environment } from './environment';
export { HashType, CipherType };
@ -173,8 +174,8 @@ export function verifyAccessKey(
}
const IV_LENGTH = 16;
const MAC_LENGTH = 16;
const NONCE_LENGTH = 16;
const SYMMETRIC_MAC_LENGTH = 16;
export function encryptSymmetric(
key: Uint8Array,
@ -187,7 +188,10 @@ export function encryptSymmetric(
const macKey = hmacSha256(key, cipherKey);
const ciphertext = encryptAes256CbcPkcsPadding(cipherKey, plaintext, iv);
const mac = getFirstBytes(hmacSha256(macKey, ciphertext), MAC_LENGTH);
const mac = getFirstBytes(
hmacSha256(macKey, ciphertext),
SYMMETRIC_MAC_LENGTH
);
return Bytes.concatenate([nonce, ciphertext, mac]);
}
@ -202,18 +206,21 @@ export function decryptSymmetric(
const ciphertext = getBytesSubarray(
data,
NONCE_LENGTH,
data.byteLength - NONCE_LENGTH - MAC_LENGTH
data.byteLength - NONCE_LENGTH - SYMMETRIC_MAC_LENGTH
);
const theirMac = getBytesSubarray(
data,
data.byteLength - MAC_LENGTH,
MAC_LENGTH
data.byteLength - SYMMETRIC_MAC_LENGTH,
SYMMETRIC_MAC_LENGTH
);
const cipherKey = hmacSha256(key, nonce);
const macKey = hmacSha256(key, cipherKey);
const ourMac = getFirstBytes(hmacSha256(macKey, ciphertext), MAC_LENGTH);
const ourMac = getFirstBytes(
hmacSha256(macKey, ciphertext),
SYMMETRIC_MAC_LENGTH
);
if (!constantTimeEqual(theirMac, ourMac)) {
throw new Error(
'decryptSymmetric: Failed to decrypt; MAC verification failed'
@ -379,7 +386,7 @@ function verifyDigest(data: Uint8Array, theirDigest: Uint8Array): void {
}
}
export function decryptAttachment(
export function decryptAttachmentV1(
encryptedBin: Uint8Array,
keys: Uint8Array,
theirDigest?: Uint8Array
@ -411,20 +418,31 @@ export function decryptAttachment(
return decryptAes256CbcPkcsPadding(aesKey, ciphertext, iv);
}
export function encryptAttachment(
plaintext: Readonly<Uint8Array>,
keys: Readonly<Uint8Array>
): EncryptedAttachment {
export function encryptAttachment({
plaintext,
keys,
dangerousTestOnlyIv,
}: {
plaintext: Readonly<Uint8Array>;
keys: Readonly<Uint8Array>;
dangerousTestOnlyIv?: Readonly<Uint8Array>;
}): EncryptedAttachment {
const logId = 'encryptAttachment';
if (!(plaintext instanceof Uint8Array)) {
throw new TypeError(
`\`plaintext\` must be an \`Uint8Array\`; got: ${typeof plaintext}`
`${logId}: \`plaintext\` must be an \`Uint8Array\`; got: ${typeof plaintext}`
);
}
if (keys.byteLength !== 64) {
throw new Error('Got invalid length attachment keys');
throw new Error(`${logId}: invalid length attachment keys`);
}
const iv = getRandomBytes(16);
if (dangerousTestOnlyIv && window.getEnvironment() !== Environment.Test) {
throw new Error(`${logId}: Used dangerousTestOnlyIv outside tests!`);
}
const iv = dangerousTestOnlyIv || getRandomBytes(16);
const aesKey = keys.slice(0, 32);
const macKey = keys.slice(32, 64);
@ -450,15 +468,24 @@ export function getAttachmentSizeBucket(size: number): number {
);
}
export function padAndEncryptAttachment(
data: Readonly<Uint8Array>,
keys: Readonly<Uint8Array>
): EncryptedAttachment {
const size = data.byteLength;
export function padAndEncryptAttachment({
plaintext,
keys,
dangerousTestOnlyIv,
}: {
plaintext: Readonly<Uint8Array>;
keys: Readonly<Uint8Array>;
dangerousTestOnlyIv?: Readonly<Uint8Array>;
}): EncryptedAttachment {
const size = plaintext.byteLength;
const paddedSize = getAttachmentSizeBucket(size);
const padding = getZeroes(paddedSize - size);
return encryptAttachment(Bytes.concatenate([data, padding]), keys);
return encryptAttachment({
plaintext: Bytes.concatenate([plaintext, padding]),
keys,
dangerousTestOnlyIv,
});
}
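// Editor's note (illustrative only, not part of the commit): call sites elsewhere in
// this diff now pass an options object, mirroring the tests below:
//
//   const encrypted = padAndEncryptAttachment({
//     plaintext: fileBytes, // Uint8Array
//     keys,                 // 64 bytes: 32-byte AES key + 32-byte MAC key
//   });
//   // encrypted.ciphertext and encrypted.digest are then used by the callers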
export function encryptProfile(data: Uint8Array, key: Uint8Array): Uint8Array {


@ -45,6 +45,7 @@ export type ConfigKeyType =
| 'desktop.textFormatting'
| 'desktop.usernames'
| 'global.attachments.maxBytes'
| 'global.attachments.maxReceiveBytes'
| 'global.calling.maxGroupCallRingSize'
| 'global.groupsv2.groupSizeHardLimit'
| 'global.groupsv2.maxGroupSize'


@ -7,20 +7,20 @@ import classNames from 'classnames';
import { getIncrement, getTimerBucket } from '../../util/timer';
export type Props = {
deletedForEveryone?: boolean;
direction?: 'incoming' | 'outgoing';
expirationLength: number;
expirationTimestamp?: number;
isOutlineOnlyBubble?: boolean;
withImageNoCaption?: boolean;
withSticker?: boolean;
withTapToViewExpired?: boolean;
};
export function ExpireTimer({
deletedForEveryone,
direction,
expirationLength,
expirationTimestamp,
isOutlineOnlyBubble,
withImageNoCaption,
withSticker,
withTapToViewExpired,
@ -44,7 +44,7 @@ export function ExpireTimer({
'module-expire-timer',
`module-expire-timer--${bucket}`,
direction ? `module-expire-timer--${direction}` : null,
deletedForEveryone ? 'module-expire-timer--deleted-for-everyone' : null,
isOutlineOnlyBubble ? 'module-expire-timer--outline-only-bubble' : null,
withTapToViewExpired
? `module-expire-timer--${direction}-with-tap-to-view-expired`
: null,


@ -75,13 +75,16 @@ function getCurves({
curveTopRight = CurveType.Normal;
}
if (shouldCollapseBelow && direction === 'incoming') {
if (withContentBelow) {
curveBottomLeft = CurveType.None;
curveBottomRight = CurveType.None;
} else if (shouldCollapseBelow && direction === 'incoming') {
curveBottomLeft = CurveType.Tiny;
curveBottomRight = CurveType.None;
} else if (shouldCollapseBelow && direction === 'outgoing') {
curveBottomLeft = CurveType.None;
curveBottomRight = CurveType.Tiny;
} else if (!withContentBelow) {
} else {
curveBottomLeft = CurveType.Normal;
curveBottomRight = CurveType.Normal;
}


@ -284,6 +284,7 @@ export type PropsData = {
reactions?: ReactionViewerProps['reactions'];
deletedForEveryone?: boolean;
attachmentDroppedDueToSize?: boolean;
canDeleteForEveryone: boolean;
isBlocked: boolean;
@ -565,6 +566,7 @@ export class Message extends React.PureComponent<Props, State> {
private getMetadataPlacement(
{
attachments,
attachmentDroppedDueToSize,
deletedForEveryone,
direction,
expirationLength,
@ -599,12 +601,16 @@ export class Message extends React.PureComponent<Props, State> {
return MetadataPlacement.Bottom;
}
if (!text && !deletedForEveryone) {
if (!text && !deletedForEveryone && !attachmentDroppedDueToSize) {
return isAudio(attachments)
? MetadataPlacement.RenderedByMessageAudioComponent
: MetadataPlacement.Bottom;
}
if (!text && attachmentDroppedDueToSize) {
return MetadataPlacement.InlineWithText;
}
if (this.canRenderStickerLikeEmoji()) {
return MetadataPlacement.Bottom;
}
@ -796,6 +802,7 @@ export class Message extends React.PureComponent<Props, State> {
}
const {
attachmentDroppedDueToSize,
deletedForEveryone,
direction,
expirationLength,
@ -822,11 +829,14 @@ export class Message extends React.PureComponent<Props, State> {
direction={direction}
expirationLength={expirationLength}
expirationTimestamp={expirationTimestamp}
hasText={Boolean(text)}
hasText={Boolean(text || attachmentDroppedDueToSize)}
i18n={i18n}
id={id}
isEditedMessage={isEditedMessage}
isInline={isInline}
isOutlineOnlyBubble={
deletedForEveryone || (attachmentDroppedDueToSize && !text)
}
isShowingImage={this.isShowingImage()}
isSticker={isStickerLike}
isTapToViewExpired={isTapToViewExpired}
@ -878,6 +888,7 @@ export class Message extends React.PureComponent<Props, State> {
public renderAttachment(): JSX.Element | null {
const {
attachments,
attachmentDroppedDueToSize,
conversationId,
direction,
expirationLength,
@ -912,7 +923,7 @@ export class Message extends React.PureComponent<Props, State> {
const firstAttachment = attachments[0];
// For attachments which aren't full-frame
const withContentBelow = Boolean(text);
const withContentBelow = Boolean(text || attachmentDroppedDueToSize);
const withContentAbove = Boolean(quote) || this.shouldRenderAuthor();
const displayImage = canDisplayImage(attachments);
@ -1274,6 +1285,62 @@ export class Message extends React.PureComponent<Props, State> {
);
}
public renderAttachmentTooBig(): JSX.Element | null {
const {
attachments,
attachmentDroppedDueToSize,
direction,
i18n,
quote,
shouldCollapseAbove,
shouldCollapseBelow,
text,
} = this.props;
const { metadataWidth } = this.state;
if (!attachmentDroppedDueToSize) {
return null;
}
const labelText = attachments?.length
? i18n('icu:message--attachmentTooBig--multiple')
: i18n('icu:message--attachmentTooBig--one');
const isContentAbove = quote || attachments?.length;
const isContentBelow = Boolean(text);
const willCollapseAbove = shouldCollapseAbove && !isContentAbove;
const willCollapseBelow = shouldCollapseBelow && !isContentBelow;
const maybeSpacer = text
? undefined
: this.getMetadataPlacement() === MetadataPlacement.InlineWithText && (
<MessageTextMetadataSpacer metadataWidth={metadataWidth} />
);
return (
<div
className={classNames(
'module-message__attachment-too-big',
isContentAbove
? 'module-message__attachment-too-big--content-above'
: null,
isContentBelow
? 'module-message__attachment-too-big--content-below'
: null,
willCollapseAbove
? `module-message__attachment-too-big--collapse-above--${direction}`
: null,
willCollapseBelow
? `module-message__attachment-too-big--collapse-below--${direction}`
: null
)}
>
{labelText}
{maybeSpacer}
</div>
);
}
public renderGiftBadge(): JSX.Element | null {
const { conversationTitle, direction, getPreferredBadge, giftBadge, i18n } =
this.props;
@ -1757,6 +1824,19 @@ export class Message extends React.PureComponent<Props, State> {
);
}
private getContents(): string | undefined {
const { deletedForEveryone, direction, i18n, status, text } = this.props;
if (deletedForEveryone) {
return i18n('icu:message--deletedForEveryone');
}
if (direction === 'incoming' && status === 'error') {
return i18n('icu:incomingError');
}
return text;
}
public renderText(): JSX.Element | null {
const {
bodyRanges,
@ -1772,17 +1852,12 @@ export class Message extends React.PureComponent<Props, State> {
showConversation,
showSpoiler,
status,
text,
textAttachment,
} = this.props;
const { metadataWidth } = this.state;
// eslint-disable-next-line no-nested-ternary
const contents = deletedForEveryone
? i18n('icu:message--deletedForEveryone')
: direction === 'incoming' && status === 'error'
? i18n('icu:incomingError')
: text;
const contents = this.getContents();
if (!contents) {
return null;
@ -2296,7 +2371,7 @@ export class Message extends React.PureComponent<Props, State> {
}
public renderContents(): JSX.Element | null {
const { giftBadge, isTapToView, deletedForEveryone } = this.props;
const { deletedForEveryone, giftBadge, isTapToView } = this.props;
if (deletedForEveryone) {
return (
@ -2326,6 +2401,7 @@ export class Message extends React.PureComponent<Props, State> {
{this.renderStoryReplyContext()}
{this.renderAttachment()}
{this.renderPreview()}
{this.renderAttachmentTooBig()}
{this.renderPayment()}
{this.renderEmbeddedContact()}
{this.renderText()}
@ -2534,6 +2610,7 @@ export class Message extends React.PureComponent<Props, State> {
public renderContainer(): JSX.Element {
const {
attachments,
attachmentDroppedDueToSize,
conversationColor,
customColor,
deletedForEveryone,
@ -2597,7 +2674,12 @@ export class Message extends React.PureComponent<Props, State> {
const containerStyles = {
width: shouldUseWidth ? width : undefined,
};
if (!isStickerLike && !deletedForEveryone && direction === 'outgoing') {
if (
!isStickerLike &&
!deletedForEveryone &&
!(attachmentDroppedDueToSize && !text) &&
direction === 'outgoing'
) {
Object.assign(containerStyles, getCustomColorStyle(customColor));
}


@ -28,6 +28,7 @@ type PropsType = {
id: string;
isEditedMessage?: boolean;
isInline?: boolean;
isOutlineOnlyBubble?: boolean;
isShowingImage: boolean;
isSticker?: boolean;
isTapToViewExpired?: boolean;
@ -55,6 +56,7 @@ export const MessageMetadata = forwardRef<HTMLDivElement, Readonly<PropsType>>(
i18n,
id,
isEditedMessage,
isOutlineOnlyBubble,
isInline,
isShowingImage,
isSticker,
@ -136,8 +138,8 @@ export const MessageMetadata = forwardRef<HTMLDivElement, Readonly<PropsType>>(
className={classNames({
'module-message__metadata__date': true,
'module-message__metadata__date--with-sticker': isSticker,
'module-message__metadata__date--deleted-for-everyone':
deletedForEveryone,
'module-message__metadata__date--outline-only-bubble':
isOutlineOnlyBubble,
[`module-message__metadata__date--${direction}`]: !isSticker,
'module-message__metadata__date--with-image-no-caption':
withImageNoCaption,
@ -149,9 +151,9 @@ export const MessageMetadata = forwardRef<HTMLDivElement, Readonly<PropsType>>(
} else {
timestampNode = (
<MessageTimestamp
deletedForEveryone={deletedForEveryone}
direction={metadataDirection}
i18n={i18n}
isOutlineOnlyBubble={isOutlineOnlyBubble}
module="module-message__metadata__date"
timestamp={timestamp}
withImageNoCaption={withImageNoCaption}
@ -195,7 +197,7 @@ export const MessageMetadata = forwardRef<HTMLDivElement, Readonly<PropsType>>(
'module-message__metadata',
isInline && 'module-message__metadata--inline',
withImageNoCaption && 'module-message__metadata--with-image-no-caption',
deletedForEveryone && 'module-message__metadata--deleted-for-everyone'
isOutlineOnlyBubble && 'module-message__metadata--outline-only-bubble'
);
const children = (
<>
@ -212,7 +214,7 @@ export const MessageMetadata = forwardRef<HTMLDivElement, Readonly<PropsType>>(
{expirationLength ? (
<ExpireTimer
direction={metadataDirection}
deletedForEveryone={deletedForEveryone}
isOutlineOnlyBubble={isOutlineOnlyBubble}
expirationLength={expirationLength}
expirationTimestamp={expirationTimestamp}
withImageNoCaption={withImageNoCaption}
@ -240,8 +242,8 @@ export const MessageMetadata = forwardRef<HTMLDivElement, Readonly<PropsType>>(
withImageNoCaption
? 'module-message__metadata__status-icon--with-image-no-caption'
: null,
deletedForEveryone
? 'module-message__metadata__status-icon--deleted-for-everyone'
isOutlineOnlyBubble
? 'module-message__metadata__status-icon--outline-only-bubble'
: null,
isTapToViewExpired
? 'module-message__metadata__status-icon--with-tap-to-view-expired'


@ -12,9 +12,9 @@ import { Time } from '../Time';
import { useNowThatUpdatesEveryMinute } from '../../hooks/useNowThatUpdatesEveryMinute';
export type Props = {
deletedForEveryone?: boolean;
direction?: 'incoming' | 'outgoing';
i18n: LocalizerType;
isOutlineOnlyBubble?: boolean;
isRelativeTime?: boolean;
module?: string;
timestamp: number;
@ -24,10 +24,10 @@ export type Props = {
};
export function MessageTimestamp({
deletedForEveryone,
direction,
i18n,
isRelativeTime,
isOutlineOnlyBubble,
module,
timestamp,
withImageNoCaption,
@ -47,7 +47,7 @@ export function MessageTimestamp({
: null,
withImageNoCaption ? `${moduleName}--with-image-no-caption` : null,
withSticker ? `${moduleName}--with-sticker` : null,
deletedForEveryone ? `${moduleName}--deleted-for-everyone` : null
isOutlineOnlyBubble ? `${moduleName}--outline-only-bubble` : null
)}
timestamp={timestamp}
>


@ -244,6 +244,7 @@ const renderAudioAttachment: Props['renderAudioAttachment'] = props => (
const createProps = (overrideProps: Partial<Props> = {}): Props => ({
attachments: overrideProps.attachments,
attachmentDroppedDueToSize: overrideProps.attachmentDroppedDueToSize || false,
author: overrideProps.author || getDefaultConversation(),
bodyRanges: overrideProps.bodyRanges,
canCopy: true,
@ -835,6 +836,25 @@ CanDeleteForEveryone.args = {
direction: 'outgoing',
};
export function AttachmentTooBig(): JSX.Element {
const propsSent = createProps({
conversationType: 'direct',
attachmentDroppedDueToSize: true,
});
return <>{renderBothDirections(propsSent)}</>;
}
export function AttachmentTooBigWithText(): JSX.Element {
const propsSent = createProps({
conversationType: 'direct',
attachmentDroppedDueToSize: true,
text: 'Check out this file!',
});
return <>{renderBothDirections(propsSent)}</>;
}
export const Error = Template.bind({});
Error.args = {
status: 'error',
@ -1233,6 +1253,51 @@ MultipleImages5.args = {
status: 'sent',
};
export const MultipleImagesWithOneTooBig = Template.bind({});
MultipleImagesWithOneTooBig.args = {
attachments: [
fakeAttachment({
url: pngUrl,
fileName: 'the-sax.png',
contentType: IMAGE_PNG,
height: 240,
width: 320,
}),
fakeAttachment({
url: pngUrl,
fileName: 'the-sax.png',
contentType: IMAGE_PNG,
height: 240,
width: 320,
}),
],
attachmentDroppedDueToSize: true,
status: 'sent',
};
export const MultipleImagesWithBodyTextOneTooBig = Template.bind({});
MultipleImagesWithBodyTextOneTooBig.args = {
attachments: [
fakeAttachment({
url: pngUrl,
fileName: 'the-sax.png',
contentType: IMAGE_PNG,
height: 240,
width: 320,
}),
fakeAttachment({
url: pngUrl,
fileName: 'the-sax.png',
contentType: IMAGE_PNG,
height: 240,
width: 320,
}),
],
attachmentDroppedDueToSize: true,
text: 'Hey, check out these images!',
status: 'sent',
};
export const ImageWithCaption = Template.bind({});
ImageWithCaption.args = {
attachments: [
@ -1968,6 +2033,7 @@ PaymentNotification.args = {
function MultiSelectMessage() {
const [selected, setSelected] = React.useState(false);
return (
<TimelineMessage
{...createProps({


@ -4,7 +4,7 @@
import { useContext, createContext, useEffect, useRef } from 'react';
import * as log from '../logging/log';
type ScrollerLock = Readonly<{
export type ScrollerLock = Readonly<{
isLocked(): boolean;
lock(reason: string, onUserInterrupt: () => void): () => void;
onUserInterrupt(reason: string): void;


@ -612,6 +612,7 @@ export async function fetchLinkPreviewImage(
const { blob: xcodedDataBlob } = await scaleImageToLevel(
dataBlob,
contentType,
dataBlob.size,
false
);
const xcodedDataArrayBuffer = await blobToArrayBuffer(xcodedDataBlob);


@ -15,12 +15,21 @@ import type {
AttachmentDownloadJobTypeType,
} from '../sql/Interface';
import { getValue } from '../RemoteConfig';
import type { MessageModel } from '../models/messages';
import type { AttachmentType } from '../types/Attachment';
import { getAttachmentSignature, isDownloaded } from '../types/Attachment';
import {
AttachmentSizeError,
getAttachmentSignature,
isDownloaded,
} from '../types/Attachment';
import * as Errors from '../types/errors';
import type { LoggerType } from '../types/Logging';
import * as log from '../logging/log';
import {
KIBIBYTE,
getMaximumIncomingAttachmentSizeInKb,
} from '../types/AttachmentSize';
const {
getMessageById,
@ -269,13 +278,40 @@ async function _runJob(job?: AttachmentDownloadJobType): Promise<void> {
return;
}
await _addAttachmentToMessage(
message,
{ ...attachment, pending: true },
{ type, index }
);
let downloaded: AttachmentType | null = null;
const downloaded = await downloadAttachment(attachment);
try {
const { size } = attachment;
const maxInKib = getMaximumIncomingAttachmentSizeInKb(getValue);
const sizeInKib = size / KIBIBYTE;
if (!size || sizeInKib > maxInKib) {
throw new AttachmentSizeError(
`Attachment Job ${id}: Attachment was ${sizeInKib}kib, max is ${maxInKib}kib`
);
}
await _addAttachmentToMessage(
message,
{ ...attachment, pending: true },
{ type, index }
);
// If the download is bigger than expected, we'll stop in the middle
downloaded = await downloadAttachment(attachment);
} catch (error) {
if (error instanceof AttachmentSizeError) {
log.error(Errors.toLogFormat(error));
await _addAttachmentToMessage(
message,
_markAttachmentAsTooBig(attachment),
{ type, index }
);
await _finishJob(message, id);
return;
}
throw error;
}
if (!downloaded) {
logger.warn(
@ -444,6 +480,14 @@ function _markAttachmentAsPermanentError(
};
}
function _markAttachmentAsTooBig(attachment: AttachmentType): AttachmentType {
return {
...omit(attachment, ['key', 'id']),
error: true,
wasTooBig: true,
};
}
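// Editor's note (a sketch, not part of the commit): getMaximumIncomingAttachmentSizeInKb,
// imported at the top of this file, presumably reads the new
// 'global.attachments.maxReceiveBytes' remote-config key and converts it to kibibytes,
// by analogy with the outgoing-size helper. Only its shape (a getValue callback in,
// KiB out) is visible in this diff; the parsing and fallback below are assumptions.
//
//   function getMaximumIncomingAttachmentSizeInKb(
//     getValue: (name: ConfigKeyType) => string | undefined
//   ): number {
//     const raw = getValue('global.attachments.maxReceiveBytes');
//     const bytes = raw ? parseInt(raw, 10) : 100 * 1024 * 1024; // assumed default
//     return bytes / KIBIBYTE;
//   }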
function _markAttachmentAsTransientError(
attachment: AttachmentType
): AttachmentType {

ts/model-types.d.ts (vendored, 13 changed lines)

@ -17,7 +17,7 @@ import type { GroupNameCollisionsWithIdsByTitle } from './util/groupMemberNameCo
import type { AttachmentDraftType, AttachmentType } from './types/Attachment';
import type { EmbeddedContactType } from './types/EmbeddedContact';
import { SignalService as Proto } from './protobuf';
import type { AvatarDataType } from './types/Avatar';
import type { AvatarDataType, ContactAvatarType } from './types/Avatar';
import type { AciString, PniString, ServiceIdString } from './types/ServiceId';
import type { StoryDistributionIdString } from './types/StoryDistributionId';
import type { SeenStatus } from './MessageSeenStatus';
@ -331,10 +331,7 @@ export type ConversationAttributesType = {
messageRequestResponseType?: number;
muteExpiresAt?: number;
dontNotifyForMentionsIfMuted?: boolean;
profileAvatar?: null | {
hash: string;
path: string;
};
profileAvatar?: ContactAvatarType | null;
profileKeyCredential?: string | null;
profileKeyCredentialExpiration?: number | null;
lastProfile?: ConversationLastProfileType;
@ -415,11 +412,7 @@ export type ConversationAttributesType = {
addFromInviteLink: AccessRequiredEnum;
};
announcementsOnly?: boolean;
avatar?: {
url: string;
path: string;
hash?: string;
} | null;
avatar?: ContactAvatarType | null;
avatars?: Array<AvatarDataType>;
description?: string;
expireTimer?: DurationInSeconds;


@ -4660,8 +4660,8 @@ export class ConversationModel extends window.Backbone
if (decrypted) {
const newAttributes = await Conversation.maybeUpdateProfileAvatar(
this.attributes,
decrypted,
{
data: decrypted,
writeNewAttachmentData,
deleteAttachmentData,
doesAttachmentExist,


@ -4,7 +4,7 @@
import PQueue from 'p-queue';
import type { ContactSyncEvent } from '../textsecure/messageReceiverEvents';
import type { ModifiedContactDetails } from '../textsecure/ContactsParser';
import type { ContactDetailsWithAvatar } from '../textsecure/ContactsParser';
import { normalizeAci } from '../util/normalizeAci';
import * as Conversation from '../types/Conversation';
import * as Errors from '../types/errors';
@ -13,6 +13,7 @@ import type { ConversationModel } from '../models/conversations';
import { validateConversation } from '../util/validateConversation';
import { isDirectConversation, isMe } from '../util/whatTypeOfConversation';
import * as log from '../logging/log';
import { dropNull } from '../util/dropNull';
// When true - we are running the very first storage and contact sync after
// linking.
@ -25,7 +26,7 @@ export function setIsInitialSync(newValue: boolean): void {
async function updateConversationFromContactSync(
conversation: ConversationModel,
details: ModifiedContactDetails,
details: ContactDetailsWithAvatar,
receivedAtCounter: number,
sentAt: number
): Promise<void> {
@ -33,17 +34,17 @@ async function updateConversationFromContactSync(
window.Signal.Migrations;
conversation.set({
name: details.name,
inbox_position: details.inboxPosition,
name: dropNull(details.name),
inbox_position: dropNull(details.inboxPosition),
});
// Update the conversation avatar only if new avatar exists and hash differs
const { avatar } = details;
if (avatar && avatar.data) {
if (avatar && avatar.path) {
const newAttributes = await Conversation.maybeUpdateAvatar(
conversation.attributes,
avatar.data,
{
newAvatar: avatar,
writeNewAttachmentData,
deleteAttachmentData,
doesAttachmentExist,


@ -51,7 +51,7 @@ import {
suspendLinkPreviews,
} from '../../services/LinkPreview';
import {
getMaximumAttachmentSizeInKb,
getMaximumOutgoingAttachmentSizeInKb,
getRenderDetailsForLimit,
KIBIBYTE,
} from '../../types/AttachmentSize';
@ -1167,7 +1167,7 @@ function preProcessAttachment(
// Putting this after everything else because the other checks are more
// important to show to the user.
const limitKb = getMaximumAttachmentSizeInKb(getRemoteConfigValue);
const limitKb = getMaximumOutgoingAttachmentSizeInKb(getRemoteConfigValue);
if (file.size / KIBIBYTE > limitKb) {
return {
toastType: ToastType.FileSize,


@ -676,6 +676,9 @@ export const getPropsForMessage = (
message: MessageWithUIFieldsType,
options: GetPropsForMessageOptions
): Omit<PropsForMessage, 'renderingContext' | 'menu' | 'contextMenu'> => {
const attachmentDroppedDueToSize = message.attachments?.some(
item => item.wasTooBig
);
const attachments = getAttachmentsForMessage(message);
const bodyRanges = processBodyRanges(message, options);
const author = getAuthorForMessage(message, options);
@ -734,6 +737,7 @@ export const getPropsForMessage = (
return {
attachments,
attachmentDroppedDueToSize,
author,
bodyRanges,
previews,


@ -8,7 +8,7 @@ import type { LocalizerType } from '../../types/Util';
import type { StateType } from '../reducer';
import { SmartStoryCreator } from './StoryCreator';
import { StoriesTab } from '../../components/StoriesTab';
import { getMaximumAttachmentSizeInKb } from '../../types/AttachmentSize';
import { getMaximumOutgoingAttachmentSizeInKb } from '../../types/AttachmentSize';
import type { ConfigKeyType } from '../../RemoteConfig';
import { getMe } from '../selectors/conversations';
import { getIntl, getTheme } from '../selectors/user';
@ -74,7 +74,7 @@ export function SmartStoriesTab(): JSX.Element | null {
const otherTabsUnreadStats = useSelector(getOtherTabsUnreadStats);
const remoteConfig = useSelector(getRemoteConfig);
const maxAttachmentSizeInKb = getMaximumAttachmentSizeInKb(
const maxAttachmentSizeInKb = getMaximumOutgoingAttachmentSizeInKb(
(name: ConfigKeyType) => {
const value = remoteConfig[name]?.value;
return value ? String(value) : undefined;


@ -1,70 +0,0 @@
// Copyright 2015 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import protobuf from '../protobuf/wrap';
import * as Bytes from '../Bytes';
import { SignalService as Proto } from '../protobuf';
import { ContactBuffer } from '../textsecure/ContactsParser';
const { Writer } = protobuf;
describe('ContactsParser', () => {
function generateAvatar(): Uint8Array {
const result = new Uint8Array(255);
for (let i = 0; i < result.length; i += 1) {
result[i] = i;
}
return result;
}
describe('ContactBuffer', () => {
function getTestBuffer(): Uint8Array {
const avatarBuffer = generateAvatar();
const contactInfoBuffer = Proto.ContactDetails.encode({
name: 'Zero Cool',
number: '+10000000000',
aci: '7198E1BD-1293-452A-A098-F982FF201902',
avatar: { contentType: 'image/jpeg', length: avatarBuffer.length },
}).finish();
const writer = new Writer();
writer.bytes(contactInfoBuffer);
const prefixedContact = writer.finish();
const chunks: Array<Uint8Array> = [];
for (let i = 0; i < 3; i += 1) {
chunks.push(prefixedContact);
chunks.push(avatarBuffer);
}
return Bytes.concatenate(chunks);
}
it('parses an array buffer of contacts', () => {
const bytes = getTestBuffer();
const contactBuffer = new ContactBuffer(bytes);
let contact = contactBuffer.next();
let count = 0;
while (contact !== undefined) {
count += 1;
assert.strictEqual(contact.name, 'Zero Cool');
assert.strictEqual(contact.number, '+10000000000');
assert.strictEqual(contact.aci, '7198e1bd-1293-452a-a098-f982ff201902');
assert.strictEqual(contact.avatar?.contentType, 'image/jpeg');
assert.strictEqual(contact.avatar?.length, 255);
assert.strictEqual(contact.avatar?.data.byteLength, 255);
const avatarBytes = new Uint8Array(
contact.avatar?.data || new Uint8Array(0)
);
for (let j = 0; j < 255; j += 1) {
assert.strictEqual(avatarBytes[j], j);
}
contact = contactBuffer.next();
}
assert.strictEqual(count, 3);
});
});
});


@ -15,6 +15,8 @@ export const fakeAttachment = (
width: 800,
height: 600,
size: 10304,
// This is to get rid of the download buttons on most of our stories
path: 'ab/ablahblahblah',
...overrides,
});


@ -0,0 +1,277 @@
// Copyright 2015 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import { createReadStream, readFileSync, unlinkSync, writeFileSync } from 'fs';
import { v4 as generateGuid } from 'uuid';
import { join } from 'path';
import { pipeline } from 'stream/promises';
import { Transform } from 'stream';
import protobuf from '../protobuf/wrap';
import * as log from '../logging/log';
import * as Bytes from '../Bytes';
import * as Errors from '../types/errors';
import { SignalService as Proto } from '../protobuf';
import {
ParseContactsTransform,
parseContactsV2,
} from '../textsecure/ContactsParser';
import type { ContactDetailsWithAvatar } from '../textsecure/ContactsParser';
import { createTempDir, deleteTempDir } from '../updater/common';
import { strictAssert } from '../util/assert';
const { Writer } = protobuf;
describe('ContactsParser', () => {
let tempDir: string;
beforeEach(async () => {
tempDir = await createTempDir();
});
afterEach(async () => {
await deleteTempDir(log, tempDir);
});
describe('parseContactsV2', () => {
it('parses an array buffer of contacts', async () => {
let absolutePath: string | undefined;
try {
const bytes = getTestBuffer();
const fileName = generateGuid();
absolutePath = join(tempDir, fileName);
writeFileSync(absolutePath, bytes);
const contacts = await parseContactsV2({ absolutePath });
assert.strictEqual(contacts.length, 3);
contacts.forEach(contact => {
verifyContact(contact);
});
} finally {
if (absolutePath) {
unlinkSync(absolutePath);
}
}
});
it('parses an array buffer of contacts with small chunk size', async () => {
let absolutePath: string | undefined;
try {
const bytes = getTestBuffer();
const fileName = generateGuid();
absolutePath = join(tempDir, fileName);
writeFileSync(absolutePath, bytes);
const contacts = await parseContactsWithSmallChunkSize({
absolutePath,
});
assert.strictEqual(contacts.length, 3);
contacts.forEach(contact => {
verifyContact(contact);
});
} finally {
if (absolutePath) {
unlinkSync(absolutePath);
}
}
});
it('parses an array buffer of contacts where one contact has no avatar', async () => {
let absolutePath: string | undefined;
try {
const bytes = Bytes.concatenate([
generatePrefixedContact(undefined),
getTestBuffer(),
]);
const fileName = generateGuid();
absolutePath = join(tempDir, fileName);
writeFileSync(absolutePath, bytes);
const contacts = await parseContactsWithSmallChunkSize({
absolutePath,
});
assert.strictEqual(contacts.length, 4);
contacts.forEach((contact, index) => {
const avatarIsMissing = index === 0;
verifyContact(contact, avatarIsMissing);
});
} finally {
if (absolutePath) {
unlinkSync(absolutePath);
}
}
});
it('parses an array buffer of contacts where contacts are dropped due to missing ACI', async () => {
let absolutePath: string | undefined;
try {
const avatarBuffer = generateAvatar();
const bytes = Bytes.concatenate([
generatePrefixedContact(avatarBuffer, 'invalid'),
avatarBuffer,
generatePrefixedContact(undefined, 'invalid'),
getTestBuffer(),
]);
const fileName = generateGuid();
absolutePath = join(tempDir, fileName);
writeFileSync(absolutePath, bytes);
const contacts = await parseContactsWithSmallChunkSize({
absolutePath,
});
assert.strictEqual(contacts.length, 3);
contacts.forEach(contact => {
verifyContact(contact);
});
} finally {
if (absolutePath) {
unlinkSync(absolutePath);
}
}
});
});
});
class SmallChunksTransform extends Transform {
constructor(private chunkSize: number) {
super();
}
override _transform(
incomingChunk: Buffer | undefined,
_encoding: string,
done: (error?: Error) => void
) {
if (!incomingChunk || incomingChunk.byteLength === 0) {
done();
return;
}
try {
const totalSize = incomingChunk.byteLength;
const chunkCount = Math.floor(totalSize / this.chunkSize);
const remainder = totalSize % this.chunkSize;
for (let i = 0; i < chunkCount; i += 1) {
const start = i * this.chunkSize;
const end = start + this.chunkSize;
this.push(incomingChunk.subarray(start, end));
}
if (remainder > 0) {
this.push(incomingChunk.subarray(chunkCount * this.chunkSize));
}
} catch (error) {
done(error);
return;
}
done();
}
}
function generateAvatar(): Uint8Array {
const result = new Uint8Array(255);
for (let i = 0; i < result.length; i += 1) {
result[i] = i;
}
return result;
}
function getTestBuffer(): Uint8Array {
const avatarBuffer = generateAvatar();
const prefixedContact = generatePrefixedContact(avatarBuffer);
const chunks: Array<Uint8Array> = [];
for (let i = 0; i < 3; i += 1) {
chunks.push(prefixedContact);
chunks.push(avatarBuffer);
}
return Bytes.concatenate(chunks);
}
function generatePrefixedContact(
avatarBuffer: Uint8Array | undefined,
aci = '7198E1BD-1293-452A-A098-F982FF201902'
) {
const contactInfoBuffer = Proto.ContactDetails.encode({
name: 'Zero Cool',
number: '+10000000000',
aci,
avatar: avatarBuffer
? { contentType: 'image/jpeg', length: avatarBuffer.length }
: undefined,
}).finish();
const writer = new Writer();
writer.bytes(contactInfoBuffer);
const prefixedContact = writer.finish();
return prefixedContact;
}
function verifyContact(
contact: ContactDetailsWithAvatar,
avatarIsMissing?: boolean
) {
assert.strictEqual(contact.name, 'Zero Cool');
assert.strictEqual(contact.number, '+10000000000');
assert.strictEqual(contact.aci, '7198e1bd-1293-452a-a098-f982ff201902');
if (avatarIsMissing) {
return;
}
const path = contact.avatar?.path;
strictAssert(path, 'Avatar needs path');
const absoluteAttachmentPath =
window.Signal.Migrations.getAbsoluteAttachmentPath(path);
const avatarBytes = readFileSync(absoluteAttachmentPath);
unlinkSync(absoluteAttachmentPath);
for (let j = 0; j < 255; j += 1) {
assert.strictEqual(avatarBytes[j], j);
}
}
async function parseContactsWithSmallChunkSize({
absolutePath,
}: {
absolutePath: string;
}): Promise<ReadonlyArray<ContactDetailsWithAvatar>> {
const logId = 'parseContactsWithSmallChunkSize';
const readStream = createReadStream(absolutePath);
const smallChunksTransform = new SmallChunksTransform(32);
const parseContactsTransform = new ParseContactsTransform();
try {
await pipeline(readStream, smallChunksTransform, parseContactsTransform);
} catch (error) {
try {
readStream.close();
} catch (cleanupError) {
log.error(
`${logId}: Failed to clean up after error`,
Errors.toLogFormat(cleanupError)
);
}
throw error;
}
readStream.close();
return parseContactsTransform.contacts;
}


@ -1,8 +1,13 @@
// Copyright 2015 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
/* eslint-disable @typescript-eslint/no-non-null-assertion */
import { assert } from 'chai';
import { readFileSync, unlinkSync, writeFileSync } from 'fs';
import { join } from 'path';
import * as log from '../logging/log';
import * as Bytes from '../Bytes';
import * as Curve from '../Curve';
import {
@ -27,7 +32,12 @@ import {
hmacSha256,
verifyHmacSha256,
randomInt,
encryptAttachment,
decryptAttachmentV1,
padAndEncryptAttachment,
} from '../Crypto';
import { decryptAttachmentV2, encryptAttachmentV2 } from '../AttachmentCrypto';
import { createTempDir, deleteTempDir } from '../updater/common';
import { uuidToBytes, bytesToUuid } from '../util/uuidToBytes';
const BUCKET_SIZES = [
@ -586,4 +596,188 @@ describe('Crypto', () => {
assert.strictEqual(count, 0, failures.join('\n'));
});
});
describe('attachments', () => {
const FILE_PATH = join(__dirname, '../../fixtures/ghost-kitty.mp4');
const FILE_CONTENTS = readFileSync(FILE_PATH);
let tempDir: string | undefined;
beforeEach(async () => {
tempDir = await createTempDir();
});
afterEach(async () => {
if (tempDir) {
await deleteTempDir(log, tempDir);
}
});
it('v1 roundtrips (memory only)', () => {
const keys = getRandomBytes(64);
// Note: support for padding is not in decryptAttachmentV1, so we don't pad here
const encryptedAttachment = encryptAttachment({
plaintext: FILE_CONTENTS,
keys,
});
const plaintext = decryptAttachmentV1(
encryptedAttachment.ciphertext,
keys,
encryptedAttachment.digest
);
assert.isTrue(constantTimeEqual(FILE_CONTENTS, plaintext));
});
it('v1 -> v2 (memory -> disk)', async () => {
const keys = getRandomBytes(64);
const ciphertextPath = join(tempDir!, 'file');
let plaintextPath;
try {
const encryptedAttachment = padAndEncryptAttachment({
plaintext: FILE_CONTENTS,
keys,
});
writeFileSync(ciphertextPath, encryptedAttachment.ciphertext);
const plaintextRelativePath = await decryptAttachmentV2({
ciphertextPath,
id: 'test',
keys,
size: FILE_CONTENTS.byteLength,
theirDigest: encryptedAttachment.digest,
});
plaintextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
plaintextRelativePath
);
const plaintext = readFileSync(plaintextPath);
assert.isTrue(constantTimeEqual(FILE_CONTENTS, plaintext));
} finally {
if (plaintextPath) {
unlinkSync(plaintextPath);
}
}
});
it('v2 roundtrips (all on disk)', async () => {
const keys = getRandomBytes(64);
let plaintextPath;
let ciphertextPath;
try {
const encryptedAttachment = await encryptAttachmentV2({
keys,
plaintextAbsolutePath: FILE_PATH,
size: FILE_CONTENTS.byteLength,
});
ciphertextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
encryptedAttachment.path
);
const plaintextRelativePath = await decryptAttachmentV2({
ciphertextPath,
id: 'test',
keys,
size: FILE_CONTENTS.byteLength,
theirDigest: encryptedAttachment.digest,
});
plaintextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
plaintextRelativePath
);
const plaintext = readFileSync(plaintextPath);
assert.isTrue(constantTimeEqual(FILE_CONTENTS, plaintext));
} finally {
if (plaintextPath) {
unlinkSync(plaintextPath);
}
if (ciphertextPath) {
unlinkSync(ciphertextPath);
}
}
});
it('v2 -> v1 (disk -> memory)', async () => {
const keys = getRandomBytes(64);
let ciphertextPath;
try {
const encryptedAttachment = await encryptAttachmentV2({
keys,
plaintextAbsolutePath: FILE_PATH,
size: FILE_CONTENTS.byteLength,
});
ciphertextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
encryptedAttachment.path
);
const ciphertext = readFileSync(ciphertextPath);
const plaintext = decryptAttachmentV1(
ciphertext,
keys,
encryptedAttachment.digest
);
const IV = 16;
const MAC = 32;
const PADDING_FOR_GHOST_KITTY = 126_066; // delta between file size and next bucket
assert.strictEqual(
plaintext.byteLength,
FILE_CONTENTS.byteLength + IV + MAC + PADDING_FOR_GHOST_KITTY,
'verify padding'
);
// Note: support for padding is not in decryptAttachmentV1, so we manually unpad
const plaintextWithoutPadding = plaintext.subarray(
0,
FILE_CONTENTS.byteLength
);
assert.isTrue(
constantTimeEqual(FILE_CONTENTS, plaintextWithoutPadding)
);
} finally {
if (ciphertextPath) {
unlinkSync(ciphertextPath);
}
}
});
it('v1 and v2 produce the same ciphertext, given same iv', async () => {
const keys = getRandomBytes(64);
let ciphertextPath;
const dangerousTestOnlyIv = getRandomBytes(16);
try {
const encryptedAttachmentV1 = padAndEncryptAttachment({
plaintext: FILE_CONTENTS,
keys,
dangerousTestOnlyIv,
});
const ciphertextV1 = encryptedAttachmentV1.ciphertext;
const encryptedAttachmentV2 = await encryptAttachmentV2({
keys,
plaintextAbsolutePath: FILE_PATH,
size: FILE_CONTENTS.byteLength,
dangerousTestOnlyIv,
});
ciphertextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
encryptedAttachmentV2.path
);
const ciphertextV2 = readFileSync(ciphertextPath);
assert.strictEqual(ciphertextV1.byteLength, ciphertextV2.byteLength);
assert.isTrue(constantTimeEqual(ciphertextV1, ciphertextV2));
} finally {
if (ciphertextPath) {
unlinkSync(ciphertextPath);
}
}
});
});
});
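
For reference, PADDING_FOR_GHOST_KITTY above is simply the distance from the fixture's size to the next padding bucket. A minimal sketch of recomputing that delta, assuming getAttachmentSizeBucket from ../Crypto rounds a plaintext size up to its bucket:

import { readFileSync } from 'fs';
import { join } from 'path';
import { getAttachmentSizeBucket } from '../Crypto';

// Sketch: recompute the padding delta the test hard-codes as 126_066.
const filePath = join(__dirname, '../../fixtures/ghost-kitty.mp4');
const fileSize = readFileSync(filePath).byteLength;

// Delta between the file size and the next padding bucket.
const paddingDelta = getAttachmentSizeBucket(fileSize) - fileSize;
console.log('expected PADDING_FOR_GHOST_KITTY:', paddingDelta);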

View file

@@ -35,7 +35,12 @@ describe('scaleImageToLevel', () => {
testCases.map(
async ({ path, contentType, expectedWidth, expectedHeight }) => {
const blob = await getBlob(path);
const scaled = await scaleImageToLevel(blob, contentType, true);
const scaled = await scaleImageToLevel(
blob,
contentType,
blob.size,
true
);
const data = await loadImage(scaled.blob, { orientation: true });
const { originalWidth: width, originalHeight: height } = data;
@@ -56,7 +61,7 @@ describe('scaleImageToLevel', () => {
'Test setup failure: expected fixture to have EXIF data'
);
const scaled = await scaleImageToLevel(original, IMAGE_JPEG, true);
const scaled = await scaleImageToLevel(original, IMAGE_JPEG, original.size);
assert.isUndefined(
(await loadImage(scaled.blob, { meta: true, orientation: true })).exif
);

View file

@@ -165,6 +165,7 @@ describe('Contact', () => {
avatar: fakeAttachment({
pending: true,
contentType: IMAGE_GIF,
path: undefined,
}),
},
};

View file

@@ -1,159 +1,233 @@
// Copyright 2020 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
/* eslint-disable max-classes-per-file */
import protobuf from '../protobuf/wrap';
import { createReadStream } from 'fs';
import { Transform } from 'stream';
import { pipeline } from 'stream/promises';
import { SignalService as Proto } from '../protobuf';
import protobuf from '../protobuf/wrap';
import { normalizeAci } from '../util/normalizeAci';
import { isAciString } from '../util/isAciString';
import { DurationInSeconds } from '../util/durations';
import * as Errors from '../types/errors';
import * as log from '../logging/log';
import type { ContactAvatarType } from '../types/Avatar';
import { computeHash } from '../Crypto';
import { dropNull } from '../util/dropNull';
import Avatar = Proto.ContactDetails.IAvatar;
const { Reader } = protobuf;
type OptionalFields = { avatar?: Avatar | null; expireTimer?: number | null };
type DecoderBase<Message extends OptionalFields> = {
decodeDelimited(reader: protobuf.Reader): Message | undefined;
type OptionalFields = {
avatar?: Avatar | null;
expireTimer?: number | null;
number?: string | null;
};
type HydratedAvatar = Avatar & { data: Uint8Array };
type MessageWithAvatar<Message extends OptionalFields> = Omit<
Message,
'avatar'
'avatar' | 'toJSON'
> & {
avatar?: HydratedAvatar;
avatar?: ContactAvatarType;
expireTimer?: DurationInSeconds;
number?: string | undefined;
};
export type ModifiedContactDetails = MessageWithAvatar<Proto.ContactDetails>;
export type ContactDetailsWithAvatar = MessageWithAvatar<Proto.IContactDetails>;
/* eslint-disable @typescript-eslint/brace-style -- Prettier conflicts with ESLint */
abstract class ParserBase<
Message extends OptionalFields,
Decoder extends DecoderBase<Message>,
Result
> implements Iterable<Result>
{
/* eslint-enable @typescript-eslint/brace-style */
export async function parseContactsV2({
absolutePath,
}: {
absolutePath: string;
}): Promise<ReadonlyArray<ContactDetailsWithAvatar>> {
const logId = 'parseContactsV2';
protected readonly reader: protobuf.Reader;
const readStream = createReadStream(absolutePath);
const parseContactsTransform = new ParseContactsTransform();
constructor(bytes: Uint8Array, private readonly decoder: Decoder) {
this.reader = new Reader(bytes);
try {
await pipeline(readStream, parseContactsTransform);
} catch (error) {
try {
readStream.close();
} catch (cleanupError) {
log.error(
`${logId}: Failed to clean up after error`,
Errors.toLogFormat(cleanupError)
);
}
throw error;
}
protected decodeDelimited(): MessageWithAvatar<Message> | undefined {
if (this.reader.pos === this.reader.len) {
return undefined; // eof
readStream.close();
return parseContactsTransform.contacts;
}
// This transform pulls contacts and their avatars from a stream of bytes. This is tricky
// because chunk boundaries might fall in the middle of a contact or their avatar. So we
// are prepared for decodeDelimited() to throw, and we keep activeContact around while we
// wait for more chunks to accumulate the expected avatar size.
// Note: exported only for testing
export class ParseContactsTransform extends Transform {
public contacts: Array<ContactDetailsWithAvatar> = [];
public activeContact: Proto.ContactDetails | undefined;
private unused: Uint8Array | undefined;
override async _transform(
chunk: Buffer | undefined,
_encoding: string,
done: (error?: Error) => void
): Promise<void> {
if (!chunk || chunk.byteLength === 0) {
done();
return;
}
try {
const proto = this.decoder.decodeDelimited(this.reader);
if (!proto) {
return undefined;
let data = chunk;
if (this.unused) {
data = Buffer.concat([this.unused, data]);
this.unused = undefined;
}
let avatar: HydratedAvatar | undefined;
if (proto.avatar) {
const attachmentLen = proto.avatar.length ?? 0;
const avatarData = this.reader.buf.slice(
this.reader.pos,
this.reader.pos + attachmentLen
);
this.reader.skip(attachmentLen);
const reader = Reader.create(data);
while (reader.pos < reader.len) {
const startPos = reader.pos;
avatar = {
...proto.avatar,
if (!this.activeContact) {
try {
this.activeContact = Proto.ContactDetails.decodeDelimited(reader);
} catch (err) {
// We get a RangeError if there wasn't enough data to read the next record.
if (err instanceof RangeError) {
// Note: A failed decodeDelimited() does in fact update reader.pos, so we
// must reset to startPos
this.unused = data.subarray(startPos);
done();
return;
}
data: avatarData,
};
// Something deeper has gone wrong; the proto is likely malformed
done(err);
return;
}
}
// Something has really gone wrong if the above parsing didn't throw but gave
// us nothing back. Let's end the parse.
if (!this.activeContact) {
done(new Error('ParseContactsTransform: No active contact!'));
return;
}
const attachmentSize = this.activeContact?.avatar?.length ?? 0;
if (attachmentSize === 0) {
// No avatar attachment for this contact
const prepared = prepareContact(this.activeContact);
if (prepared) {
this.contacts.push(prepared);
}
this.activeContact = undefined;
continue;
}
const spaceLeftAfterRead = reader.len - (reader.pos + attachmentSize);
if (spaceLeftAfterRead >= 0) {
// We've read enough data to read the entire attachment
const avatarData = reader.buf.slice(
reader.pos,
reader.pos + attachmentSize
);
const hash = computeHash(data);
// eslint-disable-next-line no-await-in-loop
const path = await window.Signal.Migrations.writeNewAttachmentData(
avatarData
);
const prepared = prepareContact(this.activeContact, {
...this.activeContact.avatar,
hash,
path,
});
if (prepared) {
this.contacts.push(prepared);
} else {
// eslint-disable-next-line no-await-in-loop
await window.Signal.Migrations.deleteAttachmentData(path);
}
this.activeContact = undefined;
reader.skip(attachmentSize);
} else {
// We have an attachment, but we haven't read enough data yet. We need to
// wait for another chunk.
this.unused = data.subarray(reader.pos);
done();
return;
}
}
let expireTimer: DurationInSeconds | undefined;
if (proto.expireTimer != null) {
expireTimer = DurationInSeconds.fromSeconds(proto.expireTimer);
}
return {
...proto,
avatar,
expireTimer,
};
// No need to push; no downstream consumers!
} catch (error) {
log.error('ProtoParser.next error:', Errors.toLogFormat(error));
return undefined;
done(error);
return;
}
}
public abstract next(): Result | undefined;
*[Symbol.iterator](): Iterator<Result> {
let result = this.next();
while (result !== undefined) {
yield result;
result = this.next();
}
done();
}
}
export class ContactBuffer extends ParserBase<
Proto.ContactDetails,
typeof Proto.ContactDetails,
ModifiedContactDetails
> {
constructor(arrayBuffer: Uint8Array) {
super(arrayBuffer, Proto.ContactDetails);
}
function prepareContact(
proto: Proto.ContactDetails,
avatar?: ContactAvatarType
): ContactDetailsWithAvatar | undefined {
const aci = proto.aci
? normalizeAci(proto.aci, 'ContactBuffer.aci')
: proto.aci;
public override next(): ModifiedContactDetails | undefined {
while (this.reader.pos < this.reader.len) {
const proto = this.decodeDelimited();
if (!proto) {
return undefined;
}
const expireTimer =
proto.expireTimer != null
? DurationInSeconds.fromSeconds(proto.expireTimer)
: undefined;
if (!proto.aci) {
return proto;
}
const verified =
proto.verified && proto.verified.destinationAci
? {
...proto.verified,
const { verified } = proto;
destinationAci: normalizeAci(
proto.verified.destinationAci,
'ContactBuffer.verified.destinationAci'
),
}
: proto.verified;
if (
!isAciString(proto.aci) ||
(verified?.destinationAci && !isAciString(verified.destinationAci))
) {
continue;
}
return {
...proto,
verified:
verified && verified.destinationAci
? {
...verified,
destinationAci: normalizeAci(
verified.destinationAci,
'ContactBuffer.verified.destinationAci'
),
}
: verified,
aci: normalizeAci(proto.aci, 'ContactBuffer.aci'),
};
}
// We reject incoming contacts with invalid aci information
if (
(proto.aci && !isAciString(proto.aci)) ||
(proto.verified?.destinationAci &&
!isAciString(proto.verified.destinationAci))
) {
log.warn('ParseContactsTransform: Dropping contact with invalid aci');
return undefined;
}
const result = {
...proto,
expireTimer,
aci,
verified,
avatar,
number: dropNull(proto.number),
};
return result;
}
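
A minimal usage sketch for the new streaming parser, assuming the contact-sync attachment has already been downloaded and decrypted to a file on disk; the caller below is hypothetical.

import { parseContactsV2 } from './ContactsParser';

// Sketch: run the streaming parser over a decrypted contact-sync file on disk.
async function logSyncedContacts(absolutePath: string): Promise<void> {
  const contacts = await parseContactsV2({ absolutePath });

  for (const contact of contacts) {
    // Each entry is the decoded ContactDetails; when an avatar was present,
    // its bytes have already been written to attachment storage and only the
    // path (plus hash) is kept here.
    console.log(contact.name, contact.number, contact.avatar?.path);
  }
}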

View file

@@ -6,6 +6,8 @@
import { isBoolean, isNumber, isString, omit } from 'lodash';
import PQueue from 'p-queue';
import { v4 as getGuid } from 'uuid';
import { existsSync } from 'fs';
import { removeSync } from 'fs-extra';
import type {
SealedSenderDecryptionResult,
@@ -49,7 +51,7 @@ import { parseIntOrThrow } from '../util/parseIntOrThrow';
import { clearTimeoutIfNecessary } from '../util/clearTimeoutIfNecessary';
import { Zone } from '../util/Zone';
import { DurationInSeconds, SECOND } from '../util/durations';
import type { DownloadedAttachmentType } from '../types/Attachment';
import type { AttachmentType } from '../types/Attachment';
import { Address } from '../types/Address';
import { QualifiedAddress } from '../types/QualifiedAddress';
import { normalizeStoryDistributionId } from '../types/StoryDistributionId';
@@ -81,9 +83,10 @@ import {
import { processSyncMessage } from './processSyncMessage';
import type { EventHandler } from './EventTarget';
import EventTarget from './EventTarget';
import { downloadAttachment } from './downloadAttachment';
import { downloadAttachmentV2 } from './downloadAttachment';
import type { IncomingWebSocketRequest } from './WebsocketResources';
import { ContactBuffer } from './ContactsParser';
import type { ContactDetailsWithAvatar } from './ContactsParser';
import { parseContactsV2 } from './ContactsParser';
import type { WebAPIType } from './WebAPI';
import type { Storage } from './Storage';
import { WarnOnlyError } from './Errors';
@@ -3504,11 +3507,11 @@ export default class MessageReceiver
private async handleContacts(
envelope: ProcessedEnvelope,
contacts: Proto.SyncMessage.IContacts
contactSyncProto: Proto.SyncMessage.IContacts
): Promise<void> {
const logId = getEnvelopeId(envelope);
log.info(`MessageReceiver: handleContacts ${logId}`);
const { blob } = contacts;
const { blob } = contactSyncProto;
if (!blob) {
throw new Error('MessageReceiver.handleContacts: blob field was missing');
}
@@ -3517,21 +3520,50 @@ export default class MessageReceiver
this.removeFromCache(envelope);
const attachmentPointer = await this.handleAttachment(blob, {
disableRetries: true,
timeout: 90 * SECOND,
});
const contactBuffer = new ContactBuffer(attachmentPointer.data);
let attachment: AttachmentType | undefined;
try {
attachment = await this.handleAttachmentV2(blob, {
disableRetries: true,
timeout: 90 * SECOND,
});
const contactSync = new ContactSyncEvent(
Array.from(contactBuffer),
Boolean(contacts.complete),
envelope.receivedAtCounter,
envelope.timestamp
);
await this.dispatchAndWait(logId, contactSync);
const { path } = attachment;
if (!path) {
throw new Error('handleContacts: no path field in returned attachment');
}
const absolutePath =
window.Signal.Migrations.getAbsoluteAttachmentPath(path);
if (!existsSync(absolutePath)) {
throw new Error(
'Contact sync attachment had path, but it was not found on disk'
);
}
log.info('handleContacts: finished');
let contacts: ReadonlyArray<ContactDetailsWithAvatar>;
try {
contacts = await parseContactsV2({
absolutePath,
});
} finally {
if (absolutePath) {
removeSync(absolutePath);
}
}
const contactSync = new ContactSyncEvent(
contacts,
Boolean(contactSyncProto.complete),
envelope.receivedAtCounter,
envelope.timestamp
);
await this.dispatchAndWait(logId, contactSync);
log.info('handleContacts: finished');
} finally {
if (attachment?.path) {
await window.Signal.Migrations.deleteAttachmentData(attachment.path);
}
}
}
private async handleBlocked(
@@ -3618,12 +3650,12 @@ export default class MessageReceiver
return this.storage.blocked.isGroupBlocked(groupId);
}
private async handleAttachment(
private async handleAttachmentV2(
attachment: Proto.IAttachmentPointer,
options?: { timeout?: number; disableRetries?: boolean }
): Promise<DownloadedAttachmentType> {
): Promise<AttachmentType> {
const cleaned = processAttachment(attachment);
return downloadAttachment(this.server, cleaned, options);
return downloadAttachmentV2(this.server, cleaned, options);
}
private async handleEndSession(

View file

@@ -22,7 +22,10 @@ import * as durations from '../util/durations';
import type { ExplodePromiseResultType } from '../util/explodePromise';
import { explodePromise } from '../util/explodePromise';
import { getUserAgent } from '../util/getUserAgent';
import { getStreamWithTimeout } from '../util/getStreamWithTimeout';
import {
getTimeoutStream,
getStreamWithTimeout,
} from '../util/getStreamWithTimeout';
import { formatAcceptLanguageHeader } from '../util/userLanguages';
import { toWebSafeBase64, fromWebSafeBase64 } from '../util/webSafeBase64';
import { getBasicAuth } from '../util/getBasicAuth';
@@ -970,6 +973,14 @@ export type WebAPIType = {
timeout?: number;
}
) => Promise<Uint8Array>;
getAttachmentV2: (
cdnKey: string,
cdnNumber?: number,
options?: {
disableRetries?: boolean;
timeout?: number;
}
) => Promise<Readable>;
getAvatar: (path: string) => Promise<Uint8Array>;
getHasSubscription: (subscriberId: Uint8Array) => Promise<boolean>;
getGroup: (options: GroupCredentialsType) => Promise<Proto.Group>;
@@ -1386,6 +1397,7 @@ export function initialize({
getArtAuth,
getArtProvisioningSocket,
getAttachment,
getAttachmentV2,
getAvatar,
getBadgeImageFile,
getConfig,
@@ -2876,6 +2888,61 @@ export function initialize({
}
}
async function getAttachmentV2(
cdnKey: string,
cdnNumber?: number,
options?: {
disableRetries?: boolean;
timeout?: number;
}
): Promise<Readable> {
const abortController = new AbortController();
const cdnUrl = isNumber(cdnNumber)
? cdnUrlObject[cdnNumber] ?? cdnUrlObject['0']
: cdnUrlObject['0'];
// This is going to the CDN, not the service, so we use _outerAjax
const downloadStream = await _outerAjax(
`${cdnUrl}/attachments/${cdnKey}`,
{
certificateAuthority,
disableRetries: options?.disableRetries,
proxyUrl,
responseType: 'stream',
timeout: options?.timeout || 0,
type: 'GET',
redactUrl: _createRedactor(cdnKey),
version,
abortSignal: abortController.signal,
}
);
const timeoutStream = getTimeoutStream({
name: `getAttachmentV2(${cdnKey})`,
timeout: GET_ATTACHMENT_CHUNK_TIMEOUT,
abortController,
});
const combinedStream = downloadStream
// We do this manually; pipe() doesn't flow errors through the streams for us
.on('error', (error: Error) => {
timeoutStream.emit('error', error);
})
.pipe(timeoutStream);
const cancelRequest = (error: Error) => {
combinedStream.emit('error', error);
abortController.abort();
};
registerInflightRequest(cancelRequest);
combinedStream.on('done', () => {
unregisterInFlightRequest(cancelRequest);
});
return combinedStream;
}
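
The manual .on('error', ...) wiring above exists because pipe() does not propagate errors from the source stream to its destination. A stripped-down sketch of the same pattern, with illustrative names:

import { PassThrough } from 'stream';
import type { Readable } from 'stream';

// Sketch: forward source errors into the stream handed back to the caller;
// pipe() alone would leave the destination unaware of them.
function pipeWithErrorForwarding(source: Readable): PassThrough {
  const destination = new PassThrough();
  source.on('error', (error: Error) => {
    destination.emit('error', error);
  });
  return source.pipe(destination);
}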
async function putEncryptedAttachment(encryptedBin: Uint8Array) {
const response = attachmentV3Response.parse(
await _ajax({

View file

@@ -37,12 +37,12 @@ export async function authorizeArtCreator({
);
const keys = Bytes.concatenate([aesKey, macKey]);
const { ciphertext } = encryptAttachment(
Proto.ArtProvisioningMessage.encode({
const { ciphertext } = encryptAttachment({
plaintext: Proto.ArtProvisioningMessage.encode({
...auth,
}).finish(),
keys
);
keys,
});
const envelope = Proto.ArtProvisioningEnvelope.encode({
publicKey: ourKeys.pubKey,

View file

@@ -1,19 +1,40 @@
// Copyright 2020 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { isNumber } from 'lodash';
import { createWriteStream, existsSync, unlinkSync } from 'fs';
import { isNumber, omit } from 'lodash';
import type { Readable } from 'stream';
import { Transform } from 'stream';
import { pipeline } from 'stream/promises';
import { ensureFile } from 'fs-extra';
import * as log from '../logging/log';
import * as Errors from '../types/errors';
import { strictAssert } from '../util/assert';
import { dropNull } from '../util/dropNull';
import type { DownloadedAttachmentType } from '../types/Attachment';
import {
AttachmentSizeError,
type AttachmentType,
type DownloadedAttachmentType,
} from '../types/Attachment';
import * as MIME from '../types/MIME';
import * as Bytes from '../Bytes';
import { getFirstBytes, decryptAttachment } from '../Crypto';
import {
getFirstBytes,
decryptAttachmentV1,
getAttachmentSizeBucket,
} from '../Crypto';
import {
decryptAttachmentV2,
IV_LENGTH,
ATTACHMENT_MAC_LENGTH,
} from '../AttachmentCrypto';
import type { ProcessedAttachment } from './Types.d';
import type { WebAPIType } from './WebAPI';
import { createName, getRelativePath } from '../windows/attachments';
export async function downloadAttachment(
export async function downloadAttachmentV1(
server: WebAPIType,
attachment: ProcessedAttachment,
options?: {
@@ -28,7 +49,6 @@ export async function downloadAttachment(
throw new Error('downloadAttachment: Attachment was missing cdnId!');
}
strictAssert(cdnId, 'attachment without cdnId');
const encrypted = await server.getAttachment(
cdnId,
dropNull(cdnNumber),
@@ -41,9 +61,8 @@ export async function downloadAttachment(
}
strictAssert(key, 'attachment has no key');
strictAssert(digest, 'attachment has no digest');
const paddedData = decryptAttachment(
const paddedData = decryptAttachmentV1(
encrypted,
Bytes.fromBase64(key),
Bytes.fromBase64(digest)
@@ -67,3 +86,132 @@ export async function downloadAttachment(
data,
};
}
export async function downloadAttachmentV2(
server: WebAPIType,
attachment: ProcessedAttachment,
options?: {
disableRetries?: boolean;
timeout?: number;
}
): Promise<AttachmentType> {
const { cdnId, cdnKey, cdnNumber, contentType, digest, key, size } =
attachment;
const cdn = cdnId || cdnKey;
const logId = `downloadAttachmentV2(${cdn}):`;
strictAssert(cdn, `${logId}: missing cdnId or cdnKey`);
strictAssert(digest, `${logId}: missing digest`);
strictAssert(key, `${logId}: missing key`);
strictAssert(isNumber(size), `${logId}: missing size`);
const downloadStream = await server.getAttachmentV2(
cdn,
dropNull(cdnNumber),
options
);
const cipherTextRelativePath = await downloadToDisk({ downloadStream, size });
const cipherTextAbsolutePath =
window.Signal.Migrations.getAbsoluteAttachmentPath(cipherTextRelativePath);
const relativePath = await decryptAttachmentV2({
ciphertextPath: cipherTextAbsolutePath,
id: cdn,
keys: Bytes.fromBase64(key),
size,
theirDigest: Bytes.fromBase64(digest),
});
if (existsSync(cipherTextAbsolutePath)) {
unlinkSync(cipherTextAbsolutePath);
}
return {
...omit(attachment, 'key'),
path: relativePath,
size,
contentType: contentType
? MIME.stringToMIMEType(contentType)
: MIME.APPLICATION_OCTET_STREAM,
};
}
async function downloadToDisk({
downloadStream,
size,
}: {
downloadStream: Readable;
size: number;
}): Promise<string> {
const relativeTargetPath = getRelativePath(createName());
const absoluteTargetPath =
window.Signal.Migrations.getAbsoluteAttachmentPath(relativeTargetPath);
await ensureFile(absoluteTargetPath);
const writeStream = createWriteStream(absoluteTargetPath);
const targetSize =
getAttachmentSizeBucket(size) * 1.05 + IV_LENGTH + ATTACHMENT_MAC_LENGTH;
const checkSizeTransform = new CheckSizeTransform(targetSize);
try {
await pipeline(downloadStream, checkSizeTransform, writeStream);
} catch (error) {
try {
writeStream.close();
if (absoluteTargetPath && existsSync(absoluteTargetPath)) {
unlinkSync(absoluteTargetPath);
}
} catch (cleanupError) {
log.error(
'downloadToDisk: Error while cleaning up',
Errors.toLogFormat(cleanupError)
);
}
throw error;
}
return relativeTargetPath;
}
// A simple transform that throws if it sees more than maxBytes on the stream.
class CheckSizeTransform extends Transform {
private bytesSeen = 0;
constructor(private maxBytes: number) {
super();
}
override _transform(
chunk: Buffer | undefined,
_encoding: string,
done: (error?: Error) => void
) {
if (!chunk || chunk.byteLength === 0) {
done();
return;
}
try {
this.bytesSeen += chunk.byteLength;
if (this.bytesSeen > this.maxBytes) {
done(
new AttachmentSizeError(
`CheckSizeTransform: Saw ${this.bytesSeen} bytes, max is ${this.maxBytes} bytes`
)
);
return;
}
this.push(chunk);
} catch (error) {
done(error);
return;
}
done();
}
}
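
CheckSizeTransform is module-private, so this is only an in-module sketch of how it composes with pipeline(): a source that produces more than the allowed number of bytes makes the whole pipeline reject with AttachmentSizeError, and the caller can then clean up the partial file. The ceiling and function name below are illustrative.

import { createWriteStream } from 'fs';
import type { Readable } from 'stream';
import { pipeline } from 'stream/promises';

// Sketch: copy a stream to disk while enforcing a 1 MiB ceiling. Oversized
// input rejects the pipeline with AttachmentSizeError.
async function copyWithCeiling(
  source: Readable,
  targetPath: string
): Promise<void> {
  const checkSize = new CheckSizeTransform(1024 * 1024);
  await pipeline(source, checkSize, createWriteStream(targetPath));
}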

View file

@@ -5,7 +5,6 @@ import EventTarget from './EventTarget';
import AccountManager from './AccountManager';
import MessageReceiver from './MessageReceiver';
import utils from './Helpers';
import { ContactBuffer } from './ContactsParser';
import SyncRequest from './SyncRequest';
import MessageSender from './SendMessage';
import { Storage } from './Storage';
@@ -17,7 +16,6 @@ export type TextSecureType = {
storage: Storage;
AccountManager: typeof AccountManager;
ContactBuffer: typeof ContactBuffer;
EventTarget: typeof EventTarget;
MessageReceiver: typeof MessageReceiver;
MessageSender: typeof MessageSender;
@@ -34,7 +32,6 @@ export const textsecure: TextSecureType = {
storage: new Storage(),
AccountManager,
ContactBuffer,
EventTarget,
MessageReceiver,
MessageSender,

View file

@@ -12,7 +12,7 @@ import type {
ProcessedDataMessage,
ProcessedSent,
} from './Types.d';
import type { ModifiedContactDetails } from './ContactsParser';
import type { ContactDetailsWithAvatar } from './ContactsParser';
import type { CallEventDetails, CallLogEvent } from '../types/CallDisposition';
export class EmptyEvent extends Event {
@@ -74,7 +74,7 @@ export class ErrorEvent extends Event {
export class ContactSyncEvent extends Event {
constructor(
public readonly contacts: ReadonlyArray<ModifiedContactDetails>,
public readonly contacts: ReadonlyArray<ContactDetailsWithAvatar>,
public readonly complete: boolean,
public readonly receivedAtCounter: number,
public readonly sentAt: number

View file

@@ -37,6 +37,8 @@ const MIN_HEIGHT = 50;
// Used for display
export class AttachmentSizeError extends Error {}
export type AttachmentType = {
error?: boolean;
blurHash?: string;
@@ -75,6 +77,7 @@ export type AttachmentType = {
key?: string;
data?: Uint8Array;
textAttachment?: TextAttachmentType;
wasTooBig?: boolean;
/** Legacy field. Used only for downloading old attachments */
id?: number;
@@ -1008,9 +1011,9 @@ export const defaultBlurHash = (theme: ThemeType = ThemeType.light): string => {
};
export const canBeDownloaded = (
attachment: Pick<AttachmentType, 'key' | 'digest'>
attachment: Pick<AttachmentType, 'digest' | 'key' | 'wasTooBig'>
): boolean => {
return Boolean(attachment.key && attachment.digest);
return Boolean(attachment.digest && attachment.key && !attachment.wasTooBig);
};
export function getAttachmentSignature(attachment: AttachmentType): string {

View file

@@ -9,14 +9,14 @@ export const KIBIBYTE = 1024;
const MEBIBYTE = 1024 * 1024;
const DEFAULT_MAX = 100 * MEBIBYTE;
export const getMaximumAttachmentSizeInKb = (
export const getMaximumOutgoingAttachmentSizeInKb = (
getValue: typeof RemoteConfig.getValue
): number => {
try {
return (
parseIntOrThrow(
getValue('global.attachments.maxBytes'),
'preProcessAttachment/maxAttachmentSize'
'getMaximumOutgoingAttachmentSizeInKb'
) / KIBIBYTE
);
} catch (error) {
@@ -27,6 +27,22 @@ export const getMaximumAttachmentSizeInKb = (
}
};
export const getMaximumIncomingAttachmentSizeInKb = (
getValue: typeof RemoteConfig.getValue
): number => {
try {
return (
parseIntOrThrow(
getValue('global.attachments.maxReceiveBytes'),
'getMaximumIncomingAttachmentSizeInKb'
) / KIBIBYTE
);
} catch (_error) {
// TODO: DESKTOP-5913. We won't log here until the new flag is fully deployed
return getMaximumOutgoingAttachmentSizeInKb(getValue) * 1.25;
}
};
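
A small sketch of how the new incoming limit might be consulted when deciding whether an incoming attachment should be dropped; the helper, its call site, and the import paths are hypothetical, but the exported names match the ones above.

import * as RemoteConfig from '../RemoteConfig';
import {
  getMaximumIncomingAttachmentSizeInKb,
  KIBIBYTE,
} from '../types/AttachmentSize';

// Sketch: true when an incoming attachment exceeds the receive limit and
// should be dropped (and, per this change, marked wasTooBig).
function isIncomingAttachmentTooBig(sizeInBytes: number): boolean {
  const limitKb = getMaximumIncomingAttachmentSizeInKb(RemoteConfig.getValue);
  return sizeInBytes > limitKb * KIBIBYTE;
}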
export function getRenderDetailsForLimit(limitKb: number): {
limit: number;
units: string;

View file

@@ -34,6 +34,12 @@ export const GroupAvatarIcons = [
'surfboard',
] as const;
export type ContactAvatarType = {
path: string;
url?: string;
hash?: string;
};
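
ContactAvatarType is what ParseContactsTransform now produces for a contact's avatar once its bytes are on disk. A short sketch of building one, assuming the Migrations helpers used elsewhere in this change and illustrative import paths:

import { computeHash } from '../Crypto';
import type { ContactAvatarType } from '../types/Avatar';

// Sketch: write avatar bytes to attachment storage and describe the result,
// mirroring what ParseContactsTransform does for contact-sync avatars.
async function toContactAvatar(data: Uint8Array): Promise<ContactAvatarType> {
  const path = await window.Signal.Migrations.writeNewAttachmentData(data);
  return { path, hash: computeHash(data) };
}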
type GroupAvatarIconType = typeof GroupAvatarIcons[number];
type PersonalAvatarIconType = typeof PersonalAvatarIcons[number];

View file

@@ -2,33 +2,87 @@
// SPDX-License-Identifier: AGPL-3.0-only
import type { ConversationAttributesType } from '../model-types.d';
import type { ContactAvatarType } from './Avatar';
import { computeHash } from '../Crypto';
export type BuildAvatarUpdaterOptions = Readonly<{
data?: Uint8Array;
newAvatar?: ContactAvatarType;
deleteAttachmentData: (path: string) => Promise<void>;
doesAttachmentExist: (path: string) => Promise<boolean>;
writeNewAttachmentData: (data: Uint8Array) => Promise<string>;
}>;
// This function is ready to handle raw avatar data as well as an avatar which has
// already been downloaded to disk.
// Scenarios that go to disk today:
// - During a contact sync (see ContactsParser.ts)
// Scenarios that stay in memory today:
// - models/Conversations/setProfileAvatar
function buildAvatarUpdater({ field }: { field: 'avatar' | 'profileAvatar' }) {
return async (
conversation: Readonly<ConversationAttributesType>,
data: Uint8Array,
{
data,
newAvatar,
deleteAttachmentData,
doesAttachmentExist,
writeNewAttachmentData,
}: BuildAvatarUpdaterOptions
): Promise<ConversationAttributesType> => {
if (!conversation) {
if (!conversation || (!data && !newAvatar)) {
return conversation;
}
const avatar = conversation[field];
const oldAvatar = conversation[field];
const newHash = data ? computeHash(data) : undefined;
const newHash = computeHash(data);
if (!oldAvatar || !oldAvatar.hash) {
if (newAvatar) {
return {
...conversation,
[field]: newAvatar,
};
}
if (data) {
return {
...conversation,
[field]: {
hash: newHash,
path: await writeNewAttachmentData(data),
},
};
}
throw new Error('buildAvatarUpdater: neither newAvatar nor data was provided');
}
if (!avatar || !avatar.hash) {
const { hash, path } = oldAvatar;
const exists = await doesAttachmentExist(path);
if (!exists) {
window.SignalContext.log.warn(
`Conversation.buildAvatarUpdater: attachment ${path} did not exist`
);
}
if (exists) {
if (newAvatar && hash && hash === newAvatar.hash) {
await deleteAttachmentData(newAvatar.path);
return conversation;
}
if (data && hash && hash === newHash) {
return conversation;
}
}
await deleteAttachmentData(path);
if (newAvatar) {
return {
...conversation,
[field]: newAvatar,
};
}
if (data) {
return {
...conversation,
[field]: {
@@ -38,27 +92,7 @@ function buildAvatarUpdater({ field }: { field: 'avatar' | 'profileAvatar' }) {
};
}
const { hash, path } = avatar;
const exists = await doesAttachmentExist(path);
if (!exists) {
window.SignalContext.log.warn(
`Conversation.buildAvatarUpdater: attachment ${path} did not exist`
);
}
if (exists && hash === newHash) {
return conversation;
}
await deleteAttachmentData(path);
return {
...conversation,
[field]: {
hash: newHash,
path: await writeNewAttachmentData(data),
},
};
throw new Error('buildAvatarUpdater: neither newAvatar nor data was provided');
};
}
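
A usage sketch for the updated updater with the new newAvatar option, for an avatar that a contact sync already wrote to disk. buildAvatarUpdater is module-internal and its exported wrapper is not shown here, and the doesAttachmentExist Migrations helper is an assumption, so treat this as illustrative only.

// Sketch (in-module): apply a contact-sync avatar that is already on disk.
const updateAvatar = buildAvatarUpdater({ field: 'avatar' });

async function applySyncedAvatar(
  conversation: ConversationAttributesType,
  newAvatar: ContactAvatarType
): Promise<ConversationAttributesType> {
  return updateAvatar(conversation, {
    newAvatar,
    deleteAttachmentData: window.Signal.Migrations.deleteAttachmentData,
    doesAttachmentExist: window.Signal.Migrations.doesAttachmentExist,
    writeNewAttachmentData: window.Signal.Migrations.writeNewAttachmentData,
  });
}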

View file

@@ -593,10 +593,18 @@ export const processNewAttachment = async (
isIncoming: true,
}
);
const onDiskAttachment = await migrateDataToFileSystem(rotatedAttachment, {
writeNewAttachmentData,
logger,
});
let onDiskAttachment = rotatedAttachment;
// If we rotated the attachment, then `data` will be the actual bytes of the attachment,
// in memory. We want that updated attachment to go back to disk.
if (rotatedAttachment.data) {
onDiskAttachment = await migrateDataToFileSystem(rotatedAttachment, {
writeNewAttachmentData,
logger,
});
}
const finalAttachment = await captureDimensionsAndScreenshot(
onDiskAttachment,
{

View file

@@ -11,7 +11,7 @@ import { makeLookup } from '../util/makeLookup';
import { maybeParseUrl } from '../util/url';
import * as Bytes from '../Bytes';
import * as Errors from './errors';
import { deriveStickerPackKey, decryptAttachment } from '../Crypto';
import { deriveStickerPackKey, decryptAttachmentV1 } from '../Crypto';
import { IMAGE_WEBP } from './MIME';
import type { MIMEType } from './MIME';
import { sniffImageMimeType } from '../util/sniffImageMimeType';
@@ -310,7 +310,10 @@ function getReduxStickerActions() {
function decryptSticker(packKey: string, ciphertext: Uint8Array): Uint8Array {
const binaryKey = Bytes.fromBase64(packKey);
const derivedKey = deriveStickerPackKey(binaryKey);
const plaintext = decryptAttachment(ciphertext, derivedKey);
// Note: downloading and decrypting in memory is okay here because these files are
// at most 300 KB, a limit enforced by the server.
const plaintext = decryptAttachmentV1(ciphertext, derivedKey);
return plaintext;
}

View file

@@ -42,17 +42,28 @@ export async function autoOrientJPEG(
// already been scaled to level, oriented, stripped of exif data, and saved
// in high quality format. If we want to send the image in HQ we can return
// the attachment as-is. Otherwise we'll have to further scale it down.
if (!attachment.data || sendHQImages) {
const { data, path, size } = attachment;
if (sendHQImages) {
return attachment;
}
let scaleTarget: string | Blob;
if (path) {
scaleTarget = window.Signal.Migrations.getAbsoluteAttachmentPath(path);
} else {
if (!data) {
return attachment;
}
scaleTarget = new Blob([data], {
type: attachment.contentType,
});
}
const dataBlob = new Blob([attachment.data], {
type: attachment.contentType,
});
try {
const { blob: xcodedDataBlob } = await scaleImageToLevel(
dataBlob,
scaleTarget,
attachment.contentType,
size,
isIncoming
);
const xcodedDataArrayBuffer = await blobToArrayBuffer(xcodedDataBlob);

View file

@@ -1,15 +1,12 @@
// Copyright 2020 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import type {
AttachmentType,
DownloadedAttachmentType,
} from '../types/Attachment';
import { downloadAttachment as doDownloadAttachment } from '../textsecure/downloadAttachment';
import type { AttachmentType } from '../types/Attachment';
import { downloadAttachmentV2 as doDownloadAttachment } from '../textsecure/downloadAttachment';
export async function downloadAttachment(
attachmentData: AttachmentType
): Promise<DownloadedAttachmentType | null> {
): Promise<AttachmentType | null> {
let migratedAttachment: AttachmentType;
const { server } = window.textsecure;

View file

@@ -297,6 +297,16 @@ export function getNotificationDataForMessage(
const attachment = attachments[0] || {};
const { contentType } = attachment;
const tooBigAttachmentCount = attachments.filter(
item => item.wasTooBig
).length;
if (tooBigAttachmentCount === attachments.length) {
return {
emoji: '📎',
text: window.i18n('icu:message--attachmentTooBig--one'),
};
}
if (contentType === MIME.IMAGE_GIF || Attachment.isGIF(attachments)) {
return {
bodyRanges,

View file

@@ -1,6 +1,7 @@
// Copyright 2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { Transform } from 'stream';
import type { Readable } from 'stream';
import * as Bytes from '../Bytes';
@@ -59,3 +60,46 @@ export function getStreamWithTimeout(
return promise;
}
export function getTimeoutStream({
name,
timeout,
abortController,
}: OptionsType): Transform {
const timeoutStream = new Transform();
let timer: NodeJS.Timeout | undefined;
const clearTimer = () => {
clearTimeoutIfNecessary(timer);
timer = undefined;
};
const reset = () => {
clearTimer();
timer = setTimeout(() => {
abortController.abort();
timeoutStream.emit(
'error',
new StreamTimeoutError(`getStreamWithTimeout(${name}) timed out`)
);
clearTimer();
}, timeout);
};
timeoutStream._transform = function transform(chunk, _encoding, done) {
try {
reset();
} catch (error) {
return done(error);
}
this.push(chunk);
done();
};
reset();
return timeoutStream;
}
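
A usage sketch mirroring how WebAPI wires getTimeoutStream onto a download stream; the wrapper function below is hypothetical. The AbortController lets a stalled transfer be cancelled at the request level while the timeout error surfaces on the returned stream.

import type { Readable } from 'stream';
import { getTimeoutStream } from '../util/getStreamWithTimeout';

// Sketch: error the stream (and abort the underlying request) if more than
// `timeout` ms pass between chunks.
function withChunkTimeout(source: Readable, timeout: number): Readable {
  const abortController = new AbortController();
  const timeoutStream = getTimeoutStream({
    name: 'example',
    timeout,
    abortController,
  });

  return source
    // pipe() does not forward source errors, so re-emit them downstream.
    .on('error', (error: Error) => timeoutStream.emit('error', error))
    .pipe(timeoutStream);
}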

View file

@@ -82,6 +82,7 @@ export async function autoScale({
const { blob, contentType: newContentType } = await scaleImageToLevel(
file,
contentType,
file.size,
true
);

View file

@@ -7,7 +7,7 @@ import type {
InMemoryAttachmentDraftType,
} from '../types/Attachment';
import {
getMaximumAttachmentSizeInKb,
getMaximumOutgoingAttachmentSizeInKb,
getRenderDetailsForLimit,
KIBIBYTE,
} from '../types/AttachmentSize';
@@ -75,7 +75,7 @@ export async function processAttachment(
}
function isAttachmentSizeOkay(attachment: Readonly<AttachmentType>): boolean {
const limitKb = getMaximumAttachmentSizeInKb(getRemoteConfigValue);
const limitKb = getMaximumOutgoingAttachmentSizeInKb(getRemoteConfigValue);
// this needs to be cast properly
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore

View file

@@ -109,8 +109,9 @@ async function getCanvasBlobAsJPEG(
}
export async function scaleImageToLevel(
fileOrBlobOrURL: File | Blob,
fileOrBlobOrURL: File | Blob | string,
contentType: MIMEType,
size: number,
sendAsHighQuality?: boolean
): Promise<{
blob: Blob;
@@ -136,10 +137,14 @@ export async function scaleImageToLevel(
const level = sendAsHighQuality
? MediaQualityLevels.Three
: getMediaQualityLevel();
const { maxDimensions, quality, size, thresholdSize } =
MEDIA_QUALITY_LEVEL_DATA.get(level) || DEFAULT_LEVEL_DATA;
const {
maxDimensions,
quality,
size: targetSize,
thresholdSize,
} = MEDIA_QUALITY_LEVEL_DATA.get(level) || DEFAULT_LEVEL_DATA;
if (fileOrBlobOrURL.size <= thresholdSize) {
if (size <= thresholdSize) {
// Always encode through canvas as a temporary fix for a library bug
const blob: Blob = await canvasToBlob(data.image, contentType);
return {
@@ -161,7 +166,7 @@ export async function scaleImageToLevel(
scalableDimensions,
quality
);
if (blob.size <= size) {
if (blob.size <= targetSize) {
return {
blob,
contentType: IMAGE_JPEG,

View file

@@ -13,18 +13,22 @@ export async function uploadAttachment(
attachment: AttachmentWithHydratedData
): Promise<UploadedAttachmentType> {
const keys = getRandomBytes(64);
const encrypted = padAndEncryptAttachment(attachment.data, keys);
const encrypted = padAndEncryptAttachment({
plaintext: attachment.data,
keys,
});
const { server } = window.textsecure;
strictAssert(server, 'WebAPI must be initialized');
const cdnKey = await server.putEncryptedAttachment(encrypted.ciphertext);
const size = attachment.data.byteLength;
return {
cdnKey,
cdnNumber: 2,
key: keys,
size: attachment.data.byteLength,
size,
digest: encrypted.digest,
contentType: MIMETypeToString(attachment.contentType),