Make appendPadding infer file size
parent 4caa260a22 · commit 6e2d5dc516
2 changed files with 96 additions and 43 deletions
@@ -11,8 +11,8 @@ import {
   randomBytes,
 } from 'crypto';
 import type { Decipher, Hash, Hmac } from 'crypto';
-import type { TransformCallback } from 'stream';
 import { Transform } from 'stream';
+import type { Readable } from 'stream';
 import { pipeline } from 'stream/promises';
 import { ensureFile } from 'fs-extra';
 import * as log from './logging/log';
@@ -55,12 +55,10 @@ export type DecryptedAttachmentV2 = {
 export async function encryptAttachmentV2({
   keys,
   plaintextAbsolutePath,
-  size,
   dangerousTestOnlyIv,
 }: {
   keys: Readonly<Uint8Array>;
   plaintextAbsolutePath: string;
-  size: number;
   dangerousTestOnlyIv?: Readonly<Uint8Array>;
 }): Promise<EncryptedAttachmentV2> {
   const logId = 'encryptAttachmentV2';
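The visible effect on callers is that `size` disappears from the options object. A minimal sketch of a call site, assuming the module path and the 64-byte key length (mirroring KEY_SET_LENGTH in the tests below); both are placeholders, not taken from this diff:

import { randomBytes } from 'crypto';
import { encryptAttachmentV2 } from './AttachmentCrypto'; // assumed module path

async function example(plaintextAbsolutePath: string) {
  const keys = randomBytes(64); // assumed key length; the tests use KEY_SET_LENGTH

  // Before: encryptAttachmentV2({ keys, plaintextAbsolutePath, size: plaintextByteLength })
  // After: the plaintext length is measured by appendPadding() while streaming.
  return encryptAttachmentV2({ keys, plaintextAbsolutePath });
}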
@@ -98,7 +96,7 @@ export async function encryptAttachmentV2({
   await pipeline(
     readFd.createReadStream(),
     peekAndUpdateHash(plaintextHash),
-    appendPadding(size),
+    appendPadding(),
     createCipheriv(CipherType.AES256CBC, aesKey, iv),
     prependIv(iv),
     appendMac(macKey),
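The reason appendPadding() no longer needs `size`: as a Transform it already sees every plaintext chunk, so it can total the bytes in transform() and emit padding only in flush(), after the input has ended. A stripped-down sketch of that idea, ignoring the backpressure handling the real implementation below adds, with a hypothetical bucket-size function passed in:

import { Transform } from 'stream';

// Simplified illustration only; the actual implementation follows in the next hunk.
function appendPaddingSimplified(bucketSizeFor: (size: number) => number): Transform {
  let fileSize = 0;

  return new Transform({
    transform(chunk, _encoding, callback) {
      fileSize += chunk.byteLength; // just count and pass the data through
      callback(null, chunk);
    },
    flush(callback) {
      // Input is finished, so fileSize is now the full plaintext length.
      const paddingLength = bucketSizeFor(fileSize) - fileSize;
      this.push(Buffer.alloc(paddingLength)); // zero-filled padding, pushed all at once
      callback();
    },
  });
}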
@@ -402,52 +400,57 @@ function* generatePadding(size: number) {
   }
 }
 
+// Push as much padding as we can. If we reach the end
+// of the padding, return true.
+function pushPadding(
+  paddingIterator: Iterator<Uint8Array>,
+  readable: Readable
+): boolean {
+  // eslint-disable-next-line no-constant-condition
+  while (true) {
+    const result = paddingIterator.next();
+    if (result.done) {
+      break;
+    }
+    const keepGoing = readable.push(result.value);
+    if (!keepGoing) {
+      return false;
+    }
+  }
+  return true;
+}
+
 /**
  * Appends zero-padding to the stream to a target bucket size.
  */
-function appendPadding(fileSize: number) {
-  const iterator = generatePadding(fileSize);
-  let bytesWritten = 0;
-  let finalCallback: TransformCallback;
-
-  // Push as much padding as we can. If we reach the end
-  // of the padding, call the callback.
-  function pushPadding(transform: Transform) {
-    // eslint-disable-next-line no-constant-condition
-    while (true) {
-      const result = iterator.next();
-      if (result.done) {
-        break;
-      }
-      const keepGoing = transform.push(result.value);
-      if (!keepGoing) {
-        return;
-      }
-    }
-    finalCallback();
-  }
+function appendPadding() {
+  let onReadableDrained: undefined | (() => void);
+  let fileSize = 0;
 
   return new Transform({
     read(size) {
       // When in the process of pushing padding, we pause and wait for
       // read to be called again.
-      if (finalCallback != null) {
-        pushPadding(this);
+      if (onReadableDrained != null) {
+        onReadableDrained();
       }
       // Always call _read, even if we're done.
       Transform.prototype._read.call(this, size);
     },
     transform(chunk, _encoding, callback) {
-      bytesWritten += chunk.byteLength;
-      // Once we reach the end of the file, start pushing padding.
-      if (bytesWritten >= fileSize) {
-        this.push(chunk);
-        finalCallback = callback;
-        pushPadding(this);
-        return;
-      }
+      fileSize += chunk.byteLength;
       callback(null, chunk);
     },
+    flush(callback) {
+      const iterator = generatePadding(fileSize);
+
+      onReadableDrained = () => {
+        if (pushPadding(iterator, this)) {
+          callback();
+        }
+      };
+      onReadableDrained();
+    },
   });
 }
 
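generatePadding() itself is untouched by this commit and its body is not part of the diff. For orientation, a rough sketch of what such a generator does, assuming the exponential size-bucket scheme Signal uses for attachment padding (round up to the next power of 1.05, with a 541-byte floor) and a 64 KiB chunk size; both details are assumptions, not read from this diff:

// Hypothetical reconstruction for illustration; not the code in this commit.
const PADDING_CHUNK_SIZE = 64 * 1024; // assumed chunk size

function paddedSizeFor(size: number): number {
  // Assumed bucket formula: round up to the next power of 1.05, minimum 541 bytes,
  // so the padded size leaks little about the real size.
  return Math.max(541, Math.floor(1.05 ** Math.ceil(Math.log(size) / Math.log(1.05))));
}

function* generatePaddingSketch(size: number): Generator<Uint8Array> {
  let remaining = paddedSizeFor(size) - size;
  while (remaining > 0) {
    const chunkSize = Math.min(remaining, PADDING_CHUNK_SIZE);
    yield new Uint8Array(chunkSize); // zero-filled
    remaining -= chunkSize;
  }
}

With appendPadding() deferring this work to flush(), the generator's size argument can now be the byte count measured from the stream rather than a value the caller has to supply.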
@@ -1,8 +1,6 @@
 // Copyright 2015 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
 
-/* eslint-disable @typescript-eslint/no-non-null-assertion */
-
 import { assert } from 'chai';
 import { readFileSync, unlinkSync, writeFileSync } from 'fs';
 import { join } from 'path';
@@ -30,6 +28,7 @@ import {
   deriveMasterKeyFromGroupV1,
   encryptSymmetric,
   decryptSymmetric,
+  sha256,
   hmacSha256,
   verifyHmacSha256,
   randomInt,
@@ -609,7 +608,7 @@ describe('Crypto', () => {
   describe('attachments', () => {
     const FILE_PATH = join(__dirname, '../../fixtures/ghost-kitty.mp4');
     const FILE_CONTENTS = readFileSync(FILE_PATH);
-    let tempDir: string | undefined;
+    let tempDir: string;
 
     function generateAttachmentKeys(): Uint8Array {
       return randomBytes(KEY_SET_LENGTH);
@@ -643,7 +642,7 @@ describe('Crypto', () => {
 
     it('v1 -> v2 (memory -> disk)', async () => {
       const keys = generateAttachmentKeys();
-      const ciphertextPath = join(tempDir!, 'file');
+      const ciphertextPath = join(tempDir, 'file');
       let plaintextPath;
 
       try {
@@ -679,7 +678,7 @@ describe('Crypto', () => {
       }
     });
 
-    it('v2 roundtrips (all on disk)', async () => {
+    it('v2 roundtrips smaller file (all on disk)', async () => {
       const keys = generateAttachmentKeys();
       let plaintextPath;
       let ciphertextPath;
@@ -688,7 +687,6 @@ describe('Crypto', () => {
         const encryptedAttachment = await encryptAttachmentV2({
           keys,
           plaintextAbsolutePath: FILE_PATH,
-          size: FILE_CONTENTS.byteLength,
         });
         ciphertextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
           encryptedAttachment.path
@@ -720,6 +718,60 @@ describe('Crypto', () => {
       }
     });
 
+    it('v2 roundtrips random data (all on disk)', async () => {
+      const sourcePath = join(tempDir, 'random');
+      // Get a sufficiently large file to have more than 64kb of padding and
+      // trigger push back on the streams.
+      const data = getRandomBytes(5 * 1024 * 1024);
+      const digest = sha256(data);
+
+      writeFileSync(sourcePath, data);
+
+      const keys = generateAttachmentKeys();
+      let plaintextPath;
+      let ciphertextPath;
+
+      try {
+        const encryptedAttachment = await encryptAttachmentV2({
+          keys,
+          plaintextAbsolutePath: sourcePath,
+        });
+        ciphertextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
+          encryptedAttachment.path
+        );
+        const decryptedAttachment = await decryptAttachmentV2({
+          ciphertextPath,
+          id: 'test',
+          keys,
+          size: data.byteLength,
+          theirDigest: encryptedAttachment.digest,
+        });
+        plaintextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
+          decryptedAttachment.path
+        );
+        const plaintext = readFileSync(plaintextPath);
+        assert.isTrue(constantTimeEqual(data, plaintext));
+        assert.strictEqual(
+          encryptedAttachment.plaintextHash,
+          Bytes.toHex(digest)
+        );
+        assert.strictEqual(
+          decryptedAttachment.plaintextHash,
+          encryptedAttachment.plaintextHash
+        );
+      } finally {
+        if (sourcePath) {
+          unlinkSync(sourcePath);
+        }
+        if (plaintextPath) {
+          unlinkSync(plaintextPath);
+        }
+        if (ciphertextPath) {
+          unlinkSync(ciphertextPath);
+        }
+      }
+    });
+
     it('v2 -> v1 (disk -> memory)', async () => {
       const keys = generateAttachmentKeys();
       let ciphertextPath;
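The new test exists to exercise the backpressure path: with 5 MiB of random input, the padding pushed in flush() exceeds what the Transform's internal buffer accepts in one go, so pushPadding() has to stop when push() returns false and resume from read(). A tiny, self-contained illustration of that push()/read() contract (independent of this diff, using Node's default highWaterMark):

import { Readable } from 'stream';

// Readable.push() returns false once the internal buffer is full; _read() is
// invoked again when the consumer drains it, and pushing resumes from there.
let produced = 0;
const TOTAL = 5 * 1024 * 1024;

const source = new Readable({
  read() {
    while (produced < TOTAL) {
      produced += 64 * 1024;
      if (!this.push(Buffer.alloc(64 * 1024))) {
        return; // back off until read() is called again
      }
    }
    this.push(null); // all data produced
  },
});

source.on('data', () => {});
source.on('end', () => console.log('produced', produced, 'bytes'));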
@@ -728,7 +780,6 @@ describe('Crypto', () => {
         const encryptedAttachment = await encryptAttachmentV2({
           keys,
           plaintextAbsolutePath: FILE_PATH,
-          size: FILE_CONTENTS.byteLength,
         });
         ciphertextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(
           encryptedAttachment.path
@@ -782,7 +833,6 @@ describe('Crypto', () => {
         const encryptedAttachmentV2 = await encryptAttachmentV2({
           keys,
           plaintextAbsolutePath: FILE_PATH,
-          size: FILE_CONTENTS.byteLength,
           dangerousTestOnlyIv,
         });
         ciphertextPath = window.Signal.Migrations.getAbsoluteAttachmentPath(