New attachment storage system

parent 273e1ccb15
commit 28664a606f

161 changed files with 2418 additions and 1562 deletions
@@ -3,45 +3,44 @@

import { unlinkSync, createReadStream, createWriteStream } from 'fs';
import { open } from 'fs/promises';
import {
  createDecipheriv,
  createCipheriv,
  createHash,
  createHmac,
  randomBytes,
} from 'crypto';
import type { Decipher, Hash, Hmac } from 'crypto';
import { createCipheriv, createHash, createHmac, randomBytes } from 'crypto';
import type { Hash } from 'crypto';
import { PassThrough, Transform, type Writable, Readable } from 'stream';
import { pipeline } from 'stream/promises';
import { ensureFile } from 'fs-extra';
import * as log from './logging/log';
import { HashType, CipherType } from './types/Crypto';
import { createName, getRelativePath } from './windows/attachments';
import {
  HashType,
  CipherType,
  IV_LENGTH,
  KEY_LENGTH,
  MAC_LENGTH,
} from './types/Crypto';
import { constantTimeEqual } from './Crypto';
import { createName, getRelativePath } from './util/attachmentPath';
import { appendPaddingStream, logPadSize } from './util/logPadding';
import { prependStream } from './util/prependStream';
import { appendMacStream } from './util/appendMacStream';
import { Environment } from './environment';
import type { AttachmentType } from './types/Attachment';
import type { ContextType } from './types/Message2';
import { getIvAndDecipher } from './util/getIvAndDecipher';
import { getMacAndUpdateHmac } from './util/getMacAndUpdateHmac';
import { trimPadding } from './util/trimPadding';
import { strictAssert } from './util/assert';
import * as Errors from './types/errors';
import { isNotNil } from './util/isNotNil';
import { missingCaseError } from './util/missingCaseError';
import { getEnvironment, Environment } from './environment';

// This file was split from ts/Crypto.ts because it pulls things in from node, and
// too many things pull in Crypto.ts, so it broke storybook.

const IV_LENGTH = 16;
const KEY_LENGTH = 32;
const DIGEST_LENGTH = 32;
const DIGEST_LENGTH = MAC_LENGTH;
const HEX_DIGEST_LENGTH = DIGEST_LENGTH * 2;
const ATTACHMENT_MAC_LENGTH = 32;
const ATTACHMENT_MAC_LENGTH = MAC_LENGTH;

export class ReencyptedDigestMismatchError extends Error {}

/** @private */
export const KEY_SET_LENGTH = KEY_LENGTH + ATTACHMENT_MAC_LENGTH;
export const KEY_SET_LENGTH = KEY_LENGTH + MAC_LENGTH;

export function _generateAttachmentIv(): Uint8Array {
  return randomBytes(IV_LENGTH);
@@ -58,14 +57,31 @@ export type EncryptedAttachmentV2 = {
  ciphertextSize: number;
};

export type ReencryptedAttachmentV2 = {
  path: string;
  iv: Uint8Array;
  plaintextHash: string;

  key: Uint8Array;
};

export type DecryptedAttachmentV2 = {
  path: string;
  iv: Uint8Array;
  plaintextHash: string;
};

export type ReecryptedAttachmentV2 = {
  key: Uint8Array;
  mac: Uint8Array;
  path: string;
  iv: Uint8Array;
  plaintextHash: string;
};

export type PlaintextSourceType =
  | { data: Uint8Array }
  | { stream: Readable }
  | { absolutePath: string };

export type HardcodedIVForEncryptionType =
@@ -84,6 +100,7 @@ type EncryptAttachmentV2PropsType = {
  keys: Readonly<Uint8Array>;
  dangerousIv?: HardcodedIVForEncryptionType;
  dangerousTestOnlySkipPadding?: boolean;
  getAbsoluteAttachmentPath: (relativePath: string) => string;
};
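
For orientation, a sketch of how a caller might use these props. This is illustrative only and not part of the diff: the call site, paths, and values are made up, and it assumes the props object also carries the plaintext source (PlaintextSourceType) that encryptAttachmentV2 consumes later in this file.

// Hypothetical call site (not part of the commit). generateKeys() and the
// prop names come from elsewhere in this diff.
const keys = generateKeys();
const encrypted = await encryptAttachmentV2ToDisk({
  keys,
  plaintext: { absolutePath: '/tmp/photo.jpg' },
  getAbsoluteAttachmentPath: relativePath => `/attachments/${relativePath}`,
});
// encrypted.path is the new relative location on disk; encrypted.ciphertextSize
// is reported by the measureSize stage of the pipeline.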

export async function encryptAttachmentV2ToDisk(
@@ -91,8 +108,7 @@ export async function encryptAttachmentV2ToDisk(
): Promise<EncryptedAttachmentV2 & { path: string }> {
  // Create random output file
  const relativeTargetPath = getRelativePath(createName());
  const absoluteTargetPath =
    window.Signal.Migrations.getAbsoluteAttachmentPath(relativeTargetPath);
  const absoluteTargetPath = args.getAbsoluteAttachmentPath(relativeTargetPath);

  await ensureFile(absoluteTargetPath);

@@ -128,7 +144,7 @@ export async function encryptAttachmentV2({

  if (dangerousIv) {
    if (dangerousIv.reason === 'test') {
      if (window.getEnvironment() !== Environment.Test) {
      if (getEnvironment() !== Environment.Test) {
        throw new Error(
          `${logId}: Used dangerousIv with reason test outside tests!`
        );
@@ -146,10 +162,7 @@ export async function encryptAttachmentV2({
    }
  }

  if (
    dangerousTestOnlySkipPadding &&
    window.getEnvironment() !== Environment.Test
  ) {
  if (dangerousTestOnlySkipPadding && getEnvironment() !== Environment.Test) {
    throw new Error(
      `${logId}: Used dangerousTestOnlySkipPadding outside tests!`
    );
@@ -160,12 +173,17 @@ export async function encryptAttachmentV2({
  const digest = createHash(HashType.size256);

  let ciphertextSize: number | undefined;
  let mac: Uint8Array | undefined;

  try {
    const source =
      'data' in plaintext
        ? Readable.from([Buffer.from(plaintext.data)])
        : createReadStream(plaintext.absolutePath);
    let source: Readable;
    if ('data' in plaintext) {
      source = Readable.from([Buffer.from(plaintext.data)]);
    } else if ('stream' in plaintext) {
      source = plaintext.stream;
    } else {
      source = createReadStream(plaintext.absolutePath);
    }

    await pipeline(
      [
@@ -174,7 +192,9 @@ export async function encryptAttachmentV2({
        dangerousTestOnlySkipPadding ? undefined : appendPaddingStream(),
        createCipheriv(CipherType.AES256CBC, aesKey, iv),
        prependIv(iv),
        appendMacStream(macKey),
        appendMacStream(macKey, macValue => {
          mac = macValue;
        }),
        peekAndUpdateHash(digest),
        measureSize(size => {
          ciphertextSize = size;
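
Read in order, the stages above imply the on-disk ciphertext layout: the padded plaintext is encrypted with AES-256-CBC, the IV is prepended, an HMAC is appended, and the content digest is taken over everything that flows past afterwards. Below is a non-streaming sketch of that layout, illustrative only and not from this commit; it assumes appendMacStream authenticates everything upstream of it (including the prepended IV) and that HashType.size256 / CipherType.AES256CBC map to sha256 / aes-256-cbc.

// Not part of the commit: a buffer-based sketch of the format implied by the
// pipeline stages above (IV_LENGTH = 16, MAC_LENGTH = 32).
import { createCipheriv, createHash, createHmac, randomBytes } from 'crypto';

function encryptLayoutSketch(
  paddedPlaintext: Buffer,
  aesKey: Buffer, // 32 bytes
  macKey: Buffer // 32 bytes
): { encrypted: Buffer; digest: Buffer } {
  const iv = randomBytes(16);
  const cipher = createCipheriv('aes-256-cbc', aesKey, iv);
  const ciphertext = Buffer.concat([cipher.update(paddedPlaintext), cipher.final()]);

  // prependIv + appendMacStream: the mac covers iv || ciphertext.
  const mac = createHmac('sha256', macKey)
    .update(Buffer.concat([iv, ciphertext]))
    .digest();

  // peekAndUpdateHash(digest) runs after the mac stage, so the digest covers
  // iv || ciphertext || mac.
  const encrypted = Buffer.concat([iv, ciphertext, mac]);
  const digest = createHash('sha256').update(encrypted).digest();
  return { encrypted, digest };
}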
@@ -204,6 +224,7 @@ export async function encryptAttachmentV2({
    );

    strictAssert(ciphertextSize != null, 'Failed to measure ciphertext size!');
    strictAssert(mac != null, 'Failed to compute mac!');

    if (dangerousIv?.reason === 'reencrypting-for-backup') {
      if (!constantTimeEqual(ourDigest, dangerousIv.digestToMatch)) {
@@ -221,37 +242,102 @@ export async function encryptAttachmentV2({
  };
}

type DecryptAttachmentOptionsType = Readonly<{
  ciphertextPath: string;
  idForLogging: string;
  aesKey: Readonly<Uint8Array>;
  macKey: Readonly<Uint8Array>;
  size: number;
  theirDigest: Readonly<Uint8Array>;
  outerEncryption?: {
    aesKey: Readonly<Uint8Array>;
    macKey: Readonly<Uint8Array>;
  };
}>;
type DecryptAttachmentToSinkOptionsType = Readonly<
  {
    ciphertextPath: string;
    idForLogging: string;
    size: number;
    outerEncryption?: {
      aesKey: Readonly<Uint8Array>;
      macKey: Readonly<Uint8Array>;
    };
  } & (
    | {
        isLocal?: false;
        theirDigest: Readonly<Uint8Array>;
      }
    | {
        // No need to check integrity for already downloaded attachments
        isLocal: true;
        theirDigest?: undefined;
      }
  ) &
    (
      | {
          aesKey: Readonly<Uint8Array>;
          macKey: Readonly<Uint8Array>;
        }
      | {
          // The format used by most stored attachments
          keysBase64: string;
        }
    )
>;

export type DecryptAttachmentOptionsType = DecryptAttachmentToSinkOptionsType &
  Readonly<{
    getAbsoluteAttachmentPath: (relativePath: string) => string;
  }>;

export async function decryptAttachmentV2(
  options: DecryptAttachmentOptionsType
): Promise<DecryptedAttachmentV2> {
  const {
    idForLogging,
    macKey,
    aesKey,
    ciphertextPath,
    theirDigest,
    outerEncryption,
  } = options;

  const logId = `decryptAttachmentV2(${idForLogging})`;
  const logId = `decryptAttachmentV2(${options.idForLogging})`;

  // Create random output file
  const relativeTargetPath = getRelativePath(createName());
  const absoluteTargetPath =
    window.Signal.Migrations.getAbsoluteAttachmentPath(relativeTargetPath);
    options.getAbsoluteAttachmentPath(relativeTargetPath);

  let writeFd;
  try {
    try {
      await ensureFile(absoluteTargetPath);
      writeFd = await open(absoluteTargetPath, 'w');
    } catch (cause) {
      throw new Error(`${logId}: Failed to create write path`, { cause });
    }

    const result = await decryptAttachmentV2ToSink(
      options,
      writeFd.createWriteStream()
    );

    return {
      ...result,
      path: relativeTargetPath,
    };
  } catch (error) {
    log.error(
      `${logId}: Failed to decrypt attachment to disk`,
      Errors.toLogFormat(error)
    );
    safeUnlinkSync(absoluteTargetPath);
    throw error;
  } finally {
    await writeFd?.close();
  }
}

export async function decryptAttachmentV2ToSink(
  options: DecryptAttachmentToSinkOptionsType,
  sink: Writable
): Promise<Omit<DecryptedAttachmentV2, 'path'>> {
  const { idForLogging, ciphertextPath, outerEncryption } = options;

  let aesKey: Uint8Array;
  let macKey: Uint8Array;

  if ('aesKey' in options) {
    ({ aesKey, macKey } = options);
  } else {
    const { keysBase64 } = options;
    const keys = Buffer.from(keysBase64, 'base64');

    ({ aesKey, macKey } = splitKeys(keys));
  }

  const logId = `decryptAttachmentV2(${idForLogging})`;

  const digest = createHash(HashType.size256);
  const hmac = createHmac(HashType.size256, macKey);
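
For orientation, a sketch of a call through the new keysBase64 + isLocal branch of the options type above. Illustrative only and not part of the diff: the helper name, the paths, and the shape of the attachment record are assumptions; the option names come from DecryptAttachmentToSinkOptionsType.

// Hypothetical caller (not part of the commit).
import { createWriteStream } from 'fs';

async function decryptLocalAttachmentSketch(attachment: {
  path: string; // relative ciphertext path on disk
  key: string; // base64 of the 64-byte key set (aes key || mac key)
  size: number; // plaintext size, used by trimPadding below
}): Promise<string> {
  const { plaintextHash } = await decryptAttachmentV2ToSink(
    {
      ciphertextPath: `/attachments/${attachment.path}`,
      idForLogging: 'sketch',
      keysBase64: attachment.key,
      size: attachment.size,
      isLocal: true, // already downloaded, so no theirDigest to re-check
    },
    createWriteStream('/tmp/attachment.bin')
  );
  return plaintextHash;
}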
@@ -276,7 +362,6 @@ export async function decryptAttachmentV2(
    : undefined;

  let readFd;
  let writeFd;
  let iv: Uint8Array | undefined;
  try {
    try {
@@ -284,12 +369,6 @@
    } catch (cause) {
      throw new Error(`${logId}: Read path doesn't exist`, { cause });
    }
    try {
      await ensureFile(absoluteTargetPath);
      writeFd = await open(absoluteTargetPath, 'w');
    } catch (cause) {
      throw new Error(`${logId}: Failed to create write path`, { cause });
    }

    await pipeline(
      [
@@ -305,18 +384,23 @@
        }),
        trimPadding(options.size),
        peekAndUpdateHash(plaintextHash),
        writeFd.createWriteStream(),
        sink,
      ].filter(isNotNil)
    );
  } catch (error) {
    // These errors happen when canceling fetch from `attachment://` urls,
    // ignore them to avoid noise in the logs.
    if (error.name === 'AbortError') {
      throw error;
    }

    log.error(
      `${logId}: Failed to decrypt attachment`,
      Errors.toLogFormat(error)
    );
    safeUnlinkSync(absoluteTargetPath);
    throw error;
  } finally {
    await Promise.all([readFd?.close(), writeFd?.close()]);
    await readFd?.close();
  }

  const ourMac = hmac.digest();
@@ -343,7 +427,7 @@
  if (!constantTimeEqual(ourMac, theirMac)) {
    throw new Error(`${logId}: Bad MAC`);
  }
  if (!constantTimeEqual(ourDigest, theirDigest)) {
  if (!options.isLocal && !constantTimeEqual(ourDigest, options.theirDigest)) {
    throw new Error(`${logId}: Bad digest`);
  }

@@ -372,12 +456,64 @@
  }

  return {
    path: relativeTargetPath,
    iv,
    plaintextHash: ourPlaintextHash,
  };
}

export async function reencryptAttachmentV2(
  options: DecryptAttachmentOptionsType
): Promise<ReencryptedAttachmentV2> {
  const { idForLogging } = options;

  const logId = `reencryptAttachmentV2(${idForLogging})`;

  // Create random output file
  const relativeTargetPath = getRelativePath(createName());
  const absoluteTargetPath =
    options.getAbsoluteAttachmentPath(relativeTargetPath);

  let writeFd;
  try {
    try {
      await ensureFile(absoluteTargetPath);
      writeFd = await open(absoluteTargetPath, 'w');
    } catch (cause) {
      throw new Error(`${logId}: Failed to create write path`, { cause });
    }

    const keys = generateKeys();

    const passthrough = new PassThrough();
    const [result] = await Promise.all([
      decryptAttachmentV2ToSink(options, passthrough),
      await encryptAttachmentV2({
        keys,
        plaintext: {
          stream: passthrough,
        },
        sink: createWriteStream(absoluteTargetPath),
        getAbsoluteAttachmentPath: options.getAbsoluteAttachmentPath,
      }),
    ]);

    return {
      ...result,
      key: keys,
      path: relativeTargetPath,
    };
  } catch (error) {
    log.error(
      `${logId}: Failed to decrypt attachment`,
      Errors.toLogFormat(error)
    );
    safeUnlinkSync(absoluteTargetPath);
    throw error;
  } finally {
    await writeFd?.close();
  }
}
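
The PassThrough wiring above is the core of re-encryption: the decrypt pipeline writes plaintext into the passthrough while the encrypt pipeline reads from it, so the attachment is never buffered in full. A stripped-down sketch of the same pattern follows; it is illustrative only, and the names are not from this commit.

// Not part of the commit. Both sides must be awaited together: if only the
// producer ran, the PassThrough's internal buffer would fill and stall it.
import { PassThrough, type Readable } from 'stream';
import { pipeline } from 'stream/promises';

async function bridgeSketch(
  source: Readable,
  consume: (plaintext: Readable) => Promise<void>
): Promise<void> {
  const passthrough = new PassThrough();
  await Promise.all([pipeline(source, passthrough), consume(passthrough)]);
}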

/**
 * Splits the keys into aes and mac keys.
 */
@@ -396,6 +532,10 @@ export function splitKeys(keys: Uint8Array): AttachmentEncryptionKeysType {
  return { aesKey, macKey };
}

export function generateKeys(): Uint8Array {
  return randomBytes(KEY_SET_LENGTH);
}
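
The body of splitKeys falls mostly outside this hunk. Given KEY_LENGTH and MAC_LENGTH of 32 bytes each and KEY_SET_LENGTH = KEY_LENGTH + MAC_LENGTH, it presumably slices the 64-byte key set as sketched below; this is an assumption for illustration, not the commit's implementation.

// Not part of the commit: assumed layout of the key set returned by
// generateKeys() above — first 32 bytes AES key, last 32 bytes HMAC key.
function splitKeysSketch(keys: Uint8Array): {
  aesKey: Uint8Array;
  macKey: Uint8Array;
} {
  const aesKey = keys.subarray(0, 32);
  const macKey = keys.subarray(32, 64);
  return { aesKey, macKey };
}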

/**
 * Updates a hash of the stream without modifying it.
 */
@@ -412,122 +552,6 @@ function peekAndUpdateHash(hash: Hash) {
  });
}

/**
 * Updates an hmac with the stream except for the last ATTACHMENT_MAC_LENGTH
 * bytes. The last ATTACHMENT_MAC_LENGTH bytes are passed to the callback.
 */
export function getMacAndUpdateHmac(
  hmac: Hmac,
  onTheirMac: (theirMac: Uint8Array) => void
): Transform {
  // Because we don't have a view of the entire stream, we don't know when we're
  // at the end. We need to omit the last ATTACHMENT_MAC_LENGTH bytes from
  // `hmac.update` so we only push what we know is not the mac.
  let maybeMacBytes = Buffer.alloc(0);

  function updateWithKnownNonMacBytes() {
    let knownNonMacBytes = null;
    if (maybeMacBytes.byteLength > ATTACHMENT_MAC_LENGTH) {
      knownNonMacBytes = maybeMacBytes.subarray(0, -ATTACHMENT_MAC_LENGTH);
      maybeMacBytes = maybeMacBytes.subarray(-ATTACHMENT_MAC_LENGTH);
      hmac.update(knownNonMacBytes);
    }
    return knownNonMacBytes;
  }

  return new Transform({
    transform(chunk, _encoding, callback) {
      try {
        maybeMacBytes = Buffer.concat([maybeMacBytes, chunk]);
        const knownNonMac = updateWithKnownNonMacBytes();
        callback(null, knownNonMac);
      } catch (error) {
        callback(error);
      }
    },
    flush(callback) {
      try {
        onTheirMac(maybeMacBytes);
        callback(null, null);
      } catch (error) {
        callback(error);
      }
    },
  });
}

/**
 * Gets the IV from the start of the stream and creates a decipher.
 * Then deciphers the rest of the stream.
 */
export function getIvAndDecipher(
  aesKey: Uint8Array,
  onFoundIv?: (iv: Buffer) => void
): Transform {
  let maybeIvBytes: Buffer | null = Buffer.alloc(0);
  let decipher: Decipher | null = null;
  return new Transform({
    transform(chunk, _encoding, callback) {
      try {
        // If we've already initialized the decipher, just pass the chunk through.
        if (decipher != null) {
          callback(null, decipher.update(chunk));
          return;
        }

        // Wait until we have enough bytes to get the iv to initialize the
        // decipher.
        maybeIvBytes = Buffer.concat([maybeIvBytes, chunk]);
        if (maybeIvBytes.byteLength < IV_LENGTH) {
          callback(null, null);
          return;
        }

        // Once we have enough bytes, initialize the decipher and pass the
        // remainder of the bytes through.
        const iv = maybeIvBytes.subarray(0, IV_LENGTH);
        const remainder = maybeIvBytes.subarray(IV_LENGTH);
        onFoundIv?.(iv);
        maybeIvBytes = null; // free memory
        decipher = createDecipheriv(CipherType.AES256CBC, aesKey, iv);
        callback(null, decipher.update(remainder));
      } catch (error) {
        callback(error);
      }
    },
    flush(callback) {
      try {
        strictAssert(decipher != null, 'decipher must be set');
        callback(null, decipher.final());
      } catch (error) {
        callback(error);
      }
    },
  });
}

/**
 * Truncates the stream to the target size.
 */
function trimPadding(size: number) {
  let total = 0;
  return new Transform({
    transform(chunk, _encoding, callback) {
      const chunkSize = chunk.byteLength;
      const sizeLeft = size - total;
      if (sizeLeft >= chunkSize) {
        total += chunkSize;
        callback(null, chunk);
      } else if (sizeLeft > 0) {
        total += sizeLeft;
        callback(null, chunk.subarray(0, sizeLeft));
      } else {
        callback(null, null);
      }
    },
  });
}

export function measureSize(onComplete: (size: number) => void): Transform {
  let totalBytes = 0;
  const passthrough = new PassThrough();
@@ -568,62 +592,6 @@ function prependIv(iv: Uint8Array) {
  return prependStream(iv);
}

/**
 * Called during message schema migration. New messages downloaded should have
 * plaintextHash added automatically during decryption / writing to file system.
 */
export async function addPlaintextHashToAttachment(
  attachment: AttachmentType,
  { getAbsoluteAttachmentPath }: ContextType
): Promise<AttachmentType> {
  if (!attachment.path) {
    return attachment;
  }

  const plaintextHash = await getPlaintextHashForAttachmentOnDisk(
    getAbsoluteAttachmentPath(attachment.path)
  );

  if (!plaintextHash) {
    log.error('addPlaintextHashToAttachment: Failed to generate hash');
    return attachment;
  }

  return {
    ...attachment,
    plaintextHash,
  };
}

export async function getPlaintextHashForAttachmentOnDisk(
  absolutePath: string
): Promise<string | undefined> {
  let readFd;
  try {
    try {
      readFd = await open(absolutePath, 'r');
    } catch (error) {
      log.error('addPlaintextHashToAttachment: Target path does not exist');
      return undefined;
    }
    const hash = createHash(HashType.size256);
    await pipeline(readFd.createReadStream(), hash);
    const plaintextHash = hash.digest('hex');
    if (!plaintextHash) {
      log.error(
        'addPlaintextHashToAttachment: no hash generated from file; is the file empty?'
      );
      return;
    }
    return plaintextHash;
  } catch (error) {
    log.error('addPlaintextHashToAttachment: error during file read', error);
    return undefined;
  } finally {
    await readFd?.close();
  }
}

export function getPlaintextHashForInMemoryAttachment(
  data: Uint8Array
): string {