Ensure attachments are re-encryptable to same digest

parent 7d25988888
commit 6e1fd5958e

20 changed files with 1250 additions and 295 deletions
@@ -64,13 +64,21 @@ export type ReencryptedAttachmentV2 = {
  iv: string;
  plaintextHash: string;
  localKey: string;
  isReencryptableToSameDigest: boolean;
  version: 2;
};

export type ReencryptionInfo = {
  iv: string;
  key: string;
  digest: string;
};

export type DecryptedAttachmentV2 = {
  path: string;
  iv: Uint8Array;
  plaintextHash: string;
  isReencryptableToSameDigest: boolean;
};

export type PlaintextSourceType =
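The two flags above form a small contract: an attachment either reproduces its original digest with its stored iv/key, or it must carry freshly generated ReencryptionInfo. A minimal sketch of a consumer follows; the helper name and its parameter shape are assumptions for illustration, not part of this commit.

// Sketch only; assumes base64-encoded iv/key/digest fields as in the types above.
type UploadEncryption = { iv: string; key: string; digest: string };

function pickUploadEncryption(attachment: {
  isReencryptableToSameDigest: boolean;
  iv: string;
  key: string;
  digest: string;
  reencryptionInfo?: UploadEncryption; // present only when the original digest is unreachable
}): UploadEncryption {
  if (attachment.isReencryptableToSameDigest) {
    // Stored iv/key reproduce the original ciphertext, so the original digest stays valid.
    return { iv: attachment.iv, key: attachment.key, digest: attachment.digest };
  }
  if (!attachment.reencryptionInfo) {
    throw new Error('non-reencryptable attachment must carry new reencryption info');
  }
  return attachment.reencryptionInfo;
}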
@@ -356,6 +364,7 @@ export async function decryptAttachmentV2ToSink(
        })
      : undefined;

  let isPaddingAllZeros = false;
  let readFd;
  let iv: Uint8Array | undefined;
  try {

@@ -377,7 +386,9 @@ export async function decryptAttachmentV2ToSink(
      getIvAndDecipher(aesKey, theirIv => {
        iv = theirIv;
      }),
      trimPadding(options.size),
      trimPadding(options.size, paddingAnalysis => {
        isPaddingAllZeros = paddingAnalysis.isPaddingAllZeros;
      }),
      peekAndUpdateHash(plaintextHash),
      finalStream(() => {
        const ourMac = hmac.digest();

@@ -469,8 +480,13 @@ export async function decryptAttachmentV2ToSink(
    `${logId}: failed to find their iv`
  );

  if (!isPaddingAllZeros) {
    log.warn(`${logId}: Attachment had non-zero padding`);
  }

  return {
    iv,
    isReencryptableToSameDigest: isPaddingAllZeros,
    plaintextHash: ourPlaintextHash,
  };
}
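The isReencryptableToSameDigest value returned above reduces to a padding check: plaintext is padded to a bucketed size before encryption, and re-encrypting locally can only reproduce the sender's digest if the padding we would emit (all zeros) matches the padding the sender actually used. A standalone sketch of that check, with the stream plumbing omitted:

// Sketch: `plaintextWithPadding` is the decrypted payload including padding;
// `size` is the declared plaintext length (options.size above).
function paddingIsAllZeros(plaintextWithPadding: Uint8Array, size: number): boolean {
  for (let i = size; i < plaintextWithPadding.byteLength; i += 1) {
    if (plaintextWithPadding[i] !== 0) {
      return false; // non-zero padding: our re-encryption would yield a different digest
    }
  }
  return true; // all-zero padding: same key and iv reproduce the exact digest
}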
@@ -512,10 +528,11 @@ export async function decryptAndReencryptLocally(
    ]);

    return {
      ...result,
      localKey: toBase64(keys),
      iv: toBase64(result.iv),
      path: relativeTargetPath,
      plaintextHash: result.plaintextHash,
      isReencryptableToSameDigest: result.isReencryptableToSameDigest,
      version: 2,
    };
  } catch (error) {
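A sketch of a caller persisting the enriched result; the persistence helper is hypothetical, and only the returned shape comes from the diff.

// Sketch only.
declare function saveAttachmentMetadata(attachment: unknown): Promise<void>; // hypothetical

async function reencryptAndRecord(
  args: Parameters<typeof decryptAndReencryptLocally>[0]
): Promise<void> {
  const reencrypted = await decryptAndReencryptLocally(args);
  // isReencryptableToSameDigest now rides along with iv, localKey, and
  // plaintextHash, so backup code can later tell whether fresh encryption
  // info must be generated for this attachment.
  await saveAttachmentMetadata(reencrypted);
}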
@@ -92,7 +92,7 @@ type AttachmentDownloadManagerParamsType = Omit<
    job: AttachmentDownloadJobType;
    isLastAttempt: boolean;
    options?: { isForCurrentlyVisibleMessage: boolean };
    dependencies: { downloadAttachment: typeof downloadAttachmentUtil };
    dependencies?: DependenciesType;
  }) => Promise<JobManagerJobResultType<CoreAttachmentDownloadJobType>>;
};

@@ -157,7 +157,6 @@ export class AttachmentDownloadManager extends JobManager<CoreAttachmentDownload
          options: {
            isForCurrentlyVisibleMessage,
          },
          dependencies: { downloadAttachment: downloadAttachmentUtil },
        });
      },
    });

@@ -239,16 +238,24 @@ export class AttachmentDownloadManager extends JobManager<CoreAttachmentDownload
    );
  }
}

type DependenciesType = {
  downloadAttachment: typeof downloadAttachmentUtil;
  processNewAttachment: typeof window.Signal.Migrations.processNewAttachment;
};
async function runDownloadAttachmentJob({
  job,
  isLastAttempt,
  options,
  dependencies,
  dependencies = {
    downloadAttachment: downloadAttachmentUtil,
    processNewAttachment: window.Signal.Migrations.processNewAttachment,
  },
}: {
  job: AttachmentDownloadJobType;
  isLastAttempt: boolean;
  options?: { isForCurrentlyVisibleMessage: boolean };
  dependencies: { downloadAttachment: typeof downloadAttachmentUtil };
  dependencies?: DependenciesType;
}): Promise<JobManagerJobResultType<CoreAttachmentDownloadJobType>> {
  const jobIdForLogging = getJobIdForLogging(job);
  const logId = `AttachmentDownloadManager/runDownloadAttachmentJob/${jobIdForLogging}`;
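The change above swaps a required dependencies argument for one with production defaults, so call sites stay unchanged while tests inject stubs. The pattern in isolation (names illustrative, not from this commit):

// Sketch of the defaulted-dependencies pattern used above.
type Deps = {
  fetchData: (id: string) => Promise<string>;
};

async function runJob(
  id: string,
  deps: Deps = { fetchData: async jobId => `real:${jobId}` } // real impl by default
): Promise<string> {
  return deps.fetchData(id);
}

// In a test: await runJob('abc', { fetchData: async () => 'stubbed' });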
@@ -362,7 +369,7 @@ export async function runDownloadAttachmentJobInner({
}: {
  job: AttachmentDownloadJobType;
  isForCurrentlyVisibleMessage: boolean;
  dependencies: { downloadAttachment: typeof downloadAttachmentUtil };
  dependencies: DependenciesType;
}): Promise<DownloadAttachmentResultType> {
  const { messageId, attachment, attachmentType } = job;

@@ -437,11 +444,10 @@ export async function runDownloadAttachmentJobInner({
      variant: AttachmentVariant.Default,
    });

    const upgradedAttachment =
      await window.Signal.Migrations.processNewAttachment({
        ...omit(attachment, ['error', 'pending', 'downloadPath']),
        ...downloaded,
      });
    const upgradedAttachment = await dependencies.processNewAttachment({
      ...omit(attachment, ['error', 'pending', 'downloadPath']),
      ...downloaded,
    });

    await addAttachmentToMessage(messageId, upgradedAttachment, logId, {
      type: attachmentType,
@@ -16,31 +16,30 @@ import {
  isAttachmentLocallySaved,
  type AttachmentDownloadableFromTransitTier,
  type AttachmentDownloadableFromBackupTier,
  type LocallySavedAttachment,
  type AttachmentReadyForBackup,
  isDecryptable,
  isReencryptableToSameDigest,
  isReencryptableWithNewEncryptionInfo,
  type ReencryptableAttachment,
} from '../../../types/Attachment';
import { Backups, SignalService } from '../../../protobuf';
import * as Bytes from '../../../Bytes';
import { getTimestampFromLong } from '../../../util/timestampLongUtils';
import {
  encryptAttachmentV2,
  generateAttachmentKeys,
} from '../../../AttachmentCrypto';
import { strictAssert } from '../../../util/assert';
import type { CoreAttachmentBackupJobType } from '../../../types/AttachmentBackup';
import {
  type GetBackupCdnInfoType,
  getMediaIdForAttachment,
  getMediaIdFromMediaName,
  getMediaNameForAttachment,
  getMediaNameFromDigest,
  type BackupCdnInfoType,
} from './mediaId';
import { redactGenericText } from '../../../util/privacy';
import { missingCaseError } from '../../../util/missingCaseError';
import { toLogFormat } from '../../../types/errors';
import { bytesToUuid } from '../../../util/uuidToBytes';
import { createName } from '../../../util/attachmentPath';
import { ensureAttachmentIsReencryptable } from '../../../util/ensureAttachmentIsReencryptable';
import type { ReencryptionInfo } from '../../../AttachmentCrypto';

export function convertFilePointerToAttachment(
  filePointer: Backups.FilePointer,
@@ -166,58 +165,12 @@ export function convertBackupMessageAttachmentToAttachment(
  return result;
}

/**
 * Some attachments saved on desktop do not include the key used to encrypt the file
 * originally. This means that we need to encrypt the file in-memory now (at
 * export-creation time) to calculate the digest which will be saved in the backup proto
 * along with the new keys.
 */

async function generateNewEncryptionInfoForAttachment(
  attachment: Readonly<LocallySavedAttachment>
): Promise<AttachmentReadyForBackup> {
  const fixedUpAttachment = { ...attachment };

  // Since we are changing the encryption, we need to delete all encryption & location
  // related info
  delete fixedUpAttachment.cdnId;
  delete fixedUpAttachment.cdnKey;
  delete fixedUpAttachment.cdnNumber;
  delete fixedUpAttachment.backupLocator;
  delete fixedUpAttachment.uploadTimestamp;
  delete fixedUpAttachment.digest;
  delete fixedUpAttachment.iv;
  delete fixedUpAttachment.key;

  const keys = generateAttachmentKeys();

  // encrypt this file without writing the ciphertext to disk in order to calculate the
  // digest
  const { digest, iv } = await encryptAttachmentV2({
    keys,
    plaintext: {
      absolutePath: window.Signal.Migrations.getAbsoluteAttachmentPath(
        attachment.path
      ),
    },
    getAbsoluteAttachmentPath:
      window.Signal.Migrations.getAbsoluteAttachmentPath,
  });

  return {
    ...fixedUpAttachment,
    digest: Bytes.toBase64(digest),
    iv: Bytes.toBase64(iv),
    key: Bytes.toBase64(keys),
  };
}

export async function getFilePointerForAttachment({
  attachment,
  backupLevel,
  getBackupCdnInfo,
}: {
  attachment: AttachmentType;
  attachment: Readonly<AttachmentType>;
  backupLevel: BackupLevel;
  getBackupCdnInfo: GetBackupCdnInfoType;
}): Promise<{
@@ -314,54 +267,43 @@ export async function getFilePointerForAttachment({
    };
  }

  // Some attachments (e.g. those quoted ones copied from the original message) may not
  // have any encryption info, including a digest.
  if (attachment.digest) {
    // From here on, this attachment is headed to (or already on) the backup tier!
    const mediaNameForCurrentVersionOfAttachment =
      getMediaNameForAttachment(attachment);
  // From here on, this attachment is headed to (or already on) the backup tier!
  const mediaNameForCurrentVersionOfAttachment = attachment.digest
    ? getMediaNameForAttachment(attachment)
    : undefined;

    const backupCdnInfo = await getBackupCdnInfo(
      getMediaIdFromMediaName(mediaNameForCurrentVersionOfAttachment).string
    );
  const backupCdnInfo: BackupCdnInfoType =
    mediaNameForCurrentVersionOfAttachment
      ? await getBackupCdnInfo(
          getMediaIdFromMediaName(mediaNameForCurrentVersionOfAttachment).string
        )
      : { isInBackupTier: false };

    // We can generate a backupLocator for this mediaName iff
    // 1. we have iv, key, and digest so we can re-encrypt to the existing digest when
    //    uploading, or
    // 2. the mediaId is already in the backup tier and we have the key & digest to
    //    decrypt and verify it
    if (
      isReencryptableToSameDigest(attachment) ||
      (backupCdnInfo.isInBackupTier && isDecryptable(attachment))
    ) {
      return {
        filePointer: new Backups.FilePointer({
          ...filePointerRootProps,
          backupLocator: getBackupLocator({
            ...attachment,
            backupLocator: {
              mediaName: mediaNameForCurrentVersionOfAttachment,
              cdnNumber: backupCdnInfo.isInBackupTier
                ? backupCdnInfo.cdnNumber
                : undefined,
            },
          }),
  // If we have key & digest for this attachment and it's already on backup tier, we can
  // reference it
  if (isDecryptable(attachment) && backupCdnInfo.isInBackupTier) {
    strictAssert(mediaNameForCurrentVersionOfAttachment, 'must exist');
    return {
      filePointer: new Backups.FilePointer({
        ...filePointerRootProps,
        backupLocator: getBackupLocator({
          ...attachment,
          backupLocator: {
            mediaName: mediaNameForCurrentVersionOfAttachment,
            cdnNumber: backupCdnInfo.isInBackupTier
              ? backupCdnInfo.cdnNumber
              : undefined,
          },
        }),
      };
    }
      }),
    };
  }

  let attachmentWithNewEncryptionInfo: AttachmentReadyForBackup | undefined;
  let reencryptableAttachment: ReencryptableAttachment;
  try {
    log.info(`${logId}: Generating new encryption info for attachment`);
    attachmentWithNewEncryptionInfo =
      await generateNewEncryptionInfoForAttachment(attachment);
    reencryptableAttachment = await ensureAttachmentIsReencryptable(attachment);
  } catch (e) {
    log.error(
      `${logId}: Error when generating new encryption info for attachment`,
      toLogFormat(e)
    );

    log.warn('Unable to ensure attachment is reencryptable', toLogFormat(e));
    return {
      filePointer: new Backups.FilePointer({
        ...filePointerRootProps,

@@ -370,18 +312,53 @@ export async function getFilePointerForAttachment({
    };
  }

  // If we've confirmed that we can re-encrypt this attachment to the same digest, we can
  // generate a backupLocator (and upload the file)
  if (isReencryptableToSameDigest(reencryptableAttachment)) {
    return {
      filePointer: new Backups.FilePointer({
        ...filePointerRootProps,
        backupLocator: getBackupLocator({
          ...reencryptableAttachment,
          backupLocator: {
            mediaName: getMediaNameFromDigest(reencryptableAttachment.digest),
            cdnNumber: backupCdnInfo.isInBackupTier
              ? backupCdnInfo.cdnNumber
              : undefined,
          },
        }),
      }),
      updatedAttachment: reencryptableAttachment,
    };
  }

  strictAssert(
    reencryptableAttachment.reencryptionInfo,
    'Reencryption info must exist if not reencryptable to original digest'
  );

  const mediaNameForNewEncryptionInfo = getMediaNameFromDigest(
    reencryptableAttachment.reencryptionInfo.digest
  );
  const backupCdnInfoForNewEncryptionInfo = await getBackupCdnInfo(
    getMediaIdFromMediaName(mediaNameForNewEncryptionInfo).string
  );

  return {
    filePointer: new Backups.FilePointer({
      ...filePointerRootProps,
      backupLocator: getBackupLocator({
        ...attachmentWithNewEncryptionInfo,
        size: reencryptableAttachment.size,
        ...reencryptableAttachment.reencryptionInfo,
        backupLocator: {
          mediaName: getMediaNameForAttachment(attachmentWithNewEncryptionInfo),
          cdnNumber: undefined,
          mediaName: mediaNameForNewEncryptionInfo,
          cdnNumber: backupCdnInfoForNewEncryptionInfo.isInBackupTier
            ? backupCdnInfoForNewEncryptionInfo.cdnNumber
            : undefined,
        },
      }),
    }),
    updatedAttachment: attachmentWithNewEncryptionInfo,
    updatedAttachment: reencryptableAttachment,
  };
}
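Condensed, the restructured getFilePointerForAttachment above makes a three-way choice. A sketch of that decision only; the real code builds protobuf FilePointers and also returns an updated attachment:

type BackupLocatorChoice = 'reference-existing' | 'same-digest' | 'new-encryption-info';

function chooseBackupLocatorSource(state: {
  isInBackupTier: boolean;
  isDecryptable: boolean;
  isReencryptableToSameDigest: boolean;
}): BackupLocatorChoice {
  if (state.isInBackupTier && state.isDecryptable) {
    return 'reference-existing'; // already uploaded; just point at it
  }
  if (state.isReencryptableToSameDigest) {
    return 'same-digest'; // original iv/key/digest still work for upload
  }
  return 'new-encryption-info'; // fall back to freshly generated iv/key/digest
}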
@@ -400,7 +377,12 @@ function getAttachmentLocator(
  });
}

function getBackupLocator(attachment: AttachmentDownloadableFromBackupTier) {
function getBackupLocator(
  attachment: Pick<
    AttachmentDownloadableFromBackupTier,
    'backupLocator' | 'digest' | 'key' | 'size' | 'cdnKey' | 'cdnNumber'
  >
) {
  return new Backups.FilePointer.BackupLocator({
    mediaName: attachment.backupLocator.mediaName,
    cdnNumber: attachment.backupLocator.cdnNumber,
@@ -431,40 +413,51 @@ export async function maybeGetBackupJobForAttachmentAndFilePointer({
    return null;
  }

  const mediaName = getMediaNameForAttachment(attachment);
  const { mediaName } = filePointer.backupLocator;
  strictAssert(mediaName, 'mediaName must exist');

  const { isInBackupTier } = await getBackupCdnInfo(
    getMediaIdForAttachment(attachment).string
    getMediaIdFromMediaName(mediaName).string
  );

  if (isInBackupTier) {
    return null;
  }

  strictAssert(
    isReencryptableToSameDigest(attachment),
    'Attachment must now have all required info for re-encryption'
  );

  strictAssert(
    isAttachmentLocallySaved(attachment),
    'Attachment must be saved locally for it to be backed up'
  );

  const {
    path,
    contentType,
    key: keys,
    digest,
    iv,
    size,
    cdnKey,
    cdnNumber,
    uploadTimestamp,
    version,
    localKey,
  } = attachment;
  let encryptionInfo: ReencryptionInfo | undefined;

  if (isReencryptableToSameDigest(attachment)) {
    encryptionInfo = {
      iv: attachment.iv,
      key: attachment.key,
      digest: attachment.digest,
    };
  } else {
    strictAssert(
      isReencryptableWithNewEncryptionInfo(attachment) === true,
      'must have new encryption info'
    );
    encryptionInfo = attachment.reencryptionInfo;
  }

  strictAssert(
    filePointer.backupLocator.digest,
    'digest must exist on backupLocator'
  );
  strictAssert(
    encryptionInfo.digest === Bytes.toBase64(filePointer.backupLocator.digest),
    'digest on job and backupLocator must match'
  );

  const { path, contentType, size, uploadTimestamp, version, localKey } =
    attachment;

  const { transitCdnKey, transitCdnNumber } = filePointer.backupLocator;

  return {
    mediaName,

@@ -473,17 +466,17 @@
    data: {
      path,
      contentType,
      keys,
      digest,
      iv,
      keys: encryptionInfo.key,
      digest: encryptionInfo.digest,
      iv: encryptionInfo.iv,
      size,
      version,
      localKey,
      transitCdnInfo:
        cdnKey && cdnNumber != null
        transitCdnKey != null && transitCdnNumber != null
          ? {
              cdnKey,
              cdnNumber,
              cdnKey: transitCdnKey,
              cdnNumber: transitCdnNumber,
              uploadTimestamp,
            }
          : undefined,
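The branch above that picks which encryption info feeds the backup job, extracted as a standalone helper for readability; this is a sketch that reuses the guards and types imported earlier in this file and mirrors the logic above rather than adding new behavior.

function chooseEncryptionInfo(attachment: AttachmentType): ReencryptionInfo {
  if (isReencryptableToSameDigest(attachment)) {
    // Original encryption parameters still reproduce the original digest.
    return { iv: attachment.iv, key: attachment.key, digest: attachment.digest };
  }
  strictAssert(
    isReencryptableWithNewEncryptionInfo(attachment),
    'must have new encryption info'
  );
  return attachment.reencryptionInfo;
}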
@@ -42,7 +42,11 @@ export function getMediaNameForAttachment(attachment: AttachmentType): string {
    return attachment.backupLocator.mediaName;
  }
  strictAssert(attachment.digest, 'Digest must be present');
  return Bytes.toHex(Bytes.fromBase64(attachment.digest));
  return getMediaNameFromDigest(attachment.digest);
}

export function getMediaNameFromDigest(digest: string): string {
  return Bytes.toHex(Bytes.fromBase64(digest));
}

export function getMediaNameForAttachmentThumbnail(

@@ -55,11 +59,13 @@ export function getBytesFromMediaIdString(mediaId: string): Uint8Array {
  return Bytes.fromBase64url(mediaId);
}

export type BackupCdnInfoType =
  | { isInBackupTier: true; cdnNumber: number }
  | { isInBackupTier: false };

export type GetBackupCdnInfoType = (
  mediaId: string
) => Promise<
  { isInBackupTier: true; cdnNumber: number } | { isInBackupTier: false }
>;
) => Promise<BackupCdnInfoType>;

export const getBackupCdnInfo: GetBackupCdnInfoType = async (
  mediaId: string
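The helpers above pin down the naming chain: a base64 digest becomes a mediaName by hex-encoding the digest bytes, and the mediaId is derived from that mediaName elsewhere in this file. A self-contained illustration using Node's Buffer in place of the project's Bytes module:

function mediaNameFromBase64Digest(digest: string): string {
  // Same transformation as getMediaNameFromDigest above.
  return Buffer.from(digest, 'base64').toString('hex');
}

// mediaNameFromBase64Digest('3q2+7w==') === 'deadbeef'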
@@ -4654,7 +4654,6 @@ function getUnprocessedCount(db: ReadableDB): number {
}

function getAllUnprocessedIds(db: WritableDB): Array<string> {
  logger.info('getAllUnprocessedIds');
  return db.transaction(() => {
    // cleanup first
    const { changes: deletedStaleCount } = db
@@ -45,6 +45,7 @@ import {
  getAttachmentCiphertextLength,
  splitKeys,
  generateAttachmentKeys,
  type DecryptedAttachmentV2,
} from '../AttachmentCrypto';
import { createTempDir, deleteTempDir } from '../updater/common';
import { uuidToBytes, bytesToUuid } from '../util/uuidToBytes';

@@ -610,13 +611,15 @@ describe('Crypto', () => {
    plaintextHash,
    encryptionKeys,
    dangerousIv,
    overrideSize,
  }: {
    path?: string;
    data: Uint8Array;
    plaintextHash: Uint8Array;
    plaintextHash?: Uint8Array;
    encryptionKeys?: Uint8Array;
    dangerousIv?: HardcodedIVForEncryptionType;
  }): Promise<void> {
    overrideSize?: number;
  }): Promise<DecryptedAttachmentV2> {
    let plaintextPath;
    let ciphertextPath;
    const keys = encryptionKeys ?? generateAttachmentKeys();

@@ -639,7 +642,7 @@ describe('Crypto', () => {
        ciphertextPath,
        idForLogging: 'test',
        ...splitKeys(keys),
        size: data.byteLength,
        size: overrideSize ?? data.byteLength,
        theirDigest: encryptedAttachment.digest,
        getAbsoluteAttachmentPath:
          window.Signal.Migrations.getAbsoluteAttachmentPath,

@@ -664,19 +667,27 @@ describe('Crypto', () => {
        }
      }

      assert.isTrue(constantTimeEqual(data, plaintext));
      assert.strictEqual(
        encryptedAttachment.ciphertextSize,
        getAttachmentCiphertextLength(data.byteLength)
      );
      assert.strictEqual(
        encryptedAttachment.plaintextHash,
        Bytes.toHex(plaintextHash)
      );
      assert.strictEqual(
        decryptedAttachment.plaintextHash,
        encryptedAttachment.plaintextHash
      );

      if (overrideSize == null) {
        assert.isTrue(constantTimeEqual(data, plaintext));
        assert.strictEqual(
          decryptedAttachment.plaintextHash,
          encryptedAttachment.plaintextHash
        );
      }

      if (plaintextHash) {
        assert.strictEqual(
          encryptedAttachment.plaintextHash,
          Bytes.toHex(plaintextHash)
        );
      }

      return decryptedAttachment;
    } finally {
      if (plaintextPath) {
        unlinkSync(plaintextPath);

@@ -736,6 +747,25 @@ describe('Crypto', () => {
        plaintextHash,
      });
    });

    describe('isPaddingAllZeros', () => {
      it('detects all zeros', async () => {
        const decryptedResult = await testV2RoundTripData({
          data: FILE_CONTENTS,
        });
        assert.isTrue(decryptedResult.isReencryptableToSameDigest);
      });
      it('detects non-zero padding', async () => {
        const modifiedData = Buffer.concat([FILE_CONTENTS, Buffer.from([1])]);
        const decryptedResult = await testV2RoundTripData({
          data: modifiedData,
          overrideSize: FILE_CONTENTS.byteLength,
          // setting the size as one less than the actual file size will cause the last
          // byte (`1`) to be considered padding during decryption
        });
        assert.isFalse(decryptedResult.isReencryptableToSameDigest);
      });
    });
    describe('dangerousIv', () => {
      it('uses hardcodedIv in tests', async () => {
        await testV2RoundTripData({
@@ -35,6 +35,19 @@ import { loadAll } from '../../services/allLoaders';

const CONTACT_A = generateAci();

const NON_ROUNDTRIPPED_FIELDS = [
  'path',
  'iv',
  'thumbnail',
  'screenshot',
  'isReencryptableToSameDigest',
];

const NON_ROUNDTRIPPED_BACKUP_LOCATOR_FIELDS = [
  ...NON_ROUNDTRIPPED_FIELDS,
  'uploadTimestamp',
];

describe('backup/attachments', () => {
  let sandbox: sinon.SinonSandbox;
  let contactA: ConversationModel;

@@ -98,6 +111,7 @@ describe('backup/attachments', () => {
      size: 100,
      contentType: IMAGE_JPEG,
      path: `/path/to/file${index}.png`,
      isReencryptableToSameDigest: true,
      uploadTimestamp: index,
      thumbnail: {
        size: 1024,

@@ -153,8 +167,8 @@ describe('backup/attachments', () => {
      [
        composeMessage(1, {
          attachments: [
            omit(longMessageAttachment, ['path', 'iv', 'thumbnail']),
            omit(normalAttachment, ['path', 'iv', 'thumbnail']),
            omit(longMessageAttachment, NON_ROUNDTRIPPED_FIELDS),
            omit(normalAttachment, NON_ROUNDTRIPPED_FIELDS),
          ],
        }),
      ],

@@ -227,7 +241,7 @@ describe('backup/attachments', () => {
        composeMessage(1, {
          body: 'a'.repeat(2048),
          bodyAttachment: {
            ...omit(attachment, ['iv', 'path', 'uploadTimestamp']),
            ...omit(attachment, NON_ROUNDTRIPPED_BACKUP_LOCATOR_FIELDS),
            backupLocator: {
              mediaName: digestToMediaName(attachment.digest),
            },

@@ -269,8 +283,8 @@ describe('backup/attachments', () => {
      [
        composeMessage(1, {
          attachments: [
            omit(attachment1, ['path', 'iv', 'thumbnail']),
            omit(attachment2, ['path', 'iv', 'thumbnail']),
            omit(attachment1, NON_ROUNDTRIPPED_FIELDS),
            omit(attachment2, NON_ROUNDTRIPPED_FIELDS),
          ],
        }),
      ],

@@ -293,12 +307,7 @@ describe('backup/attachments', () => {
          // but there will be a backupLocator
          attachments: [
            {
              ...omit(attachment, [
                'path',
                'iv',
                'thumbnail',
                'uploadTimestamp',
              ]),
              ...omit(attachment, NON_ROUNDTRIPPED_BACKUP_LOCATOR_FIELDS),
              backupLocator: {
                mediaName: digestToMediaName(attachment.digest),
              },

@@ -327,12 +336,7 @@ describe('backup/attachments', () => {
        composeMessage(1, {
          attachments: [
            {
              ...omit(attachment, [
                'path',
                'iv',
                'thumbnail',
                'uploadTimestamp',
              ]),
              ...omit(attachment, NON_ROUNDTRIPPED_BACKUP_LOCATOR_FIELDS),
              backupLocator: {
                mediaName: digestToMediaName(attachment.digest),
              },

@@ -362,7 +366,7 @@ describe('backup/attachments', () => {
            {
              url: 'url',
              date: 1,
              image: omit(attachment, ['path', 'iv', 'thumbnail']),
              image: omit(attachment, NON_ROUNDTRIPPED_FIELDS),
            },
          ],
        }),

@@ -399,12 +403,7 @@ describe('backup/attachments', () => {
              image: {
                // path, iv, and uploadTimestamp will not be roundtripped,
                // but there will be a backupLocator
                ...omit(attachment, [
                  'path',
                  'iv',
                  'thumbnail',
                  'uploadTimestamp',
                ]),
                ...omit(attachment, NON_ROUNDTRIPPED_BACKUP_LOCATOR_FIELDS),
                backupLocator: {
                  mediaName: digestToMediaName(attachment.digest),
                },

@@ -434,7 +433,7 @@ describe('backup/attachments', () => {
          contact: [
            {
              avatar: {
                avatar: omit(attachment, ['path', 'iv', 'thumbnail']),
                avatar: omit(attachment, NON_ROUNDTRIPPED_FIELDS),
                isProfile: false,
              },
            },

@@ -462,12 +461,7 @@ describe('backup/attachments', () => {
            {
              avatar: {
                avatar: {
                  ...omit(attachment, [
                    'path',
                    'iv',
                    'thumbnail',
                    'uploadTimestamp',
                  ]),
                  ...omit(attachment, NON_ROUNDTRIPPED_BACKUP_LOCATOR_FIELDS),
                  backupLocator: {
                    mediaName: digestToMediaName(attachment.digest),
                  },

@@ -511,7 +505,7 @@ describe('backup/attachments', () => {
          referencedMessageNotFound: true,
          attachments: [
            {
              thumbnail: omit(attachment, ['iv', 'path', 'thumbnail']),
              thumbnail: omit(attachment, NON_ROUNDTRIPPED_FIELDS),
              contentType: VIDEO_MP4,
            },
          ],

@@ -549,12 +543,7 @@ describe('backup/attachments', () => {
          attachments: [
            {
              thumbnail: {
                ...omit(attachment, [
                  'iv',
                  'path',
                  'uploadTimestamp',
                  'thumbnail',
                ]),
                ...omit(attachment, NON_ROUNDTRIPPED_BACKUP_LOCATOR_FIELDS),
                backupLocator: {
                  mediaName: digestToMediaName(attachment.digest),
                },

@@ -602,12 +591,10 @@ describe('backup/attachments', () => {
        ...existingMessage,
        attachments: [
          {
            ...omit(existingAttachment, [
              'path',
              'iv',
              'uploadTimestamp',
              'thumbnail',
            ]),
            ...omit(
              existingAttachment,
              NON_ROUNDTRIPPED_BACKUP_LOCATOR_FIELDS
            ),
            backupLocator: {
              mediaName: digestToMediaName(existingAttachment.digest),
            },

@@ -624,7 +611,10 @@ describe('backup/attachments', () => {
            // The thumbnail will not have been copied over yet since it has not yet
            // been downloaded
            thumbnail: {
              ...omit(quoteAttachment, ['iv', 'path', 'uploadTimestamp']),
              ...omit(
                quoteAttachment,
                NON_ROUNDTRIPPED_BACKUP_LOCATOR_FIELDS
              ),
              backupLocator: {
                mediaName: digestToMediaName(quoteAttachment.digest),
              },

@@ -854,12 +844,7 @@ describe('backup/attachments', () => {
          packKey,
          stickerId: 0,
          data: {
            ...omit(attachment, [
              'iv',
              'path',
              'thumbnail',
              'uploadTimestamp',
            ]),
            ...omit(attachment, NON_ROUNDTRIPPED_BACKUP_LOCATOR_FIELDS),
            backupLocator: {
              mediaName: digestToMediaName(attachment.digest),
            },
@@ -4,6 +4,7 @@ import { assert } from 'chai';
import Long from 'long';
import { join } from 'path';
import * as sinon from 'sinon';
import { readFileSync } from 'fs';
import { BackupLevel } from '@signalapp/libsignal-client/zkgroup';
import { DataWriter } from '../../sql/Client';
import { Backups } from '../../protobuf';

@@ -19,6 +20,8 @@ import { strictAssert } from '../../util/assert';
import type { GetBackupCdnInfoType } from '../../services/backups/util/mediaId';
import { MASTER_KEY } from './helpers';
import { getRandomBytes } from '../../Crypto';
import { generateKeys, safeUnlink } from '../../AttachmentCrypto';
import { writeNewAttachmentData } from '../../windows/attachments';

describe('convertFilePointerToAttachment', () => {
  it('processes filepointer with attachmentLocator', () => {

@@ -190,7 +193,8 @@ function composeAttachment(
    incrementalMac: 'incrementalMac',
    incrementalMacChunkSize: 1000,
    uploadTimestamp: 1234,
    localKey: Bytes.toBase64(getRandomBytes(32)),
    localKey: Bytes.toBase64(generateKeys()),
    isReencryptableToSameDigest: true,
    version: 2,
    ...overrides,
  };

@@ -429,30 +433,75 @@ describe('getFilePointerForAttachment', () => {
    });
  });
  describe('BackupLevel.Media', () => {
    describe('if missing critical decryption / encryption info', () => {
      const FILE_PATH = join(__dirname, '../../../fixtures/ghost-kitty.mp4');
    describe('if missing critical decryption / encryption info', async () => {
      let ciphertextFilePath: string;
      const attachmentNeedingEncryptionInfo: AttachmentType = {
        ...downloadedAttachment,
        isReencryptableToSameDigest: false,
      };
      const plaintextFilePath = join(
        __dirname,
        '../../../fixtures/ghost-kitty.mp4'
      );

      before(async () => {
        const locallyEncrypted = await writeNewAttachmentData({
          data: readFileSync(plaintextFilePath),
          getAbsoluteAttachmentPath:
            window.Signal.Migrations.getAbsoluteAttachmentPath,
        });
        ciphertextFilePath =
          window.Signal.Migrations.getAbsoluteAttachmentPath(
            locallyEncrypted.path
          );
        attachmentNeedingEncryptionInfo.localKey = locallyEncrypted.localKey;
      });
      beforeEach(() => {
        sandbox
          .stub(window.Signal.Migrations, 'getAbsoluteAttachmentPath')
          .callsFake(relPath => {
            if (relPath === downloadedAttachment.path) {
              return FILE_PATH;
            if (relPath === attachmentNeedingEncryptionInfo.path) {
              return ciphertextFilePath;
            }
            return relPath;
          });
      });
      after(async () => {
        if (ciphertextFilePath) {
          await safeUnlink(ciphertextFilePath);
        }
      });
      it('if existing (non-reencryptable digest) is already on backup tier, uses that backup locator', async () => {
        await testAttachmentToFilePointer(
          attachmentNeedingEncryptionInfo,
          new Backups.FilePointer({
            ...filePointerWithBackupLocator,
            backupLocator: new Backups.FilePointer.BackupLocator({
              ...defaultBackupLocator,
              cdnNumber: 12,
            }),
          }),
          { backupLevel: BackupLevel.Media, backupCdnNumber: 12 }
        );
      });

      it('if missing key, generates new key & digest and removes existing CDN info', async () => {
        const { filePointer: result } = await getFilePointerForAttachment({
          attachment: {
            ...downloadedAttachment,
            key: undefined,
          },
          backupLevel: BackupLevel.Media,
          getBackupCdnInfo: notInBackupCdn,
        });
        const newKey = result.backupLocator?.key;
        const newDigest = result.backupLocator?.digest;
      it('if existing digest is non-reencryptable, generates new reencryption info', async () => {
        const { filePointer: result, updatedAttachment } =
          await getFilePointerForAttachment({
            attachment: attachmentNeedingEncryptionInfo,
            backupLevel: BackupLevel.Media,
            getBackupCdnInfo: notInBackupCdn,
          });

        assert.isFalse(updatedAttachment?.isReencryptableToSameDigest);
        const newKey = updatedAttachment.reencryptionInfo?.key;
        const newDigest = updatedAttachment.reencryptionInfo?.digest;

        strictAssert(newDigest, 'must create new digest');
        strictAssert(newKey, 'must create new key');

        assert.notEqual(attachmentNeedingEncryptionInfo.key, newKey);
        assert.notEqual(attachmentNeedingEncryptionInfo.digest, newDigest);

        strictAssert(newDigest, 'must create new digest');
        assert.deepStrictEqual(

@@ -461,9 +510,49 @@ describe('getFilePointerForAttachment', () => {
            ...filePointerWithBackupLocator,
            backupLocator: new Backups.FilePointer.BackupLocator({
              ...defaultBackupLocator,
              key: newKey,
              digest: newDigest,
              mediaName: Bytes.toHex(newDigest),
              key: Bytes.fromBase64(newKey),
              digest: Bytes.fromBase64(newDigest),
              mediaName: Bytes.toHex(Bytes.fromBase64(newDigest)),
              transitCdnKey: undefined,
              transitCdnNumber: undefined,
            }),
          })
        );
      });

      it('without localKey, still able to regenerate encryption info', async () => {
        const { filePointer: result, updatedAttachment } =
          await getFilePointerForAttachment({
            attachment: {
              ...attachmentNeedingEncryptionInfo,
              localKey: undefined,
              version: 1,
              path: plaintextFilePath,
            },
            backupLevel: BackupLevel.Media,
            getBackupCdnInfo: notInBackupCdn,
          });

        assert.isFalse(updatedAttachment?.isReencryptableToSameDigest);
        const newKey = updatedAttachment.reencryptionInfo?.key;
        const newDigest = updatedAttachment.reencryptionInfo?.digest;

        strictAssert(newDigest, 'must create new digest');
        strictAssert(newKey, 'must create new key');

        assert.notEqual(attachmentNeedingEncryptionInfo.key, newKey);
        assert.notEqual(attachmentNeedingEncryptionInfo.digest, newDigest);

        strictAssert(newDigest, 'must create new digest');
        assert.deepStrictEqual(
          result,
          new Backups.FilePointer({
            ...filePointerWithBackupLocator,
            backupLocator: new Backups.FilePointer.BackupLocator({
              ...defaultBackupLocator,
              key: Bytes.fromBase64(newKey),
              digest: Bytes.fromBase64(newDigest),
              mediaName: Bytes.toHex(Bytes.fromBase64(newDigest)),
              transitCdnKey: undefined,
              transitCdnNumber: undefined,
            }),

@@ -474,61 +563,46 @@ describe('getFilePointerForAttachment', () => {
      it('if file does not exist at local path, returns invalid attachment locator', async () => {
        await testAttachmentToFilePointer(
          {
            ...downloadedAttachment,
            ...attachmentNeedingEncryptionInfo,
            path: 'no/file/here.png',
            key: undefined,
          },
          filePointerWithInvalidLocator,
          { backupLevel: BackupLevel.Media }
        );
      });

      it('if not on backup tier, and missing iv, regenerates encryption info', async () => {
        const { filePointer: result } = await getFilePointerForAttachment({
          attachment: {
            ...downloadedAttachment,
            iv: undefined,
      it('if new reencryptionInfo has already been generated, uses that', async () => {
        const attachmentWithReencryptionInfo = {
          ...downloadedAttachment,
          isReencryptableToSameDigest: false,
          reencryptionInfo: {
            iv: 'newiv',
            digest: 'newdigest',
            key: 'newkey',
          },
        };

        const { filePointer: result } = await getFilePointerForAttachment({
          attachment: attachmentWithReencryptionInfo,
          backupLevel: BackupLevel.Media,
          getBackupCdnInfo: notInBackupCdn,
        });

        const newKey = result.backupLocator?.key;
        const newDigest = result.backupLocator?.digest;

        strictAssert(newDigest, 'must create new digest');
        assert.deepStrictEqual(
          result,
          new Backups.FilePointer({
            ...filePointerWithBackupLocator,
            backupLocator: new Backups.FilePointer.BackupLocator({
              ...defaultBackupLocator,
              key: newKey,
              digest: newDigest,
              mediaName: Bytes.toHex(newDigest),
              key: Bytes.fromBase64('newkey'),
              digest: Bytes.fromBase64('newdigest'),
              mediaName: Bytes.toHex(Bytes.fromBase64('newdigest')),
              transitCdnKey: undefined,
              transitCdnNumber: undefined,
            }),
          })
        );
      });

      it('if on backup tier, and not missing iv, does not regenerate encryption info', async () => {
        await testAttachmentToFilePointer(
          {
            ...downloadedAttachment,
            iv: undefined,
          },
          new Backups.FilePointer({
            ...filePointerWithBackupLocator,
            backupLocator: new Backups.FilePointer.BackupLocator({
              ...defaultBackupLocator,
              cdnNumber: 12,
            }),
          }),
          { backupLevel: BackupLevel.Media, backupCdnNumber: 12 }
        );
      });
    });

    it('returns BackupLocator, with cdnNumber if in backup tier already', async () => {

@@ -549,7 +623,10 @@ describe('getFilePointerForAttachment', () => {
      await testAttachmentToFilePointer(
        downloadedAttachment,
        filePointerWithBackupLocator,
        { backupLevel: BackupLevel.Media }
        {
          backupLevel: BackupLevel.Media,
          updatedAttachment: downloadedAttachment,
        }
      );
    });
  });

@@ -582,7 +659,7 @@ describe('getBackupJobForAttachmentAndFilePointer', async () => {
    );
  });

  it('returns job if filePointer does have backupLocator', async () => {
  it('returns job if filePointer includes a backupLocator', async () => {
    const { filePointer, updatedAttachment } =
      await getFilePointerForAttachment({
        attachment,

@@ -639,4 +716,47 @@ describe('getBackupJobForAttachmentAndFilePointer', async () => {
      null
    );
  });

  it('uses new encryption info if existing digest is not re-encryptable, and does not include transit info', async () => {
    const newDigest = Bytes.toBase64(Bytes.fromBase64('newdigest'));
    const attachmentWithReencryptionInfo = {
      ...attachment,
      isReencryptableToSameDigest: false,
      reencryptionInfo: {
        iv: 'newiv',
        digest: newDigest,
        key: 'newkey',
      },
    };
    const { filePointer } = await getFilePointerForAttachment({
      attachment: attachmentWithReencryptionInfo,
      backupLevel: BackupLevel.Media,
      getBackupCdnInfo: notInBackupCdn,
    });

    assert.deepStrictEqual(
      await maybeGetBackupJobForAttachmentAndFilePointer({
        attachment: attachmentWithReencryptionInfo,
        filePointer,
        messageReceivedAt: 100,
        getBackupCdnInfo: notInBackupCdn,
      }),
      {
        mediaName: Bytes.toHex(Bytes.fromBase64(newDigest)),
        receivedAt: 100,
        type: 'standard',
        data: {
          path: 'path/to/file.png',
          contentType: IMAGE_PNG,
          keys: 'newkey',
          digest: newDigest,
          iv: 'newiv',
          size: 100,
          localKey: attachmentWithReencryptionInfo.localKey,
          version: attachmentWithReencryptionInfo.version,
          transitCdnInfo: undefined,
        },
      }
    );
  });
});
@@ -435,14 +435,16 @@ describe('AttachmentDownloadManager/JobManager', () => {
describe('AttachmentDownloadManager/runDownloadAttachmentJob', () => {
  let sandbox: sinon.SinonSandbox;
  let downloadAttachment: sinon.SinonStub;

  let processNewAttachment: sinon.SinonStub;
  beforeEach(async () => {
    sandbox = sinon.createSandbox();
    downloadAttachment = sandbox.stub().returns({
      path: '/path/to/file',
      iv: Buffer.alloc(16),
      plaintextHash: 'plaintextHash',
      isReencryptableToSameDigest: true,
    });
    processNewAttachment = sandbox.stub().callsFake(attachment => attachment);
  });

  afterEach(async () => {

@@ -458,7 +460,10 @@ describe('AttachmentDownloadManager/runDownloadAttachmentJob', () => {
    const result = await runDownloadAttachmentJobInner({
      job,
      isForCurrentlyVisibleMessage: true,
      dependencies: { downloadAttachment },
      dependencies: {
        downloadAttachment,
        processNewAttachment,
      },
    });

    assert.strictEqual(result.downloadedVariant, AttachmentVariant.Default);

@@ -482,7 +487,10 @@ describe('AttachmentDownloadManager/runDownloadAttachmentJob', () => {
    const result = await runDownloadAttachmentJobInner({
      job,
      isForCurrentlyVisibleMessage: true,
      dependencies: { downloadAttachment },
      dependencies: {
        downloadAttachment,
        processNewAttachment,
      },
    });

    strictAssert(

@@ -525,7 +533,10 @@ describe('AttachmentDownloadManager/runDownloadAttachmentJob', () => {
    const result = await runDownloadAttachmentJobInner({
      job,
      isForCurrentlyVisibleMessage: true,
      dependencies: { downloadAttachment },
      dependencies: {
        downloadAttachment,
        processNewAttachment,
      },
    });
    assert.strictEqual(result.downloadedVariant, AttachmentVariant.Default);
    assert.strictEqual(downloadAttachment.callCount, 1);

@@ -554,7 +565,10 @@ describe('AttachmentDownloadManager/runDownloadAttachmentJob', () => {
      runDownloadAttachmentJobInner({
        job,
        isForCurrentlyVisibleMessage: true,
        dependencies: { downloadAttachment },
        dependencies: {
          downloadAttachment,
          processNewAttachment,
        },
      })
    );

@@ -584,7 +598,10 @@ describe('AttachmentDownloadManager/runDownloadAttachmentJob', () => {
    const result = await runDownloadAttachmentJobInner({
      job,
      isForCurrentlyVisibleMessage: false,
      dependencies: { downloadAttachment },
      dependencies: {
        downloadAttachment,
        processNewAttachment,
      },
    });
    assert.strictEqual(result.downloadedVariant, AttachmentVariant.Default);
    assert.strictEqual(downloadAttachment.callCount, 1);

@@ -618,7 +635,10 @@ describe('AttachmentDownloadManager/runDownloadAttachmentJob', () => {
    const result = await runDownloadAttachmentJobInner({
      job,
      isForCurrentlyVisibleMessage: false,
      dependencies: { downloadAttachment },
      dependencies: {
        downloadAttachment,
        processNewAttachment,
      },
    });
    assert.strictEqual(
      result.downloadedVariant,

@@ -656,7 +676,10 @@ describe('AttachmentDownloadManager/runDownloadAttachmentJob', () => {
      runDownloadAttachmentJobInner({
        job,
        isForCurrentlyVisibleMessage: false,
        dependencies: { downloadAttachment },
        dependencies: {
          downloadAttachment,
          processNewAttachment,
        },
      })
    );
ts/test-electron/util/ensureAttachmentIsReencryptable_test.ts (new file, 201 lines)
@@ -0,0 +1,201 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

import { join } from 'path';
import * as assert from 'assert';
import Sinon from 'sinon';
import { randomBytes } from 'crypto';
import { omit } from 'lodash';
import { readFileSync, statSync } from 'fs';
import type {
  AttachmentType,
  LocallySavedAttachment,
} from '../../types/Attachment';
import { IMAGE_JPEG } from '../../types/MIME';
import {
  encryptAttachmentV2,
  generateAttachmentKeys,
  safeUnlink,
} from '../../AttachmentCrypto';
import { fromBase64, toBase64 } from '../../Bytes';
import { ensureAttachmentIsReencryptable } from '../../util/ensureAttachmentIsReencryptable';
import { strictAssert } from '../../util/assert';
import { writeNewAttachmentData } from '../../windows/attachments';

describe('utils/ensureAttachmentIsReencryptable', async () => {
  const fixturesDir = join(__dirname, '..', '..', '..', 'fixtures');
  const plaintextFilePath = join(fixturesDir, 'cat-screenshot.png');

  const keys = generateAttachmentKeys();
  let digest: Uint8Array;
  let iv: Uint8Array;
  const { size } = statSync(plaintextFilePath);
  let sandbox: Sinon.SinonSandbox;

  before(async () => {
    const encrypted = await encryptAttachmentV2({
      keys,
      plaintext: {
        absolutePath: plaintextFilePath,
      },
      getAbsoluteAttachmentPath:
        window.Signal.Migrations.getAbsoluteAttachmentPath,
    });
    digest = encrypted.digest;
    iv = encrypted.iv;

    sandbox = Sinon.createSandbox();

    const originalGetPath = window.Signal.Migrations.getAbsoluteAttachmentPath;
    sandbox
      .stub(window.Signal.Migrations, 'getAbsoluteAttachmentPath')
      .callsFake(relPath => {
        if (relPath === plaintextFilePath) {
          return plaintextFilePath;
        }
        return originalGetPath(relPath);
      });
  });

  after(async () => {
    sandbox.restore();
  });

  describe('v1 attachment', () => {
    function composeAttachment(
      overrides?: Partial<AttachmentType>
    ): LocallySavedAttachment {
      return {
        contentType: IMAGE_JPEG,
        size,
        iv: toBase64(iv),
        key: toBase64(keys),
        digest: toBase64(digest),
        path: plaintextFilePath,
        ...overrides,
      };
    }

    it('returns original attachment if reencryptability has already been checked', async () => {
      const attachment = composeAttachment({
        isReencryptableToSameDigest: true,
      });
      const result = await ensureAttachmentIsReencryptable(attachment);
      assert.deepStrictEqual(attachment, result);
    });

    it('marks attachment as reencryptable if it is', async () => {
      const attachment = composeAttachment();
      const result = await ensureAttachmentIsReencryptable(attachment);
      assert.deepStrictEqual(
        { ...attachment, isReencryptableToSameDigest: true },
        result
      );
    });
    it('marks attachment as unreencryptable and generates info if missing info', async () => {
      const attachment = composeAttachment({ iv: undefined });
      const result = await ensureAttachmentIsReencryptable(attachment);
      assert.deepStrictEqual(
        { ...attachment, isReencryptableToSameDigest: false },
        omit(result, 'reencryptionInfo')
      );
      strictAssert(
        result.isReencryptableToSameDigest === false,
        'must be false'
      );
      assert.strictEqual(fromBase64(result.reencryptionInfo.iv).byteLength, 16);
    });
    it('marks attachment as unreencryptable and generates info if encrytion info exists but is wrong', async () => {
      const attachment = composeAttachment({ iv: toBase64(randomBytes(16)) });
      const result = await ensureAttachmentIsReencryptable(attachment);
      assert.deepStrictEqual(
        { ...attachment, isReencryptableToSameDigest: false },
        omit(result, 'reencryptionInfo')
      );
      strictAssert(
        result.isReencryptableToSameDigest === false,
        'must be false'
      );
      assert.strictEqual(fromBase64(result.reencryptionInfo.iv).byteLength, 16);
    });
  });
  describe('v2 attachment', () => {
    let localKey: string;
    let path: string;

    before(async () => {
      const encryptedLocally = await writeNewAttachmentData({
        data: readFileSync(plaintextFilePath),
        getAbsoluteAttachmentPath:
          window.Signal.Migrations.getAbsoluteAttachmentPath,
      });
      localKey = encryptedLocally.localKey;
      path = encryptedLocally.path;
    });

    after(async () => {
      if (path) {
        await safeUnlink(path);
      }
    });

    function composeAttachment(
      overrides?: Partial<AttachmentType>
    ): LocallySavedAttachment {
      return {
        contentType: IMAGE_JPEG,
        size,
        iv: toBase64(iv),
        key: toBase64(keys),
        digest: toBase64(digest),
        path,
        version: 2,
        localKey,
        ...overrides,
      };
    }

    it('returns original attachment if reencryptability has already been checked', async () => {
      const attachment = composeAttachment({
        isReencryptableToSameDigest: true,
      });
      const result = await ensureAttachmentIsReencryptable(attachment);
      assert.deepStrictEqual(attachment, result);
    });

    it('marks attachment as reencryptable if it is', async () => {
      const attachment = composeAttachment();
      const result = await ensureAttachmentIsReencryptable(attachment);
      assert.deepStrictEqual(
        { ...attachment, isReencryptableToSameDigest: true },
        result
      );
    });
    it('marks attachment as unreencryptable and generates info if missing info', async () => {
      const attachment = composeAttachment({ iv: undefined });
      const result = await ensureAttachmentIsReencryptable(attachment);
      assert.deepStrictEqual(
        { ...attachment, isReencryptableToSameDigest: false },
        omit(result, 'reencryptionInfo')
      );
      strictAssert(
        result.isReencryptableToSameDigest === false,
        'must be false'
      );
      assert.strictEqual(fromBase64(result.reencryptionInfo.iv).byteLength, 16);
    });
    it('marks attachment as unreencryptable and generates info if encrytion info exists but is wrong', async () => {
      const attachment = composeAttachment({ iv: toBase64(randomBytes(16)) });
      const result = await ensureAttachmentIsReencryptable(attachment);
      assert.deepStrictEqual(
        { ...attachment, isReencryptableToSameDigest: false },
        omit(result, 'reencryptionInfo')
      );
      strictAssert(
        result.isReencryptableToSameDigest === false,
        'must be false'
      );
      assert.strictEqual(fromBase64(result.reencryptionInfo.iv).byteLength, 16);
    });
  });
});
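The utility exercised by this new test file is not shown in this excerpt. The following sketch is inferred purely from the assertions above and is not the actual implementation; the digest-reproduction and key-generation steps are injected as placeholders.

// Inferred contract of ensureAttachmentIsReencryptable (sketch only).
type NewInfo = { iv: string; key: string; digest: string };

async function ensureReencryptableSketch<T extends {
  isReencryptableToSameDigest?: boolean;
  iv?: string;
  key?: string;
  digest?: string;
}>(
  attachment: T,
  tryReproduceDigest: () => Promise<boolean>, // re-encrypt with stored iv/key and compare digests
  generateFreshInfo: () => Promise<NewInfo> // new keys plus the digest they produce
): Promise<T & { isReencryptableToSameDigest: boolean; reencryptionInfo?: NewInfo }> {
  if (attachment.isReencryptableToSameDigest != null) {
    // Already checked; the first test asserts the attachment comes back unchanged.
    return attachment as T & { isReencryptableToSameDigest: boolean };
  }
  if (attachment.iv && attachment.key && attachment.digest && (await tryReproduceDigest())) {
    return { ...attachment, isReencryptableToSameDigest: true };
  }
  return {
    ...attachment,
    isReencryptableToSameDigest: false,
    reencryptionInfo: await generateFreshInfo(), // fresh 16-byte iv, per the assertions above
  };
}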
@@ -4,7 +4,7 @@
import assert from 'assert';
import fs from 'fs/promises';
import crypto from 'crypto';
import path from 'path';
import path, { join } from 'path';
import os from 'os';
import createDebug from 'debug';
import pTimeout from 'p-timeout';

@@ -25,6 +25,7 @@ import { drop } from '../util/drop';
import type { RendererConfigType } from '../types/RendererConfig';
import { App } from './playwright';
import { CONTACT_COUNT } from './benchmarks/fixtures';
import { strictAssert } from '../util/assert';

export { App };

@@ -553,6 +554,11 @@ export class Bootstrap {
    };
  }

  public getAbsoluteAttachmentPath(relativePath: string): string {
    strictAssert(this.storagePath, 'storagePath must exist');
    return join(this.storagePath, 'attachments.noindex', relativePath);
  }

  //
  // Getters
  //
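Usage sketch for the new helper (the relative path is illustrative): it resolves attachment paths inside the sandboxed profile directory that Bootstrap manages.

const absolutePath = bootstrap.getAbsoluteAttachmentPath('ab/cd1234');
// -> `${storagePath}/attachments.noindex/ab/cd1234`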
|
@ -2,7 +2,9 @@
|
|||
// SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
import createDebug from 'debug';
|
||||
import { assert } from 'chai';
|
||||
import { expect } from 'playwright/test';
|
||||
import { readFileSync } from 'fs';
|
||||
import { type PrimaryDevice, StorageState } from '@signalapp/mock-server';
|
||||
import * as path from 'path';
|
||||
import type { App } from '../playwright';
|
||||
|
@ -15,9 +17,25 @@ import {
|
|||
} from '../helpers';
|
||||
import * as durations from '../../util/durations';
|
||||
import { strictAssert } from '../../util/assert';
|
||||
import {
|
||||
encryptAttachmentV2ToDisk,
|
||||
generateAttachmentKeys,
|
||||
} from '../../AttachmentCrypto';
|
||||
import { toBase64 } from '../../Bytes';
|
||||
import type { AttachmentWithNewReencryptionInfoType } from '../../types/Attachment';
|
||||
import { IMAGE_JPEG } from '../../types/MIME';
|
||||
|
||||
export const debug = createDebug('mock:test:attachments');
|
||||
|
||||
const CAT_PATH = path.join(
|
||||
__dirname,
|
||||
'..',
|
||||
'..',
|
||||
'..',
|
||||
'fixtures',
|
||||
'cat-screenshot.png'
|
||||
);
|
||||
|
||||
describe('attachments', function (this: Mocha.Suite) {
|
||||
this.timeout(durations.MINUTE);
|
||||
|
||||
|
@ -65,7 +83,7 @@ describe('attachments', function (this: Mocha.Suite) {
|
|||
page,
|
||||
pinned,
|
||||
'This is my cat',
|
||||
[path.join(__dirname, '..', '..', '..', 'fixtures', 'cat-screenshot.png')]
|
||||
[CAT_PATH]
|
||||
);
|
||||
|
||||
const Message = getTimelineMessageWithText(page, 'This is my cat');
|
||||
|
@ -78,6 +96,17 @@ describe('attachments', function (this: Mocha.Suite) {
|
|||
const timestamp = await Message.getAttribute('data-testid');
|
||||
strictAssert(timestamp, 'timestamp must exist');
|
||||
|
||||
const sentMessage = (
|
||||
await app.getMessagesBySentAt(parseInt(timestamp, 10))
|
||||
)[0];
|
||||
strictAssert(sentMessage, 'message exists in DB');
|
||||
const sentAttachment = sentMessage.attachments?.[0];
|
||||
assert.isTrue(sentAttachment?.isReencryptableToSameDigest);
|
||||
assert.isUndefined(
|
||||
(sentAttachment as unknown as AttachmentWithNewReencryptionInfoType)
|
||||
.reencryptionInfo
|
||||
);
|
||||
|
||||
// For this test, just send back the same attachment that was uploaded to test a
|
||||
// round-trip
|
||||
const incomingTimestamp = Date.now();
|
||||
|
@ -95,5 +124,95 @@ describe('attachments', function (this: Mocha.Suite) {
        'img.module-image__image'
      )
    ).toBeVisible();

    const incomingMessage = (
      await app.getMessagesBySentAt(incomingTimestamp)
    )[0];
    strictAssert(incomingMessage, 'message exists in DB');
    const incomingAttachment = incomingMessage.attachments?.[0];
    assert.isTrue(incomingAttachment?.isReencryptableToSameDigest);
    assert.isUndefined(
      (incomingAttachment as unknown as AttachmentWithNewReencryptionInfoType)
        .reencryptionInfo
    );
    assert.strictEqual(incomingAttachment?.key, sentAttachment?.key);
    assert.strictEqual(incomingAttachment?.digest, sentAttachment?.digest);
  });

  it('receiving attachments with non-zero padding will cause new re-encryption info to be generated', async () => {
    const page = await app.getWindow();

    await page.getByTestId(pinned.device.aci).click();

    const plaintextCat = readFileSync(CAT_PATH);

    const cdnKey = 'cdnKey';
    const keys = generateAttachmentKeys();
    const cdnNumber = 3;

    const { digest: newDigest, path: ciphertextPath } =
      await encryptAttachmentV2ToDisk({
        keys,
        plaintext: {
          // add non-zero byte to the end of the data; this will be considered padding
          // when received since we will include the size of the un-appended data when
          // sending
          data: Buffer.concat([plaintextCat, Buffer.from([1])]),
        },
        getAbsoluteAttachmentPath: relativePath =>
          bootstrap.getAbsoluteAttachmentPath(relativePath),
      });

    const ciphertextCatWithNonZeroPadding = readFileSync(
      bootstrap.getAbsoluteAttachmentPath(ciphertextPath)
    );

    bootstrap.server.storeAttachmentOnCdn(
      cdnNumber,
      cdnKey,
      ciphertextCatWithNonZeroPadding
    );

    const incomingTimestamp = Date.now();
    await sendTextMessage({
      from: pinned,
      to: bootstrap.desktop,
      desktop: bootstrap.desktop,
      text: 'Wait, that is MY cat! But now with weird padding!',
      attachments: [
        {
          size: plaintextCat.byteLength,
          contentType: IMAGE_JPEG,
          cdnKey,
          cdnNumber,
          key: keys,
          digest: newDigest,
        },
      ],
      timestamp: incomingTimestamp,
    });

    await expect(
      getMessageInTimelineByTimestamp(page, incomingTimestamp).locator(
        'img.module-image__image'
      )
    ).toBeVisible();

    const incomingMessage = (
      await app.getMessagesBySentAt(incomingTimestamp)
    )[0];
    strictAssert(incomingMessage, 'message exists in DB');
    const incomingAttachment = incomingMessage.attachments?.[0];

    assert.isFalse(incomingAttachment?.isReencryptableToSameDigest);
    assert.exists(incomingAttachment?.reencryptionInfo);
    assert.exists(incomingAttachment?.reencryptionInfo.digest);

    assert.strictEqual(incomingAttachment?.key, toBase64(keys));
    assert.strictEqual(incomingAttachment?.digest, toBase64(newDigest));
    assert.notEqual(
      incomingAttachment?.digest,
      incomingAttachment.reencryptionInfo.digest
    );
  });
});

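Why non-zero padding breaks the round trip: the digest covers the encrypted payload, padding included, but the receiver only persists `size` plaintext bytes and re-pads with zeros when re-encrypting. A minimal illustration, not part of this commit; a bare SHA-256 stands in for the real iv/ciphertext/mac digest:

import { createHash } from 'node:crypto';

// Zero padding, as desktop produces it.
function padWithZeros(plaintext: Buffer, paddedSize: number): Buffer {
  return Buffer.concat([
    plaintext,
    Buffer.alloc(paddedSize - plaintext.byteLength), // zero-filled
  ]);
}

const plaintext = Buffer.from('cat photo bytes');
const zeroPadded = padWithZeros(plaintext, 32);
// Non-zero padding, as some older android clients produced it.
const nonZeroPadded = Buffer.concat([
  plaintext,
  Buffer.alloc(32 - plaintext.byteLength, 1),
]);

// Same plaintext, different padding, hence different digests downstream:
console.log(createHash('sha256').update(zeroPadded).digest('hex'));
console.log(createHash('sha256').update(nonZeroPadded).digest('hex'));
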
|
|
@ -13,6 +13,7 @@ import type {
import type { ReceiptType } from '../types/Receipt';
import { SECOND } from '../util/durations';
import { drop } from '../util/drop';
import type { MessageAttributesType } from '../model-types';

export type AppLoadedInfoType = Readonly<{
  loadTime: number;

@ -178,6 +179,13 @@ export class App extends EventEmitter {
    );
  }

  public async getMessagesBySentAt(
    timestamp: number
  ): Promise<Array<MessageAttributesType>> {
    const window = await this.getWindow();
    return window.evaluate(`window.SignalCI.getMessagesBySentAt(${timestamp})`);
  }

  public async exportBackupToDisk(path: string): Promise<Uint8Array> {
    const window = await this.getWindow();
    return window.evaluate(

@ -13,6 +13,7 @@ import type { EmbeddedContactType } from '../../types/EmbeddedContact';
import type { MessageAttributesType } from '../../model-types.d';
import type {
  AddressableAttachmentType,
  AttachmentType,
  LocalAttachmentV2Type,
} from '../../types/Attachment';
import type { LoggerType } from '../../types/Logging';

@ -692,6 +693,120 @@ describe('Message', () => {
      assert.deepEqual(result, expected);
    });
  });

  describe('_mapAllAttachments', () => {
    function composeAttachment(
      overrides?: Partial<AttachmentType>
    ): AttachmentType {
      return {
        size: 128,
        contentType: MIME.IMAGE_JPEG,
        ...overrides,
      };
    }

    it('updates all attachments on message', async () => {
      const upgradeAttachment = (attachment: AttachmentType) =>
        Promise.resolve({ ...attachment, key: 'upgradedKey' });

      const upgradeVersion = Message._mapAllAttachments(upgradeAttachment);

      const message = getDefaultMessage({
        body: 'hey there!',
        attachments: [
          composeAttachment({ path: '/attachment/1' }),
          composeAttachment({ path: '/attachment/2' }),
        ],
        quote: {
          text: 'quote!',
          attachments: [
            {
              contentType: MIME.TEXT_ATTACHMENT,
              thumbnail: composeAttachment({ path: 'quoted/thumbnail' }),
            },
          ],
          id: 34233,
          isViewOnce: false,
          messageId: 'message-id',
          referencedMessageNotFound: false,
        },
        preview: [
          { url: 'url', image: composeAttachment({ path: 'preview/image' }) },
        ],
        contact: [
          {
            avatar: {
              isProfile: false,
              avatar: composeAttachment({ path: 'contact/avatar' }),
            },
          },
        ],
        sticker: {
          packId: 'packId',
          stickerId: 1,
          packKey: 'packKey',
          data: composeAttachment({ path: 'sticker/data' }),
        },
        bodyAttachment: composeAttachment({ path: 'body/attachment' }),
      });

      const expected = getDefaultMessage({
        body: 'hey there!',
        attachments: [
          composeAttachment({ path: '/attachment/1', key: 'upgradedKey' }),
          composeAttachment({ path: '/attachment/2', key: 'upgradedKey' }),
        ],
        quote: {
          text: 'quote!',
          attachments: [
            {
              contentType: MIME.TEXT_ATTACHMENT,
              thumbnail: composeAttachment({
                path: 'quoted/thumbnail',
                key: 'upgradedKey',
              }),
            },
          ],
          id: 34233,
          isViewOnce: false,
          messageId: 'message-id',
          referencedMessageNotFound: false,
        },
        preview: [
          {
            url: 'url',
            image: composeAttachment({
              path: 'preview/image',
              key: 'upgradedKey',
            }),
          },
        ],
        contact: [
          {
            avatar: {
              isProfile: false,
              avatar: composeAttachment({
                path: 'contact/avatar',
                key: 'upgradedKey',
              }),
            },
          },
        ],
        sticker: {
          packId: 'packId',
          stickerId: 1,
          packKey: 'packKey',
          data: composeAttachment({ path: 'sticker/data', key: 'upgradedKey' }),
        },
        bodyAttachment: composeAttachment({
          path: 'body/attachment',
          key: 'upgradedKey',
        }),
      });
      const result = await upgradeVersion(message, getDefaultContext());
      assert.deepEqual(result, expected);
    });
  });
  describe('migrateBodyAttachmentToDisk', () => {
    it('writes long text attachment to disk, but does not truncate body', async () => {
      const message = getDefaultMessage({

@ -31,6 +31,7 @@ import type { SignalService as Proto } from '../protobuf';
import { isMoreRecentThan } from '../util/timestamp';
import { DAY } from '../util/durations';
import { getLocalAttachmentUrl } from '../util/getLocalAttachmentUrl';
import type { ReencryptionInfo } from '../AttachmentCrypto';

const MAX_WIDTH = 300;
const MAX_HEIGHT = MAX_WIDTH * 1.5;

@ -106,7 +107,15 @@ export type AttachmentType = {

  /** Legacy field, used long ago for migrating attachments to disk. */
  schemaVersion?: number;
};
} & (
  | {
      isReencryptableToSameDigest?: true;
    }
  | {
      isReencryptableToSameDigest: false;
      reencryptionInfo?: ReencryptionInfo;
    }
);

export type LocalAttachmentV2Type = Readonly<{
  version: 2;

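The union added above makes `reencryptionInfo` reachable only after the flag has been checked: `isReencryptableToSameDigest?: true` keeps legacy attachments (flag never recorded) assignable, while the `false` branch is the only one that may carry replacement encryption info. A hypothetical narrowing check, shown only to illustrate the shape of the union:

declare const attachment: AttachmentType;

if (attachment.isReencryptableToSameDigest === false) {
  // Narrowed to the second branch of the union.
  attachment.reencryptionInfo; // ReencryptionInfo | undefined
} else {
  // First branch: the flag is true, or was never recorded.
}
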
@ -142,6 +151,7 @@ export type UploadedAttachmentType = Proto.IAttachmentPointer &
  digest: Uint8Array;
  contentType: string;
  plaintextHash: string;
  isReencryptableToSameDigest: true;
}>;

export type AttachmentWithHydratedData = AttachmentType & {

@ -1070,17 +1080,28 @@ export function getAttachmentSignature(attachment: AttachmentType): string {
}

type RequiredPropertiesForDecryption = 'key' | 'digest';
type RequiredPropertiesForReencryption = 'key' | 'digest' | 'iv';
type RequiredPropertiesForReencryption = 'path' | 'key' | 'digest' | 'iv';

type DecryptableAttachment = WithRequiredProperties<
  AttachmentType,
  RequiredPropertiesForDecryption
>;

type ReencryptableAttachment = WithRequiredProperties<
export type AttachmentWithNewReencryptionInfoType = Omit<
  AttachmentType,
  RequiredPropertiesForReencryption
>;
  'isReencryptableToSameDigest'
> & {
  isReencryptableToSameDigest: false;
  reencryptionInfo: ReencryptionInfo;
};
type AttachmentReencryptableToExistingDigestType = Omit<
  WithRequiredProperties<AttachmentType, RequiredPropertiesForReencryption>,
  'isReencryptableToSameDigest'
> & { isReencryptableToSameDigest: true };

export type ReencryptableAttachment =
  | AttachmentWithNewReencryptionInfoType
  | AttachmentReencryptableToExistingDigestType;

export type AttachmentDownloadableFromTransitTier = WithRequiredProperties<
  DecryptableAttachment,

@ -1097,24 +1118,40 @@ export type LocallySavedAttachment = WithRequiredProperties<
  'path'
>;

export type AttachmentReadyForBackup = WithRequiredProperties<
  LocallySavedAttachment,
  RequiredPropertiesForReencryption
>;

export function isDecryptable(
  attachment: AttachmentType
): attachment is DecryptableAttachment {
  return Boolean(attachment.key) && Boolean(attachment.digest);
}

export function hasAllOriginalEncryptionInfo(
  attachment: AttachmentType
): attachment is WithRequiredProperties<
  AttachmentType,
  'iv' | 'key' | 'digest'
> {
  return (
    Boolean(attachment.iv) &&
    Boolean(attachment.key) &&
    Boolean(attachment.digest)
  );
}

export function isReencryptableToSameDigest(
  attachment: AttachmentType
): attachment is ReencryptableAttachment {
): attachment is AttachmentReencryptableToExistingDigestType {
  return (
    Boolean(attachment.key) &&
    Boolean(attachment.digest) &&
    Boolean(attachment.iv)
    hasAllOriginalEncryptionInfo(attachment) &&
    Boolean(attachment.isReencryptableToSameDigest)
  );
}

export function isReencryptableWithNewEncryptionInfo(
  attachment: AttachmentType
): attachment is AttachmentWithNewReencryptionInfoType {
  return (
    attachment.isReencryptableToSameDigest === false &&
    Boolean(attachment.reencryptionInfo)
  );
}

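A hedged sketch of how the two guards above compose when choosing encryption info at export time (the helper name is invented for illustration):

function digestForExport(attachment: AttachmentType): string | undefined {
  if (isReencryptableToSameDigest(attachment)) {
    // Original iv/key reproduce the original digest exactly.
    return attachment.digest;
  }
  if (isReencryptableWithNewEncryptionInfo(attachment)) {
    // Fresh key/iv were generated; use the recomputed digest.
    return attachment.reencryptionInfo.digest;
  }
  // Neither guard passed: run ensureAttachmentIsReencryptable() first.
  return undefined;
}
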
@ -13,6 +13,7 @@ import type {
} from './Attachment';
import {
  captureDimensionsAndScreenshot,
  isAttachmentLocallySaved,
  removeSchemaVersion,
  replaceUnicodeOrderOverrides,
  replaceUnicodeV2,

@ -48,6 +49,7 @@ import { encryptLegacyAttachment } from '../util/encryptLegacyAttachment';
import { deepClone } from '../util/deepClone';
import { LONG_ATTACHMENT_LIMIT } from './Message';
import * as Bytes from '../Bytes';
import { ensureAttachmentIsReencryptable } from '../util/ensureAttachmentIsReencryptable';

export const GROUP = 'group';
export const PRIVATE = 'private';

@ -134,6 +136,8 @@ export type ContextWithMessageType = ContextType & {
// - Attachments: encrypt attachments on disk
// Version 13:
// - Attachments: write bodyAttachment to disk
// Version 14:
// - All attachments: ensure they are reencryptable to a known digest

const INITIAL_SCHEMA_VERSION = 0;

@ -281,6 +285,52 @@ export const _mapAttachments =
    return { ...message, attachments };
  };

export const _mapAllAttachments =
  (upgradeAttachment: UpgradeAttachmentType) =>
  async (
    message: MessageAttributesType,
    context: ContextType
  ): Promise<MessageAttributesType> => {
    let result = { ...message };
    result = await _mapAttachments(upgradeAttachment)(result, context);
    result = await _mapQuotedAttachments(upgradeAttachment)(result, context);
    result = await _mapPreviewAttachments(upgradeAttachment)(result, context);
    result = await _mapContact(async contact => {
      if (!contact.avatar?.avatar) {
        return contact;
      }

      return {
        ...contact,
        avatar: {
          ...contact.avatar,
          avatar: await upgradeAttachment(
            contact.avatar.avatar,
            context,
            result
          ),
        },
      };
    })(result, context);

    if (result.sticker?.data) {
      result.sticker.data = await upgradeAttachment(
        result.sticker.data,
        context,
        result
      );
    }
    if (result.bodyAttachment) {
      result.bodyAttachment = await upgradeAttachment(
        result.bodyAttachment,
        context,
        result
      );
    }

    return result;
  };

// Public API
// _mapContact :: (Contact -> Promise Contact) ->
//                (Message, Context) ->

@ -583,6 +633,21 @@ const toVersion13 = _withSchemaVersion({
  upgrade: migrateBodyAttachmentToDisk,
});

const toVersion14 = _withSchemaVersion({
  schemaVersion: 14,
  upgrade: _mapAllAttachments(async attachment => {
    if (!isAttachmentLocallySaved(attachment)) {
      return attachment;
    }
    if (!attachment.digest) {
      // this attachment has not been encrypted yet; this would be expected for messages
      // that are being upgraded prior to being sent
      return attachment;
    }
    return ensureAttachmentIsReencryptable(attachment);
  }),
});

const VERSIONS = [
  toVersion0,
  toVersion1,

|
|||
toVersion11,
|
||||
toVersion12,
|
||||
toVersion13,
|
||||
toVersion14,
|
||||
];
|
||||
|
||||
export const CURRENT_SCHEMA_VERSION = VERSIONS.length - 1;
|
||||
|
@ -731,15 +797,25 @@ export const processNewAttachment = async (
    throw new TypeError('context.logger is required');
  }

  const finalAttachment = await captureDimensionsAndScreenshot(attachment, {
    writeNewAttachmentData,
    makeObjectUrl,
    revokeObjectUrl,
    getImageDimensions,
    makeImageThumbnail,
    makeVideoScreenshot,
    logger,
  });
  let upgradedAttachment = attachment;

  if (isAttachmentLocallySaved(upgradedAttachment)) {
    upgradedAttachment =
      await ensureAttachmentIsReencryptable(upgradedAttachment);
  }

  const finalAttachment = await captureDimensionsAndScreenshot(
    upgradedAttachment,
    {
      writeNewAttachmentData,
      makeObjectUrl,
      revokeObjectUrl,
      getImageDimensions,
      makeImageThumbnail,
      makeVideoScreenshot,
      logger,
    }
  );

  return finalAttachment;
};

@ -78,7 +78,14 @@ export const downscaleOutgoingAttachment = async (

export type CdnFieldsType = Pick<
  AttachmentType,
  'cdnId' | 'cdnKey' | 'cdnNumber' | 'key' | 'digest' | 'iv' | 'plaintextHash'
  | 'cdnId'
  | 'cdnKey'
  | 'cdnNumber'
  | 'key'
  | 'digest'
  | 'iv'
  | 'plaintextHash'
  | 'isReencryptableToSameDigest'
>;

export function copyCdnFields(

@ -95,5 +102,6 @@ export function copyCdnFields(
    iv: Bytes.toBase64(uploaded.iv),
    digest: Bytes.toBase64(uploaded.digest),
    plaintextHash: uploaded.plaintextHash,
    isReencryptableToSameDigest: uploaded.isReencryptableToSameDigest,
  };
}

179 ts/util/ensureAttachmentIsReencryptable.ts (Normal file)

@ -0,0 +1,179 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

import { PassThrough } from 'stream';
import {
  type EncryptedAttachmentV2,
  type ReencryptionInfo,
  decryptAttachmentV2ToSink,
  encryptAttachmentV2,
  generateAttachmentKeys,
} from '../AttachmentCrypto';
import {
  type AddressableAttachmentType,
  type LocallySavedAttachment,
  type ReencryptableAttachment,
  hasAllOriginalEncryptionInfo,
  isReencryptableToSameDigest,
  isReencryptableWithNewEncryptionInfo,
} from '../types/Attachment';
import { strictAssert } from './assert';
import * as logging from '../logging/log';
import { fromBase64, toBase64 } from '../Bytes';

/**
 * Some attachments on desktop are not reencryptable to the digest we received for them.
 * This is because:
 * 1. desktop has not always saved iv & key for attachments
 * 2. android has in the past sent attachments with non-zero (random) padding
 *
 * In these cases we need to generate a new iv and key to recalculate a digest that we
 * can put in the backup proto at export time.
 */

export async function ensureAttachmentIsReencryptable(
  attachment: LocallySavedAttachment
): Promise<ReencryptableAttachment> {
  if (isReencryptableToSameDigest(attachment)) {
    return attachment;
  }

  if (isReencryptableWithNewEncryptionInfo(attachment)) {
    return attachment;
  }

  if (hasAllOriginalEncryptionInfo(attachment)) {
    try {
      await attemptToReencryptToOriginalDigest(attachment);
      return {
        ...attachment,
        isReencryptableToSameDigest: true,
      };
    } catch (e) {
      logging.info(
        'Unable to reencrypt attachment to original digest; must have had non-zero padding'
      );
    }
  }

  return {
    ...attachment,
    isReencryptableToSameDigest: false,
    reencryptionInfo: await generateNewEncryptionInfoForAttachment(attachment),
  };
}

/** Will throw if attachment cannot be reencrypted to original digest */
export async function attemptToReencryptToOriginalDigest(
  attachment: Readonly<LocallySavedAttachment>
): Promise<void> {
  if (!hasAllOriginalEncryptionInfo(attachment)) {
    throw new Error('attachment must have info for reencryption');
  }

  const { iv, key, digest } = attachment;

  if (!attachment.localKey) {
    await encryptAttachmentV2({
      keys: fromBase64(key),
      dangerousIv: {
        iv: fromBase64(iv),
        reason: 'reencrypting-for-backup',
        digestToMatch: fromBase64(digest),
      },
      plaintext: {
        absolutePath: window.Signal.Migrations.getAbsoluteAttachmentPath(
          attachment.path
        ),
      },
      getAbsoluteAttachmentPath:
        window.Signal.Migrations.getAbsoluteAttachmentPath,
    });
  } else {
    strictAssert(attachment.size != null, 'Size must exist');

    const passthrough = new PassThrough();
    await Promise.all([
      decryptAttachmentV2ToSink(
        {
          ciphertextPath: window.Signal.Migrations.getAbsoluteAttachmentPath(
            attachment.path
          ),
          idForLogging: 'attemptToReencryptToOriginalDigest',
          size: attachment.size,
          keysBase64: attachment.localKey,
          type: 'local',
        },
        passthrough
      ),
      encryptAttachmentV2({
        plaintext: {
          stream: passthrough,
        },
        keys: fromBase64(key),
        dangerousIv: {
          iv: fromBase64(iv),
          reason: 'reencrypting-for-backup',
          digestToMatch: fromBase64(digest),
        },
        getAbsoluteAttachmentPath:
          window.Signal.Migrations.getAbsoluteAttachmentPath,
      }),
    ]);
  }
}

export async function generateNewEncryptionInfoForAttachment(
  attachment: Readonly<AddressableAttachmentType>
): Promise<ReencryptionInfo> {
  const newKeys = generateAttachmentKeys();

  let encryptedAttachment: EncryptedAttachmentV2;

  if (!attachment.localKey) {
    encryptedAttachment = await encryptAttachmentV2({
      keys: newKeys,
      plaintext: {
        absolutePath: window.Signal.Migrations.getAbsoluteAttachmentPath(
          attachment.path
        ),
      },
      getAbsoluteAttachmentPath:
        window.Signal.Migrations.getAbsoluteAttachmentPath,
    });
  } else {
    const passthrough = new PassThrough();
    strictAssert(attachment.size != null, 'Size must exist');

    const result = await Promise.all([
      decryptAttachmentV2ToSink(
        {
          ciphertextPath: window.Signal.Migrations.getAbsoluteAttachmentPath(
            attachment.path
          ),
          idForLogging: 'generateNewEncryptionInfoForAttachment',
          size: attachment.size,
          keysBase64: attachment.localKey,
          type: 'local',
        },
        passthrough
      ),
      encryptAttachmentV2({
        keys: newKeys,
        plaintext: {
          stream: passthrough,
        },
        getAbsoluteAttachmentPath:
          window.Signal.Migrations.getAbsoluteAttachmentPath,
      }),
    ]);
    // eslint-disable-next-line prefer-destructuring
    encryptedAttachment = result[1];
  }

  return {
    digest: toBase64(encryptedAttachment.digest),
    iv: toBase64(encryptedAttachment.iv),
    key: toBase64(newKeys),
  };
}

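A usage sketch mirroring the schema-version-14 upgrade shown earlier; the wrapper name is invented, and the guards come from ts/types/Attachment.ts:

import type { AttachmentType } from '../types/Attachment';
import { isAttachmentLocallySaved } from '../types/Attachment';

async function prepareAttachmentForBackup(
  attachment: AttachmentType
): Promise<AttachmentType> {
  if (!isAttachmentLocallySaved(attachment) || !attachment.digest) {
    // Nothing on disk yet, or never encrypted/sent: nothing to verify.
    return attachment;
  }
  // Resolves to either { isReencryptableToSameDigest: true } or
  // { isReencryptableToSameDigest: false, reencryptionInfo }; the new-info
  // path cannot fail, since freshly generated keys always produce a digest.
  return ensureAttachmentIsReencryptable(attachment);
}
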
@ -2,25 +2,51 @@
// SPDX-License-Identifier: AGPL-3.0-only

import { Transform } from 'node:stream';
import { strictAssert } from './assert';

/**
 * Truncates the stream to the target size.
 * Truncates the stream to the target size and analyzes padding type.
 */
export function trimPadding(size: number): Transform {
export function trimPadding(
  size: number,
  onPaddingAnalyzed: ({
    isPaddingAllZeros,
  }: {
    isPaddingAllZeros: boolean;
  }) => void
): Transform {
  let total = 0;
  let seenNonZeroPadding = false;
  return new Transform({
    transform(chunk, _encoding, callback) {
      strictAssert(chunk instanceof Uint8Array, 'chunk must be Uint8Array');
      const chunkSize = chunk.byteLength;
      const sizeLeft = size - total;
      let paddingInThisChunk: Uint8Array | undefined;
      if (sizeLeft >= chunkSize) {
        total += chunkSize;
        callback(null, chunk);
      } else if (sizeLeft > 0) {
        total += sizeLeft;
        callback(null, chunk.subarray(0, sizeLeft));
        const data = chunk.subarray(0, sizeLeft);
        paddingInThisChunk = chunk.subarray(sizeLeft);
        callback(null, data);
      } else {
        paddingInThisChunk = chunk;
        callback(null, null);
      }

      if (
        paddingInThisChunk &&
        !seenNonZeroPadding &&
        !paddingInThisChunk.every(el => el === 0)
      ) {
        seenNonZeroPadding = true;
      }
    },
    flush(callback) {
      onPaddingAnalyzed({ isPaddingAllZeros: !seenNonZeroPadding });
      callback();
    },
  });
}

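A minimal consumption sketch for the function above, assuming a Node stream pipeline like the one in decryptAttachmentV2ToSink; the callback fires in flush(), so it runs only after every discarded padding byte has been inspected:

import { pipeline } from 'node:stream/promises';

async function trimAndCheckPadding(
  source: NodeJS.ReadableStream,
  sink: NodeJS.WritableStream,
  size: number
): Promise<boolean> {
  let isReencryptable = false;
  await pipeline(
    source,
    trimPadding(size, ({ isPaddingAllZeros }) => {
      isReencryptable = isPaddingAllZeros;
    }),
    sink
  );
  // pipeline() resolves after flush(), so the callback has already run.
  return isReencryptable;
}
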
@ -58,6 +58,7 @@ export async function uploadAttachment(
    height,
    caption,
    blurHash,
    isReencryptableToSameDigest: true,
  };
}

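Setting the flag unconditionally is sound here: attachments encrypted by desktop itself for upload are zero-padded by construction, so the digest computed at upload time can always be reproduced from the local plaintext and the saved iv/key.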