Export long message attachments
parent a9406a7914
commit 511fc9c1a0
20 changed files with 423 additions and 82 deletions
@@ -59,7 +59,7 @@ import { isOlderThan } from './util/timestamp';
import { isValidReactionEmoji } from './reactions/isValidReactionEmoji';
import type { ConversationModel } from './models/conversations';
import { getAuthor, isIncoming } from './messages/helpers';
import { migrateMessageData } from './messages/migrateMessageData';
import { migrateBatchOfMessages } from './messages/migrateMessageData';
import { createBatcher } from './util/batcher';
import {
  initializeAllJobQueues,
@@ -347,7 +347,6 @@ export async function startApp(): Promise<void> {
  window.setImmediate = window.nodeSetImmediate;

  const { Message } = window.Signal.Types;
  const { upgradeMessageSchema } = window.Signal.Migrations;

  log.info('background page reloaded');
  log.info('environment:', getEnvironment());
@@ -986,13 +985,8 @@ export async function startApp(): Promise<void> {
      log.warn(
        `idleDetector/idle: fetching at most ${NUM_MESSAGES_PER_BATCH} for migration`
      );
      const batchWithIndex = await migrateMessageData({
      const batchWithIndex = await migrateBatchOfMessages({
        numMessagesPerBatch: NUM_MESSAGES_PER_BATCH,
        upgradeMessageSchema,
        getMessagesNeedingUpgrade: DataReader.getMessagesNeedingUpgrade,
        saveMessages: DataWriter.saveMessages,
        incrementMessagesMigrationAttempts:
          DataWriter.incrementMessagesMigrationAttempts,
      });
      log.info('idleDetector/idle: Upgraded messages:', batchWithIndex);
      isMigrationWithIndexComplete = batchWithIndex.done;

@@ -125,7 +125,12 @@ export function EditHistoryMessagesModal({
  isEditedMessage
  isSpoilerExpanded={revealedSpoilersById[currentMessageId] || {}}
  key={currentMessage.timestamp}
  kickOffAttachmentDownload={kickOffAttachmentDownload}
  kickOffAttachmentDownload={({ attachment }) =>
    kickOffAttachmentDownload({
      attachment,
      messageId: currentMessage.id,
    })
  }
  messageExpanded={(messageId, displayLimit) => {
    const update = {
      ...displayLimitById,
@@ -188,7 +193,12 @@ export function EditHistoryMessagesModal({
  getPreferredBadge={getPreferredBadge}
  i18n={i18n}
  isSpoilerExpanded={revealedSpoilersById[syntheticId] || {}}
  kickOffAttachmentDownload={kickOffAttachmentDownload}
  kickOffAttachmentDownload={({ attachment }) =>
    kickOffAttachmentDownload({
      attachment,
      messageId: messageAttributes.id,
    })
  }
  messageExpanded={(messageId, displayLimit) => {
    const update = {
      ...displayLimitById,

@@ -650,15 +650,10 @@ export function LeftPane({
    dialogs.push({ key: 'banner', dialog: maybeBanner });
  }

  // We'll show the backup media download progress banner if the download is currently or
  // was ongoing at some point during the lifecycle of this component

  const isMediaBackupDownloadIncomplete =
    backupMediaDownloadProgress?.totalBytes > 0 &&
    backupMediaDownloadProgress.downloadedBytes <
      backupMediaDownloadProgress.totalBytes;
  const hasMediaBeenQueuedForBackup =
    backupMediaDownloadProgress?.totalBytes > 0;
  if (
    isMediaBackupDownloadIncomplete &&
    hasMediaBeenQueuedForBackup &&
    !backupMediaDownloadProgress.downloadBannerDismissed
  ) {
    dialogs.push({

@@ -1966,6 +1966,9 @@ export class Message extends React.PureComponent<Props, State> {
    if (!textAttachment) {
      return;
    }
    if (isDownloaded(textAttachment)) {
      return;
    }
    kickOffAttachmentDownload({
      attachment: textAttachment,
      messageId: id,

@@ -5,7 +5,7 @@ import type { KeyboardEvent } from 'react';
import React from 'react';

import type { AttachmentType } from '../../types/Attachment';
import { canBeDownloaded } from '../../types/Attachment';
import { canBeDownloaded, isDownloaded } from '../../types/Attachment';
import { getSizeClass } from '../emoji/lib';

import type { ShowConversationType } from '../../state/ducks/conversations';
@@ -35,7 +35,7 @@ export type Props = {
  text: string;
  textAttachment?: Pick<
    AttachmentType,
    'pending' | 'digest' | 'key' | 'wasTooBig'
    'pending' | 'digest' | 'key' | 'wasTooBig' | 'path'
  >;
};

@@ -97,6 +97,7 @@ export function MessageBody({
  } else if (
    textAttachment &&
    canBeDownloaded(textAttachment) &&
    !isDownloaded(textAttachment) &&
    kickOffBodyDownload
  ) {
    endNotification = (

@@ -3,7 +3,6 @@

import { isNumber } from 'lodash';
import PQueue from 'p-queue';
import { v4 as generateUuid } from 'uuid';

import { DataWriter } from '../../sql/Client';
import * as Errors from '../../types/errors';
@@ -30,10 +29,8 @@ import type {
import type {
  AttachmentType,
  UploadedAttachmentType,
  AttachmentWithHydratedData,
} from '../../types/Attachment';
import { copyCdnFields } from '../../util/attachments';
import { LONG_MESSAGE } from '../../types/MIME';
import { LONG_ATTACHMENT_LIMIT } from '../../types/Message';
import type { RawBodyRange } from '../../types/BodyRange';
import type { EmbeddedContactWithUploadedAvatar } from '../../types/EmbeddedContact';
@@ -52,7 +49,6 @@ import { sendToGroup } from '../../util/sendToGroup';
import type { DurationInSeconds } from '../../util/durations';
import type { ServiceIdString } from '../../types/ServiceId';
import { normalizeAci } from '../../util/normalizeAci';
import * as Bytes from '../../Bytes';
import {
  getPropForTimestamp,
  getTargetOfThisEditTimestamp,
@@ -584,17 +580,14 @@ async function getMessageSendData({
    prop: 'body',
    targetTimestamp,
  });
  let maybeLongAttachment: AttachmentWithHydratedData | undefined;
  if (body && body.length > LONG_ATTACHMENT_LIMIT) {
    const data = Bytes.fromString(body);
  const maybeLongAttachment = getPropForTimestamp({
    log,
    message: message.attributes,
    prop: 'bodyAttachment',
    targetTimestamp,
  });

    maybeLongAttachment = {
      contentType: LONG_MESSAGE,
      clientUuid: generateUuid(),
      fileName: `long-message-${targetTimestamp}.txt`,
      data,
      size: data.byteLength,
    };
  if (body && body.length > LONG_ATTACHMENT_LIMIT) {
    body = body.slice(0, LONG_ATTACHMENT_LIMIT);
  }

@@ -630,7 +623,14 @@ async function getMessageSendData({
      )
    ),
    uploadQueue.add(async () =>
      maybeLongAttachment ? uploadAttachment(maybeLongAttachment) : undefined
      maybeLongAttachment
        ? uploadLongMessageAttachment({
            attachment: maybeLongAttachment,
            log,
            message,
            targetTimestamp,
          })
        : undefined
    ),
    uploadMessageContacts(message, uploadQueue),
    uploadMessagePreviews({
@@ -758,6 +758,52 @@ async function uploadSingleAttachment({
  return uploaded;
}

async function uploadLongMessageAttachment({
  attachment,
  log,
  message,
  targetTimestamp,
}: {
  attachment: AttachmentType;
  log: LoggerType;
  message: MessageModel;
  targetTimestamp: number;
}): Promise<UploadedAttachmentType> {
  const { loadAttachmentData } = window.Signal.Migrations;

  const withData = await loadAttachmentData(attachment);
  const uploaded = await uploadAttachment(withData);

  // Add digest to the attachment
  const logId = `uploadLongMessageAttachment(${message.idForLogging()}`;
  const oldAttachment = getPropForTimestamp({
    log,
    message: message.attributes,
    prop: 'bodyAttachment',
    targetTimestamp,
  });
  strictAssert(
    oldAttachment !== undefined,
    `${logId}: Attachment was uploaded, but message doesn't ` +
      'have long message attachment anymore'
  );

  const newBodyAttachment = { ...oldAttachment, ...copyCdnFields(uploaded) };

  const attributesToUpdate = getChangesForPropAtTimestamp({
    log,
    message: message.attributes,
    prop: 'bodyAttachment',
    targetTimestamp,
    value: newBodyAttachment,
  });
  if (attributesToUpdate) {
    message.set(attributesToUpdate);
  }

  return uploaded;
}

async function uploadMessageQuote({
  log,
  message,

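For orientation only, a condensed sketch of the new send-side handling of long bodies (not part of the commit; the names are the ones introduced above, queueing and error handling are omitted, and the 2048-character cap is the LONG_ATTACHMENT_LIMIT value exercised by the tests below):

// Sketch: sending a message whose body exceeds LONG_ATTACHMENT_LIMIT.
// The full text already lives in bodyAttachment on disk (schema version 13),
// so the send path uploads that attachment and keeps only a truncated body inline.
let body = getPropForTimestamp({
  log,
  message: message.attributes,
  prop: 'body',
  targetTimestamp,
});
const maybeLongAttachment = getPropForTimestamp({
  log,
  message: message.attributes,
  prop: 'bodyAttachment',
  targetTimestamp,
});
if (body && body.length > LONG_ATTACHMENT_LIMIT) {
  body = body.slice(0, LONG_ATTACHMENT_LIMIT); // inline body is capped
}
if (maybeLongAttachment) {
  // Loads the attachment data from disk, uploads it, and merges
  // copyCdnFields(uploaded) back into the message's bodyAttachment.
  await uploadLongMessageAttachment({
    attachment: maybeLongAttachment,
    log,
    message,
    targetTimestamp,
  });
}
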
@@ -64,7 +64,7 @@ export async function addAttachmentToMessage(
    return {
      ...edit,
      body: Bytes.toString(attachmentData),
      bodyAttachment: undefined,
      bodyAttachment: attachment,
    };
  });

@@ -96,7 +96,7 @@ export async function addAttachmentToMessage(

    message.set({
      body: Bytes.toString(attachmentData),
      bodyAttachment: undefined,
      bodyAttachment: attachment,
    });
  } finally {
    if (attachment.path) {

@@ -9,6 +9,7 @@ import { isNotNil } from '../util/isNotNil';
import type { MessageAttributesType } from '../model-types.d';
import type { AciString } from '../types/ServiceId';
import * as Errors from '../types/errors';
import { DataReader, DataWriter } from '../sql/Client';

const MAX_CONCURRENCY = 5;

@@ -126,3 +127,18 @@ export async function migrateMessageData({
    totalDuration,
  };
}

export async function migrateBatchOfMessages({
  numMessagesPerBatch,
}: {
  numMessagesPerBatch: number;
}): ReturnType<typeof migrateMessageData> {
  return migrateMessageData({
    numMessagesPerBatch,
    upgradeMessageSchema: window.Signal.Migrations.upgradeMessageSchema,
    getMessagesNeedingUpgrade: DataReader.getMessagesNeedingUpgrade,
    saveMessages: DataWriter.saveMessages,
    incrementMessagesMigrationAttempts:
      DataWriter.incrementMessagesMigrationAttempts,
  });
}

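As a usage note, a minimal sketch (not part of the commit) of how the new wrapper is driven; it condenses the loop added to BackupExportStream below and assumes only the done and numProcessed fields that loop already relies on:

// Drain the message-schema migration in fixed-size batches.
// migrateBatchOfMessages binds upgradeMessageSchema and the DataReader/DataWriter
// dependencies itself, so callers only choose a batch size.
// (The import path depends on the caller's location.)
import { migrateBatchOfMessages } from '../messages/migrateMessageData';

async function migrateAllMessages(): Promise<number> {
  let result: Awaited<ReturnType<typeof migrateBatchOfMessages>> | undefined;
  let totalMigrated = 0;
  while (!result?.done) {
    // eslint-disable-next-line no-await-in-loop
    result = await migrateBatchOfMessages({ numMessagesPerBatch: 1000 });
    totalMigrated += result.numProcessed;
  }
  return totalMigrated;
}
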
@@ -1857,11 +1857,17 @@ export class MessageModel extends window.Backbone.Model<MessageAttributesType> {
    const ourPni = window.textsecure.storage.user.getCheckedPni();
    const ourServiceIds: Set<ServiceIdString> = new Set([ourAci, ourPni]);

    const [longMessageAttachments, normalAttachments] = partition(
      dataMessage.attachments ?? [],
      attachment => MIME.isLongMessage(attachment.contentType)
    );

    window.MessageCache.toMessageAttributes(this.attributes);
    message.set({
      id: messageId,
      attachments: dataMessage.attachments,
      attachments: normalAttachments,
      body: dataMessage.body,
      bodyAttachment: longMessageAttachments[0],
      bodyRanges: dataMessage.bodyRanges,
      contact: dataMessage.contact,
      conversationId: conversation.id,

@@ -133,6 +133,7 @@ import { CallLinkRestrictions } from '../../types/CallLink';
import { toAdminKeyBytes } from '../../util/callLinks';
import { getRoomIdFromRootKey } from '../../util/callLinksRingrtc';
import { SeenStatus } from '../../MessageSeenStatus';
import { migrateBatchOfMessages } from '../../messages/migrateMessageData';

const MAX_CONCURRENCY = 10;

@@ -219,6 +220,25 @@ export class BackupExportStream extends Readable {
    (async () => {
      log.info('BackupExportStream: starting...');
      drop(AttachmentBackupManager.stop());
      log.info('BackupExportStream: message migration starting...');
      let batchMigrationResult:
        | Awaited<ReturnType<typeof migrateBatchOfMessages>>
        | undefined;
      let totalMigrated = 0;
      while (!batchMigrationResult?.done) {
        // eslint-disable-next-line no-await-in-loop
        batchMigrationResult = await migrateBatchOfMessages({
          numMessagesPerBatch: 1000,
        });
        totalMigrated += batchMigrationResult.numProcessed;
        log.info(
          `BackupExportStream: Migrated batch of ${batchMigrationResult.numProcessed}`
        );
      }
      log.info(
        `BackupExportStream: message migration complete; ${totalMigrated} messages migrated`
      );

      await pauseWriteAccess();
      try {
        await this.unsafeRun(backupLevel);
@@ -1162,10 +1182,11 @@ export class BackupExportStream extends Readable {
        };
      }
    } else {
      result.standardMessage = await this.toStandardMessage(
      result.standardMessage = await this.toStandardMessage({
        message,
        backupLevel
      );
        backupLevel,
      });

      result.revisions = await this.toChatItemRevisions(
        result,
        message,
@@ -2157,7 +2178,7 @@ export class BackupExportStream extends Readable {
    return new Backups.MessageAttachment({
      pointer: filePointer,
      flag: this.getMessageAttachmentFlag(attachment),
      wasDownloaded: isDownloaded(attachment), // should always be true
      wasDownloaded: isDownloaded(attachment),
      clientUuid: clientUuid ? uuidToBytes(clientUuid) : undefined,
    });
  }
@@ -2349,26 +2370,30 @@ export class BackupExportStream extends Readable {
    };
  }

  private async toStandardMessage(
  private async toStandardMessage({
    message,
    backupLevel,
  }: {
    message: Pick<
      MessageAttributesType,
      | 'quote'
      | 'attachments'
      | 'body'
      | 'bodyAttachment'
      | 'bodyRanges'
      | 'preview'
      | 'reactions'
      | 'received_at'
    >,
    backupLevel: BackupLevel
  ): Promise<Backups.IStandardMessage> {
    >;
    backupLevel: BackupLevel;
  }): Promise<Backups.IStandardMessage> {
    return {
      quote: await this.toQuote({
        quote: message.quote,
        backupLevel,
        messageReceivedAt: message.received_at,
      }),
      attachments: message.attachments
      attachments: message.attachments?.length
        ? await Promise.all(
            message.attachments.map(attachment => {
              return this.processMessageAttachment({
@@ -2379,12 +2404,16 @@ export class BackupExportStream extends Readable {
            })
          )
        : undefined,
      longText: message.bodyAttachment
        ? await this.processAttachment({
            attachment: message.bodyAttachment,
            backupLevel,
            messageReceivedAt: message.received_at,
          })
        : undefined,
      text:
        message.body != null
          ? {
              // TODO (DESKTOP-7207): handle long message text attachments
              // Note that we store full text on the message model so we have to
              // trim it before serializing.
              body: message.body?.slice(0, LONG_ATTACHMENT_LIMIT),
              bodyRanges: message.bodyRanges?.map(range =>
                this.toBodyRange(range)
@@ -2449,7 +2478,10 @@ export class BackupExportStream extends Readable {
        : this.getIncomingMessageDetails(history),

      // Message itself
      standardMessage: await this.toStandardMessage(history, backupLevel),
      standardMessage: await this.toStandardMessage({
        message: history,
        backupLevel,
      }),
    };

    // Backups use oldest to newest order

@@ -109,6 +109,7 @@ import { fromAdminKeyBytes } from '../../util/callLinks';
import { getRoomIdFromRootKey } from '../../util/callLinksRingrtc';
import { reinitializeRedux } from '../../state/reinitializeRedux';
import { getParametersForRedux, loadAll } from '../allLoaders';
import { resetBackupMediaDownloadProgress } from '../../util/backupMediaDownload';

const MAX_CONCURRENCY = 10;

@@ -308,8 +309,7 @@ export class BackupImportStream extends Writable {
  ): Promise<BackupImportStream> {
    await AttachmentDownloadManager.stop();
    await DataWriter.removeAllBackupAttachmentDownloadJobs();
    await window.storage.put('backupMediaDownloadCompletedBytes', 0);
    await window.storage.put('backupMediaDownloadTotalBytes', 0);
    await resetBackupMediaDownloadProgress();

    return new BackupImportStream(backupType);
  }
@@ -1504,6 +1504,9 @@ export class BackupImportStream extends Writable {
    return {
      body: data.text?.body || undefined,
      bodyRanges: this.fromBodyRanges(data.text),
      bodyAttachment: data.longText
        ? convertFilePointerToAttachment(data.longText)
        : undefined,
      attachments: data.attachments?.length
        ? data.attachments
            .map(convertBackupMessageAttachmentToAttachment)

@@ -6618,7 +6618,8 @@ function getExternalFilesForMessage(message: MessageType): {
  externalAttachments: Array<string>;
  externalDownloads: Array<string>;
} {
  const { attachments, contact, quote, preview, sticker } = message;
  const { attachments, bodyAttachment, contact, quote, preview, sticker } =
    message;
  const externalAttachments: Array<string> = [];
  const externalDownloads: Array<string> = [];

@@ -6653,6 +6654,16 @@ function getExternalFilesForMessage(message: MessageType): {
    }
  });

  if (bodyAttachment?.path) {
    externalAttachments.push(bodyAttachment.path);
  }

  for (const editHistory of message.editHistory ?? []) {
    if (editHistory.bodyAttachment?.path) {
      externalAttachments.push(editHistory.bodyAttachment.path);
    }
  }

  if (quote && quote.attachments && quote.attachments.length) {
    forEach(quote.attachments, attachment => {
      const { thumbnail } = attachment;

@@ -20,6 +20,7 @@ import {
  IMAGE_JPEG,
  IMAGE_PNG,
  IMAGE_WEBP,
  LONG_MESSAGE,
  VIDEO_MP4,
} from '../../types/MIME';
import type {
@@ -130,6 +131,128 @@ describe('backup/attachments', () => {
    };
  }

  describe('long-message attachments', () => {
    it('preserves attachment still on message.attachments', async () => {
      const longMessageAttachment = composeAttachment(1, {
        contentType: LONG_MESSAGE,
      });
      const normalAttachment = composeAttachment(2);

      strictAssert(longMessageAttachment.digest, 'digest exists');
      strictAssert(normalAttachment.digest, 'digest exists');

      await asymmetricRoundtripHarness(
        [
          composeMessage(1, {
            attachments: [longMessageAttachment, normalAttachment],
            schemaVersion: 12,
          }),
        ],
        // path & iv will not be roundtripped
        [
          composeMessage(1, {
            attachments: [
              omit(longMessageAttachment, ['path', 'iv', 'thumbnail']),
              omit(normalAttachment, ['path', 'iv', 'thumbnail']),
            ],
          }),
        ],
        { backupLevel: BackupLevel.Messages }
      );
    });
    it('migration creates long-message attachment if there is a long message.body (i.e. schemaVersion < 13)', async () => {
      await asymmetricRoundtripHarness(
        [
          composeMessage(1, {
            body: 'a'.repeat(3000),
            schemaVersion: 12,
          }),
        ],
        [
          composeMessage(1, {
            body: 'a'.repeat(2048),
            bodyAttachment: {
              contentType: LONG_MESSAGE,
              size: 3000,
            },
          }),
        ],
        {
          backupLevel: BackupLevel.Media,
          comparator: (expected, msgInDB) => {
            assert.deepStrictEqual(
              omit(expected, 'bodyAttachment'),
              omit(msgInDB, 'bodyAttachment')
            );

            assert.deepStrictEqual(
              expected.bodyAttachment,
              // all encryption info will be generated anew
              omit(msgInDB.bodyAttachment, [
                'backupLocator',
                'digest',
                'key',
                'downloadPath',
              ])
            );

            assert.isNotEmpty(msgInDB.bodyAttachment?.backupLocator);
            assert.isNotEmpty(msgInDB.bodyAttachment?.digest);
            assert.isNotEmpty(msgInDB.bodyAttachment?.key);
          },
        }
      );
    });
    it('handles existing bodyAttachments', async () => {
      const attachment = omit(
        composeAttachment(1, {
          contentType: LONG_MESSAGE,
          size: 3000,
          downloadPath: 'downloadPath',
        }),
        'thumbnail'
      );
      strictAssert(attachment.digest, 'must exist');

      await asymmetricRoundtripHarness(
        [
          composeMessage(1, {
            bodyAttachment: attachment,
            body: 'a'.repeat(3000),
          }),
        ],
        // path & iv will not be roundtripped
        [
          composeMessage(1, {
            body: 'a'.repeat(2048),
            bodyAttachment: {
              ...omit(attachment, ['iv', 'path', 'uploadTimestamp']),
              backupLocator: {
                mediaName: digestToMediaName(attachment.digest),
              },
            },
          }),
        ],
        {
          backupLevel: BackupLevel.Media,
          comparator: (expected, msgInDB) => {
            assert.deepStrictEqual(
              omit(expected, 'bodyAttachment'),
              omit(msgInDB, 'bodyAttachment')
            );

            assert.deepStrictEqual(
              omit(expected.bodyAttachment, ['clientUuid', 'downloadPath']),
              omit(msgInDB.bodyAttachment, ['clientUuid', 'downloadPath'])
            );

            assert.isNotEmpty(msgInDB.bodyAttachment?.downloadPath);
          },
        }
      );
    });
  });

  describe('normal attachments', () => {
    it('BackupLevel.Messages, roundtrips normal attachments', async () => {
      const attachment1 = composeAttachment(1);

@@ -692,4 +692,34 @@ describe('Message', () => {
      assert.deepEqual(result, expected);
    });
  });
  describe('migrateBodyAttachmentToDisk', () => {
    it('writes long text attachment to disk, but does not truncate body', async () => {
      const message = getDefaultMessage({
        body: 'a'.repeat(3000),
      });
      const expected = getDefaultMessage({
        body: 'a'.repeat(3000),
        bodyAttachment: {
          contentType: MIME.LONG_MESSAGE,
          ...FAKE_LOCAL_ATTACHMENT,
        },
      });
      const result = await Message.migrateBodyAttachmentToDisk(
        message,
        getDefaultContext()
      );
      assert.deepEqual(result, expected);
    });
    it('does nothing if body is not too long', async () => {
      const message = getDefaultMessage({
        body: 'a'.repeat(2048),
      });

      const result = await Message.migrateBodyAttachmentToDisk(
        message,
        getDefaultContext()
      );
      assert.deepEqual(result, message);
    });
  });
});

@@ -750,16 +750,18 @@ export function isGIF(attachments?: ReadonlyArray<AttachmentType>): boolean {
  return hasFlag && isVideoAttachment(attachment);
}

function resolveNestedAttachment(
  attachment?: AttachmentType
): AttachmentType | undefined {
function resolveNestedAttachment<
  T extends Pick<AttachmentType, 'textAttachment'>,
>(attachment?: T): T | AttachmentType | undefined {
  if (attachment?.textAttachment?.preview?.image) {
    return attachment.textAttachment.preview.image;
  }
  return attachment;
}

export function isDownloaded(attachment?: AttachmentType): boolean {
export function isDownloaded(
  attachment?: Pick<AttachmentType, 'path' | 'textAttachment'>
): boolean {
  const resolved = resolveNestedAttachment(attachment);
  return Boolean(resolved && (resolved.path || resolved.textAttachment));
}

@@ -21,6 +21,7 @@ import * as Errors from './errors';
import * as SchemaVersion from './SchemaVersion';
import { initializeAttachmentMetadata } from './message/initializeAttachmentMetadata';

import { LONG_MESSAGE } from './MIME';
import type * as MIME from './MIME';
import type { LoggerType } from './Logging';
import type {
@@ -45,6 +46,8 @@ import {
} from '../util/getLocalAttachmentUrl';
import { encryptLegacyAttachment } from '../util/encryptLegacyAttachment';
import { deepClone } from '../util/deepClone';
import { LONG_ATTACHMENT_LIMIT } from './Message';
import * as Bytes from '../Bytes';

export const GROUP = 'group';
export const PRIVATE = 'private';
@@ -125,8 +128,12 @@ export type ContextWithMessageType = ContextType & {
// attachment filenames
// Version 10
// - Preview: A new type of attachment can be included in a message.
// Version 11
// Version 11 (deprecated)
// - Attachments: add sha256 plaintextHash
// Version 12:
// - Attachments: encrypt attachments on disk
// Version 13:
// - Attachments: write bodyAttachment to disk

const INITIAL_SCHEMA_VERSION = 0;

@@ -571,6 +578,10 @@ const toVersion12 = _withSchemaVersion({
    return result;
  },
});
const toVersion13 = _withSchemaVersion({
  schemaVersion: 13,
  upgrade: migrateBodyAttachmentToDisk,
});

const VERSIONS = [
  toVersion0,
@@ -586,7 +597,9 @@ const VERSIONS = [
  toVersion10,
  toVersion11,
  toVersion12,
  toVersion13,
];

export const CURRENT_SCHEMA_VERSION = VERSIONS.length - 1;

// We need dimensions and screenshots for images for proper display
@@ -953,13 +966,24 @@ export const deleteAllExternalFiles = ({
  }

  return async (message: MessageAttributesType) => {
    const { attachments, editHistory, quote, contact, preview, sticker } =
      message;
    const {
      attachments,
      bodyAttachment,
      editHistory,
      quote,
      contact,
      preview,
      sticker,
    } = message;

    if (attachments && attachments.length) {
      await Promise.all(attachments.map(deleteAttachmentData));
    }

    if (bodyAttachment) {
      await deleteAttachmentData(bodyAttachment);
    }

    if (quote && quote.attachments && quote.attachments.length) {
      await Promise.all(
        quote.attachments.map(async attachment => {
@@ -1001,7 +1025,11 @@ export const deleteAllExternalFiles = ({

    if (editHistory && editHistory.length) {
      await Promise.all(
        editHistory.map(edit => {
        editHistory.map(async edit => {
          if (edit.bodyAttachment) {
            await deleteAttachmentData(edit.bodyAttachment);
          }

          if (!edit.attachments || !edit.attachments.length) {
            return;
          }
@@ -1015,6 +1043,35 @@ export const deleteAllExternalFiles = ({
    };
  };

export async function migrateBodyAttachmentToDisk(
  message: MessageAttributesType,
  { logger, writeNewAttachmentData }: ContextType
): Promise<MessageAttributesType> {
  const logId = `Message2.toVersion13(${message.sent_at})`;

  // if there is already a bodyAttachment, nothing to do
  if (message.bodyAttachment) {
    return message;
  }

  if (!message.body || (message.body?.length ?? 0) <= LONG_ATTACHMENT_LIMIT) {
    return message;
  }

  logger.info(`${logId}: Writing bodyAttachment to disk`);

  const data = Bytes.fromString(message.body);
  const bodyAttachment = {
    contentType: LONG_MESSAGE,
    ...(await writeNewAttachmentData(data)),
  };

  return {
    ...message,
    bodyAttachment,
  };
}

async function deletePreviews(
  preview: MessageAttributesType['preview'],
  deleteOnDisk: (path: string) => Promise<void>

@@ -25,7 +25,7 @@ export async function cancelBackupMediaDownload(): Promise<void> {
  await resetBackupMediaDownloadItems();
}

export async function resetBackupMediaDownload(): Promise<void> {
export async function resetBackupMediaDownloadProgress(): Promise<void> {
  await resetBackupMediaDownloadItems();
}

@@ -254,6 +254,7 @@ export async function handleEditMessage(
  const editedMessage: EditHistoryType = {
    attachments: nextEditedMessageAttachments,
    body: upgradedEditedMessageData.body,
    bodyAttachment: upgradedEditedMessageData.bodyAttachment,
    bodyRanges: upgradedEditedMessageData.bodyRanges,
    preview: nextEditedMessagePreview,
    sendStateByConversationId:
@@ -277,6 +278,7 @@ export async function handleEditMessage(
  mainMessageModel.set({
    attachments: editedMessage.attachments,
    body: editedMessage.body,
    bodyAttachment: editedMessage.bodyAttachment,
    bodyRanges: editedMessage.bodyRanges,
    editHistory,
    editMessageTimestamp: upgradedEditedMessageData.timestamp,

@@ -17,7 +17,7 @@ export function hasAttachmentDownloads(
    attachment => isLongMessage(attachment.contentType)
  );

  if (longMessageAttachments.length > 0) {
  if (longMessageAttachments.length > 0 || message.bodyAttachment) {
    return true;
  }

@@ -94,30 +94,40 @@ export async function queueAttachmentDownloads(
  }

  if (longMessageAttachments.length > 0) {
    log.info(
      `${idLog}: Queueing ${longMessageAttachments.length} long message attachment downloads`
    );
  }

  if (longMessageAttachments.length > 0) {
    count += 1;
    [bodyAttachment] = longMessageAttachments;
  }

  if (!bodyAttachment && message.bodyAttachment) {
    count += 1;
    bodyAttachment = message.bodyAttachment;
  }

  if (bodyAttachment) {
    await AttachmentDownloadManager.addJob({
      attachment: bodyAttachment,
  const bodyAttachmentsToDownload = [
    bodyAttachment,
    ...(message.editHistory
      ?.slice(1) // first entry is the same as the root level message!
      .map(editHistory => editHistory.bodyAttachment) ?? []),
  ]
    .filter(isNotNil)
    .filter(attachment => !isDownloaded(attachment));

  if (bodyAttachmentsToDownload.length) {
    log.info(
      `${idLog}: Queueing ${bodyAttachmentsToDownload.length} long message attachment download`
    );
    await Promise.all(
      bodyAttachmentsToDownload.map(attachment =>
        AttachmentDownloadManager.addJob({
          attachment,
          messageId,
          attachmentType: 'long-message',
          receivedAt: message.received_at,
          sentAt: message.sent_at,
          urgency,
          source,
        });
        })
      )
    );
    count += bodyAttachmentsToDownload.length;
  }

  if (normalAttachments.length > 0) {
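
On the receive and restore side, a condensed sketch (not part of the commit; names as in the hunk above, job-queue plumbing omitted): any root-level or edit-history bodyAttachment that is not yet downloaded gets its own 'long-message' download job, and when the download completes addAttachmentToMessage hydrates body from the downloaded text while keeping the attachment record:

// Sketch: queueing long-message body attachments for download.
const bodyAttachmentsToDownload = [
  message.bodyAttachment,
  // the first editHistory entry mirrors the root message, so skip it
  ...(message.editHistory?.slice(1).map(edit => edit.bodyAttachment) ?? []),
]
  .filter(isNotNil)
  .filter(attachment => !isDownloaded(attachment));

for (const attachment of bodyAttachmentsToDownload) {
  // eslint-disable-next-line no-await-in-loop
  await AttachmentDownloadManager.addJob({
    attachment,
    messageId,
    attachmentType: 'long-message',
    receivedAt: message.received_at,
    sentAt: message.sent_at,
    urgency,
    source,
  });
}
// On completion, addAttachmentToMessage now sets
//   { body: Bytes.toString(attachmentData), bodyAttachment: attachment }
// instead of clearing bodyAttachment.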