Hydrate redux before updating conversations' last messages after import

Co-authored-by: trevor-signal <131492920+trevor-signal@users.noreply.github.com>
This commit is contained in:
automated-signal 2024-12-03 15:56:17 -06:00 committed by GitHub
parent fa31d718bd
commit 52f74c3b96
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
3 changed files with 41 additions and 19 deletions

View file

@ -1312,7 +1312,14 @@ export class BackupExportStream extends Readable {
groupCall.startedCallRecipientId = recipientId;
}
groupCall.callId = Long.fromString(callId);
try {
groupCall.callId = Long.fromString(callId);
} catch (e) {
// Could not convert callId to long; likely a legacy backfilled callId with uuid
// TODO (DESKTOP-8007)
groupCall.callId = Long.fromNumber(0);
}
groupCall.state = toGroupCallStateProto(callHistory.status);
groupCall.startedCallTimestamp = Long.fromNumber(callHistory.timestamp);
if (callHistory.endedTimestamp != null) {
@ -1333,7 +1340,14 @@ export class BackupExportStream extends Readable {
return { kind: NonBubbleResultKind.Drop };
}
individualCall.callId = Long.fromString(callId);
try {
individualCall.callId = Long.fromString(callId);
} catch (e) {
// TODO (DESKTOP-8007)
// Could not convert callId to long; likely a legacy backfilled callId with uuid
individualCall.callId = Long.fromNumber(0);
}
individualCall.type = toIndividualCallTypeProto(type);
individualCall.direction = toIndividualCallDirectionProto(direction);
individualCall.state = toIndividualCallStateProto(status);

View file

@ -318,6 +318,10 @@ export class BackupImportStream extends Writable {
// Load identity keys we just saved.
await window.storage.protocol.hydrateCaches();
// Load all data into redux (need to do this before updating a
// conversation's last message, which uses redux selectors)
await loadAllAndReinitializeRedux();
const allConversations = window.ConversationController.getAll();
// Update last message in every active conversation now that we have
@ -351,8 +355,6 @@ export class BackupImportStream extends Writable {
.map(([, id]) => id)
);
await loadAllAndReinitializeRedux();
await window.storage.put(
'backupMediaDownloadTotalBytes',
await DataReader.getSizeOfPendingBackupAttachmentDownloadJobs()
@ -2163,7 +2165,7 @@ export class BackupImportStream extends Writable {
: undefined;
let callId: string;
if (callIdLong) {
if (callIdLong?.toNumber()) {
callId = callIdLong.toString();
} else {
// Legacy calls may not have a callId, so we generate one locally
@ -2215,7 +2217,7 @@ export class BackupImportStream extends Writable {
} = updateMessage.individualCall;
let callId: string;
if (callIdLong) {
if (callIdLong?.toNumber()) {
callId = callIdLong.toString();
} else {
// Legacy calls may not have a callId, so we generate one locally
@ -2983,9 +2985,14 @@ export class BackupImportStream extends Writable {
state,
callTimestamp,
}: Backups.IAdHocCall): Promise<void> {
strictAssert(callIdLong, 'AdHocCall must have a callId');
let callId: string;
if (callIdLong?.toNumber()) {
callId = callIdLong.toString();
} else {
// Legacy calls may not have a callId, so we generate one locally
callId = generateUuid();
}
const callId = callIdLong.toString();
const logId = `fromAdhocCall(${callId.slice(-2)})`;
strictAssert(callTimestamp, `${logId}: must have a valid timestamp`);

View file

@ -2,7 +2,7 @@
// SPDX-License-Identifier: AGPL-3.0-only
import { partition } from 'lodash';
import * as log from '../logging/log';
import * as logger from '../logging/log';
import { isLongMessage } from '../types/MIME';
import { getMessageIdForLogging } from './idForLogging';
import {
@ -50,14 +50,15 @@ function getAttachmentSignatureSafe(
try {
return getAttachmentSignature(attachment);
} catch {
log.warn(
'queueAttachmentDownloads: attachment was missing digest',
attachment.blurHash
);
return undefined;
}
}
// Returns a logger appropriate for the given download source. During a
// backup import we expect a very large number of attachment-download queue
// operations, so info-level output is silenced (replaced with a no-op) to
// avoid flooding the log; other levels (warn/error) pass through unchanged.
function getLogger(source: AttachmentDownloadSource) {
  const verbose = source !== AttachmentDownloadSource.BACKUP_IMPORT;
  // NOTE(review): shallow-copies the module logger and overrides only `info`;
  // presumably `logger` is a plain object of functions — confirm it has no
  // methods relying on `this`.
  const log = verbose ? logger : { ...logger, info: () => null };
  return log;
}
// Receive logic
// NOTE: If you're changing any logic in this function that deals with the
// count then you'll also have to modify ./hasAttachmentsDownloads
@ -79,10 +80,7 @@ export async function queueAttachmentDownloads(
let bodyAttachment;
const idLog = `queueAttachmentDownloads(${idForLogging}})`;
log.info(
`${idLog}: Queueing ${attachmentsToQueue.length} attachment downloads`
);
const log = getLogger(source);
const [longMessageAttachments, normalAttachments] = partition(
attachmentsToQueue,
@ -328,12 +326,12 @@ export async function queueAttachmentDownloads(
);
}
log.info(`${idLog}: Queued ${count} total attachment downloads`);
if (count <= 0) {
return;
}
log.info(`${idLog}: Queued ${count} total attachment downloads`);
return {
attachments,
bodyAttachment,
@ -367,6 +365,7 @@ async function queueNormalAttachments({
attachments: Array<AttachmentType>;
count: number;
}> {
const log = getLogger(source);
// Look through "otherAttachments" which can either be attachments in the
// edit history or the message's attachments and see if any of the attachments
// are the same. If they are let's replace it so that we don't download more
@ -465,6 +464,7 @@ async function queuePreviews({
urgency: AttachmentDownloadUrgency;
source: AttachmentDownloadSource;
}): Promise<{ preview: Array<LinkPreviewType>; count: number }> {
const log = getLogger(source);
// Similar to queueNormalAttachments' logic for detecting same attachments
// except here we also pick by link preview URL.
const previewSignatures: Map<string, LinkPreviewType> = new Map();
@ -560,6 +560,7 @@ async function queueQuoteAttachments({
urgency: AttachmentDownloadUrgency;
source: AttachmentDownloadSource;
}): Promise<{ quote?: QuotedMessageType; count: number }> {
const log = getLogger(source);
let count = 0;
if (!quote) {
return { quote, count };