// signal-desktop/ts/sql/Server.ts
// Copyright 2020 Signal Messenger, LLC
2020-10-30 20:34:04 +00:00
// SPDX-License-Identifier: AGPL-3.0-only
/* eslint-disable camelcase */
import { mkdirSync } from 'fs';
import { join } from 'path';
import rimraf from 'rimraf';
import { randomBytes } from 'crypto';
2022-12-14 20:48:36 +00:00
import type { Database, Statement } from '@signalapp/better-sqlite3';
import SQL from '@signalapp/better-sqlite3';
import { v4 as generateUuid } from 'uuid';
2023-08-09 00:53:06 +00:00
import { z } from 'zod';
import type { ReadonlyDeep } from 'type-fest';
import type { Dictionary } from 'lodash';
import {
forEach,
fromPairs,
groupBy,
isBoolean,
isNil,
isNumber,
isString,
last,
map,
mapValues,
noop,
omit,
2024-02-08 18:01:30 +00:00
partition,
2021-05-07 21:50:14 +00:00
pick,
} from 'lodash';
import * as Errors from '../types/errors';
import { ReadStatus } from '../messages/MessageReadStatus';
import type { GroupV2MemberType } from '../model-types.d';
import type { ReactionType } from '../types/Reactions';
import { ReactionReadStatus } from '../types/Reactions';
import { STORAGE_UI_KEYS } from '../types/StorageUIKeys';
import type { StoryDistributionIdString } from '../types/StoryDistributionId';
import type { ServiceIdString, AciString } from '../types/ServiceId';
import { isServiceIdString } from '../types/ServiceId';
import type { StoredJob } from '../jobs/types';
2024-07-22 18:16:33 +00:00
import { assertDev, strictAssert } from '../util/assert';
import { combineNames } from '../util/combineNames';
import { consoleLogger } from '../util/consoleLogger';
2021-07-09 19:36:10 +00:00
import { dropNull } from '../util/dropNull';
import { isNormalNumber } from '../util/isNormalNumber';
2021-04-29 23:02:27 +00:00
import { isNotNil } from '../util/isNotNil';
import { parseIntOrThrow } from '../util/parseIntOrThrow';
import * as durations from '../util/durations';
import { formatCountForLogging } from '../logging/formatCountForLogging';
import type { ConversationColorType, CustomColorType } from '../types/Colors';
2021-11-02 23:01:13 +00:00
import type { BadgeType, BadgeImageType } from '../badges/types';
import { parseBadgeCategory } from '../badges/BadgeCategory';
import { parseBadgeImageTheme } from '../badges/BadgeImageTheme';
import type { LoggerType } from '../types/Logging';
import * as log from '../logging/log';
2023-03-20 22:23:53 +00:00
import type {
EmptyQuery,
ArrayQuery,
Query,
JSONRows,
QueryFragment,
} from './util';
2021-10-26 22:59:08 +00:00
import {
2023-08-09 00:53:06 +00:00
sqlConstant,
2023-03-20 22:23:53 +00:00
sqlJoin,
sqlFragment,
sql,
2021-10-26 22:59:08 +00:00
jsonToObject,
objectToJSON,
batchMultiVarQuery,
getCountFromTable,
removeById,
removeAllFromTable,
getAllFromTable,
getById,
bulkAdd,
createOrUpdate,
setUserVersion,
getUserVersion,
getSchemaVersion,
} from './util';
import { updateSchema } from './migrations';
import type {
2024-07-22 18:16:33 +00:00
ReadableDB,
WritableDB,
2023-03-04 03:03:15 +00:00
AdjacentMessagesByConversationOptionsType,
2022-07-28 16:35:29 +00:00
StoredAllItemsType,
ConversationMetricsType,
ConversationType,
2021-08-31 21:35:01 +00:00
DeleteSentProtoRecipientOptionsType,
2022-08-15 21:53:33 +00:00
DeleteSentProtoRecipientResultType,
2023-03-27 23:48:57 +00:00
EditedMessageType,
EmojiType,
2022-11-28 17:19:48 +00:00
GetAllStoriesResultType,
2022-07-28 16:35:29 +00:00
GetConversationRangeCenteredOnMessageResultType,
GetKnownMessageAttachmentsResultType,
GetRecentStoryRepliesOptionsType,
2022-07-28 16:35:29 +00:00
GetUnreadByConversationAndMarkReadResultType,
IdentityKeyIdType,
2022-07-28 16:35:29 +00:00
StoredIdentityKeyType,
2022-08-03 17:10:49 +00:00
InstalledStickerPackType,
ItemKeyType,
2022-07-28 16:35:29 +00:00
StoredItemType,
ConversationMessageStatsType,
MessageAttachmentsCursorType,
2024-03-15 14:20:33 +00:00
MessageCursorType,
MessageMetricsType,
MessageType,
MessageTypeUnhydrated,
2024-03-15 14:20:33 +00:00
PageMessagesCursorType,
PageMessagesResultType,
PreKeyIdType,
2022-07-28 16:35:29 +00:00
ReactionResultType,
StoredPreKeyType,
ServerSearchResultMessageType,
SenderKeyIdType,
SenderKeyType,
SentMessageDBType,
SentMessagesType,
SentProtoType,
SentProtoWithMessageIdsType,
SentRecipientsDBType,
SentRecipientsType,
2024-07-22 18:16:33 +00:00
ServerReadableInterface,
ServerWritableInterface,
SessionIdType,
SessionType,
SignedPreKeyIdType,
2022-07-28 16:35:29 +00:00
StoredSignedPreKeyType,
2022-08-03 17:10:49 +00:00
StickerPackInfoType,
StickerPackStatusType,
StickerPackType,
StickerType,
StoryDistributionMemberType,
StoryDistributionType,
StoryDistributionWithMembersType,
StoryReadType,
2022-08-03 17:10:49 +00:00
UninstalledStickerPackType,
UnprocessedType,
UnprocessedUpdateType,
2023-03-20 22:23:53 +00:00
GetNearbyMessageFromDeletedSetOptionsType,
StoredKyberPreKeyType,
2024-05-29 23:46:43 +00:00
BackupCdnMediaObjectType,
} from './Interface';
import { SeenStatus } from '../MessageSeenStatus';
import {
SNIPPET_LEFT_PLACEHOLDER,
SNIPPET_RIGHT_PLACEHOLDER,
SNIPPET_TRUNCATION_PLACEHOLDER,
} from '../util/search';
2023-08-09 00:53:06 +00:00
import type {
CallHistoryDetails,
CallHistoryFilter,
CallHistoryGroup,
CallHistoryPagination,
2024-06-26 00:58:38 +00:00
CallLogEventTarget,
2023-08-09 00:53:06 +00:00
} from '../types/CallDisposition';
import {
DirectCallStatus,
callHistoryGroupSchema,
CallHistoryFilterStatus,
callHistoryDetailsSchema,
CallDirection,
2024-02-08 18:01:30 +00:00
GroupCallStatus,
CallType,
2024-06-26 00:58:38 +00:00
CallStatusValue,
2023-08-09 00:53:06 +00:00
} from '../types/CallDisposition';
2024-04-01 19:19:35 +00:00
import {
callLinkExists,
getAllCallLinks,
2024-04-25 17:09:05 +00:00
getCallLinkByRoomId,
2024-04-01 19:19:35 +00:00
insertCallLink,
2024-04-25 17:09:05 +00:00
updateCallLinkAdminKeyByRoomId,
2024-04-01 19:19:35 +00:00
updateCallLinkState,
} from './server/callLinks';
2024-05-20 18:15:39 +00:00
import {
replaceAllEndorsementsForGroup,
deleteAllEndorsementsForGroup,
getGroupSendCombinedEndorsementExpiration,
} from './server/groupEndorsements';
2024-04-01 19:19:35 +00:00
import { CallMode } from '../types/Calling';
import {
attachmentDownloadJobSchema,
type AttachmentDownloadJobType,
} from '../types/AttachmentDownload';
import { MAX_SYNC_TASK_ATTEMPTS } from '../util/syncTasks.types';
import type { SyncTaskType } from '../util/syncTasks';
import { isMoreRecentThan } from '../util/timestamp';
2024-05-29 23:46:43 +00:00
import {
type AttachmentBackupJobType,
attachmentBackupJobSchema,
} from '../types/AttachmentBackup';
import { redactGenericText } from '../util/privacy';
type ConversationRow = Readonly<{
json: string;
profileLastFetchedAt: null | number;
}>;
type ConversationRows = Array<ConversationRow>;
type StickerRow = Readonly<{
id: number;
packId: string;
emoji: string | null;
height: number;
isCoverOnly: number;
lastUsed: number;
path: string;
width: number;
2024-07-11 19:44:09 +00:00
version: 1 | 2;
localKey: string | null;
size: number | null;
}>;
// Because we can't force this module to conform to an interface, we narrow our exports
// to this one default export, which does conform to the interface.
// Note: In Javascript, you need to access the .default property when requiring it
// https://github.com/microsoft/TypeScript/issues/420
2024-07-22 18:16:33 +00:00
// Read-only half of the database interface. Every entry is a free function
// in this file that takes a ReadableDB as its first argument.
export const DataReader: ServerReadableInterface = {
  close: closeReadable,

  // Key/credential tables
  getIdentityKeyById,
  getAllIdentityKeys,

  getKyberPreKeyById,
  getAllKyberPreKeys,
  getPreKeyById,
  getAllPreKeys,
  getSignedPreKeyById,
  getAllSignedPreKeys,
  getItemById,
  getAllItems,
  getSenderKeyById,
  getAllSenderKeys,
  getAllSentProtos,
  _getAllSentProtoRecipients,
  _getAllSentProtoMessageIds,
  getAllSessions,

  // Conversations
  getConversationCount,
  getConversationById,

  getAllConversations,
  getAllConversationIds,
  getAllGroupsInvolvingServiceId,

  getGroupSendCombinedEndorsementExpiration,

  // Messages and search
  searchMessages,

  getMessageCount,
  getStoryCount,
  getRecentStoryReplies,
  countStoryReadsByConversation,

  getReactionByTimestamp,
  _getAllReactions,

  getMessageBySender,
  getMessageById,
  getMessagesById,
  _getAllMessages,
  _getAllEditedMessages,
  getAllMessageIds,
  getMessagesBySentAt,
  getExpiredMessages,
  getMessagesUnexpectedlyMissingExpirationStartTimestamp,
  getSoonestMessageExpiry,
  getNextTapToViewMessageTimestampToAgeOut,
  getTapToViewMessagesNeedingErase,
  getOlderMessagesByConversation,
  getAllStories,
  getNewerMessagesByConversation,
  getOldestUnreadMentionOfMeForConversation,
  getTotalUnreadForConversation,
  getTotalUnreadMentionsOfMeForConversation,
  getMessageMetricsForConversation,
  getConversationRangeCenteredOnMessage,
  getConversationMessageStats,
  getLastConversationMessage,

  // Call history and call links
  getAllCallHistory,
  getCallHistoryUnreadCount,
  getCallHistoryMessageByCallId,
  getCallHistory,
  getCallHistoryGroupsCount,
  getCallHistoryGroups,
  hasGroupCallHistoryMessage,
  callLinkExists,
  getAllCallLinks,
  getCallLinkByRoomId,
  getMessagesBetween,
  getNearbyMessageFromDeletedSet,
  getMostRecentAddressableMessages,
  getMostRecentAddressableNondisappearingMessages,

  getUnprocessedCount,
  getUnprocessedById,
  getAttachmentDownloadJob,

  // Stickers, emoji, badges
  getStickerCount,
  getAllStickerPacks,
  getInstalledStickerPacks,
  getUninstalledStickerPacks,
  getStickerPackInfo,
  getAllStickers,
  getRecentStickers,
  getRecentEmojis,

  getAllBadges,
  getAllBadgeImageFileLocalPaths,

  // Stories
  getAllStoryDistributionsWithMembers,
  getStoryDistributionWithMembers,
  _getAllStoryDistributions,
  _getAllStoryDistributionMembers,

  _getAllStoryReads,
  getLastStoryReadsForAuthor,

  getMessagesNeedingUpgrade,
  getMessagesWithVisualMediaAttachments,
  getMessagesWithFileAttachments,
  getMessageServerGuidsForSpam,

  getJobsInQueue,

  wasGroupCallRingPreviouslyCanceled,

  getMaxMessageCounter,

  getStatisticsForLogging,

  getBackupCdnObjectMetadata,

  // Server-only
  getKnownMessageAttachments,
  finishGetKnownMessageAttachments,
  pageMessages,
  finishPageMessages,
  getKnownConversationAttachments,
};
export const DataWriter: ServerWritableInterface = {
close: closeWritable,
removeDB,
removeIndexedDBFiles,
2018-10-18 01:01:21 +00:00
createOrUpdateIdentityKey,
bulkAddIdentityKeys,
removeIdentityKeyById,
removeAllIdentityKeys,
createOrUpdateKyberPreKey,
bulkAddKyberPreKeys,
removeKyberPreKeyById,
removeKyberPreKeysByServiceId,
removeAllKyberPreKeys,
2018-10-18 01:01:21 +00:00
createOrUpdatePreKey,
bulkAddPreKeys,
removePreKeyById,
removePreKeysByServiceId,
2018-10-18 01:01:21 +00:00
removeAllPreKeys,
createOrUpdateSignedPreKey,
bulkAddSignedPreKeys,
removeSignedPreKeyById,
removeSignedPreKeysByServiceId,
2018-10-18 01:01:21 +00:00
removeAllSignedPreKeys,
createOrUpdateItem,
removeItemById,
removeAllItems,
createOrUpdateSenderKey,
removeAllSenderKeys,
2021-05-25 22:40:04 +00:00
removeSenderKeyById,
insertSentProto,
deleteSentProtosOlderThan,
deleteSentProtoByMessageId,
insertProtoRecipients,
deleteSentProtoRecipient,
removeAllSentProtos,
2024-07-22 18:16:33 +00:00
getSentProtoByRecipient,
2018-10-18 01:01:21 +00:00
createOrUpdateSession,
2019-09-26 19:56:31 +00:00
createOrUpdateSessions,
commitDecryptResult,
2018-10-18 01:01:21 +00:00
bulkAddSessions,
removeSessionById,
removeSessionsByConversation,
removeSessionsByServiceId,
2018-10-18 01:01:21 +00:00
removeAllSessions,
2018-09-21 01:47:19 +00:00
saveConversation,
saveConversations,
updateConversation,
2019-09-26 19:56:31 +00:00
updateConversations,
2018-09-21 01:47:19 +00:00
removeConversation,
_removeAllConversations,
updateAllConversationColors,
2022-07-08 20:46:25 +00:00
removeAllProfileKeyCredentials,
2024-07-22 18:16:33 +00:00
getUnreadByConversationAndMarkRead,
getUnreadReactionsAndMarkRead,
2019-01-14 21:47:19 +00:00
2024-05-20 18:15:39 +00:00
replaceAllEndorsementsForGroup,
deleteAllEndorsementsForGroup,
2018-09-21 01:47:19 +00:00
saveMessage,
saveMessages,
removeMessage,
2021-01-13 00:42:15 +00:00
removeMessages,
markReactionAsRead,
addReaction,
removeReactionFromConversation,
_removeAllReactions,
_removeAllMessages,
2023-03-27 23:48:57 +00:00
getUnreadEditedMessagesAndMarkRead,
2023-08-09 00:53:06 +00:00
clearCallHistory,
markCallHistoryDeleted,
cleanupCallHistoryMessages,
markCallHistoryRead,
markAllCallHistoryRead,
2024-06-26 00:58:38 +00:00
markAllCallHistoryReadInConversation,
2023-08-09 00:53:06 +00:00
saveCallHistory,
2024-02-08 18:01:30 +00:00
markCallHistoryMissed,
2024-04-01 19:19:35 +00:00
insertCallLink,
2024-04-25 17:09:05 +00:00
updateCallLinkAdminKeyByRoomId,
2024-04-01 19:19:35 +00:00
updateCallLinkState,
migrateConversationMessages,
2023-03-27 23:48:57 +00:00
saveEditedMessage,
2024-06-03 17:02:25 +00:00
saveEditedMessages,
removeSyncTaskById,
saveSyncTasks,
getAllSyncTasks,
getUnprocessedByIdsAndIncrementAttempts,
getAllUnprocessedIds,
updateUnprocessedWithData,
2019-09-26 19:56:31 +00:00
updateUnprocessedsWithData,
removeUnprocessed,
removeAllUnprocessed,
getNextAttachmentDownloadJobs,
saveAttachmentDownloadJob,
resetAttachmentDownloadActive,
removeAttachmentDownloadJob,
2024-05-29 23:46:43 +00:00
getNextAttachmentBackupJobs,
saveAttachmentBackupJob,
markAllAttachmentBackupJobsInactive,
removeAttachmentBackupJob,
clearAllAttachmentBackupJobs,
2024-05-29 23:46:43 +00:00
clearAllBackupCdnObjectMetadata,
saveBackupCdnObjectMetadata,
createOrUpdateStickerPack,
updateStickerPackStatus,
2022-08-03 17:10:49 +00:00
updateStickerPackInfo,
createOrUpdateSticker,
2024-07-11 19:44:09 +00:00
createOrUpdateStickers,
updateStickerLastUsed,
addStickerPackReference,
deleteStickerPackReference,
deleteStickerPack,
2022-08-03 17:10:49 +00:00
addUninstalledStickerPack,
removeUninstalledStickerPack,
installStickerPack,
uninstallStickerPack,
2021-01-27 22:39:45 +00:00
clearAllErrorStickerPackAttempts,
2019-05-24 23:58:27 +00:00
updateEmojiUsage,
2021-11-02 23:01:13 +00:00
updateOrCreateBadges,
badgeImageFileDownloaded,
2024-07-22 18:16:33 +00:00
getRecentStaleRingsAndMarkOlderMissed,
_deleteAllStoryDistributions,
createNewStoryDistribution,
modifyStoryDistribution,
modifyStoryDistributionMembers,
2022-07-01 00:52:03 +00:00
modifyStoryDistributionWithMembers,
deleteStoryDistribution,
_deleteAllStoryReads,
addNewStoryRead,
removeAll,
2018-10-18 01:01:21 +00:00
removeAllConfiguration,
eraseStorageServiceState,
2021-04-29 23:02:27 +00:00
insertJob,
deleteJob,
processGroupCallRingCancellation,
cleanExpiredGroupCallRingCancellations,
2021-08-20 16:06:15 +00:00
// Server-only
removeKnownStickers,
2019-08-07 00:40:25 +00:00
removeKnownDraftAttachments,
runCorruptionChecks,
};
2021-10-26 22:59:08 +00:00
type DatabaseQueryCache = Map<string, Statement<Array<unknown>>>;
const statementCache = new WeakMap<Database, DatabaseQueryCache>();
2024-04-01 19:19:35 +00:00
export function prepare<T extends Array<unknown> | Record<string, unknown>>(
2024-07-22 18:16:33 +00:00
db: ReadableDB,
query: string,
{ pluck = false }: { pluck?: boolean } = {}
2022-11-14 19:35:37 +00:00
): Statement<T> {
let dbCache = statementCache.get(db);
if (!dbCache) {
dbCache = new Map();
statementCache.set(db, dbCache);
}
const cacheKey = `${pluck}:${query}`;
let result = dbCache.get(cacheKey) as Statement<T>;
if (!result) {
result = db.prepare<T>(query);
if (pluck === true) {
result.pluck();
}
dbCache.set(cacheKey, result);
}
return result;
}
// Hydrate a conversation from its DB row: parse the JSON blob and overlay
// the separately-stored profileLastFetchedAt column.
function rowToConversation(row: ConversationRow): ConversationType {
  const { profileLastFetchedAt: rawFetchedAt } = row;

  let profileLastFetchedAt: undefined | number;
  if (isNormalNumber(rawFetchedAt)) {
    profileLastFetchedAt = rawFetchedAt;
  } else {
    // Anything other than a finite number or NULL is corrupt data.
    assertDev(
      isNil(rawFetchedAt),
      'profileLastFetchedAt contained invalid data; defaulting to undefined'
    );
    profileLastFetchedAt = undefined;
  }

  return {
    ...JSON.parse(row.json),
    profileLastFetchedAt,
  };
}
function rowToSticker(row: StickerRow): StickerType {
return {
...row,
isCoverOnly: Boolean(row.isCoverOnly),
2021-07-09 19:36:10 +00:00
emoji: dropNull(row.emoji),
2024-07-11 19:44:09 +00:00
version: row.version || 1,
localKey: dropNull(row.localKey),
size: dropNull(row.size),
};
2021-10-26 22:59:08 +00:00
}
2024-07-22 18:16:33 +00:00
function keyDatabase(db: WritableDB, key: string): void {
2021-10-26 22:59:08 +00:00
// https://www.zetetic.net/sqlcipher/sqlcipher-api/#key
db.pragma(`key = "x'${key}'"`);
}
2024-07-22 18:16:33 +00:00
function switchToWAL(db: WritableDB): void {
2021-10-26 22:59:08 +00:00
// https://sqlite.org/wal.html
db.pragma('journal_mode = WAL');
db.pragma('synchronous = FULL');
}
2024-07-22 18:16:33 +00:00
function migrateSchemaVersion(db: WritableDB): void {
2021-10-26 22:59:08 +00:00
const userVersion = getUserVersion(db);
if (userVersion > 0) {
return;
}
2021-10-26 22:59:08 +00:00
const schemaVersion = getSchemaVersion(db);
const newUserVersion = schemaVersion > 18 ? 16 : schemaVersion;
logger.info(
'migrateSchemaVersion: Migrating from schema_version ' +
`${schemaVersion} to user_version ${newUserVersion}`
);
2021-10-26 22:59:08 +00:00
setUserVersion(db, newUserVersion);
}
2023-10-03 00:27:02 +00:00
function openAndMigrateDatabase(
filePath: string,
key: string,
readonly: boolean
2024-07-22 18:16:33 +00:00
): WritableDB {
let db: WritableDB | undefined;
2021-10-26 22:59:08 +00:00
// First, we try to open the database without any cipher changes
try {
2023-10-03 00:27:02 +00:00
db = new SQL(filePath, {
readonly,
2024-07-22 18:16:33 +00:00
}) as WritableDB;
2021-10-26 22:59:08 +00:00
keyDatabase(db, key);
switchToWAL(db);
migrateSchemaVersion(db);
2021-10-26 22:59:08 +00:00
return db;
} catch (error) {
if (db) {
db.close();
}
2021-10-26 22:59:08 +00:00
logger.info('migrateDatabase: Migration without cipher change failed');
}
2021-10-26 22:59:08 +00:00
// If that fails, we try to open the database with 3.x compatibility to extract the
// user_version (previously stored in schema_version, blown away by cipher_migrate).
2024-07-22 18:16:33 +00:00
db = new SQL(filePath) as WritableDB;
2021-10-26 22:59:08 +00:00
keyDatabase(db, key);
2021-10-26 22:59:08 +00:00
// https://www.zetetic.net/blog/2018/11/30/sqlcipher-400-release/#compatability-sqlcipher-4-0-0
db.pragma('cipher_compatibility = 3');
migrateSchemaVersion(db);
db.close();
2021-10-26 22:59:08 +00:00
// After migrating user_version -> schema_version, we reopen database, because we can't
// migrate to the latest ciphers after we've modified the defaults.
2024-07-22 18:16:33 +00:00
db = new SQL(filePath) as WritableDB;
2021-10-26 22:59:08 +00:00
keyDatabase(db, key);
2021-10-26 22:59:08 +00:00
db.pragma('cipher_migrate');
switchToWAL(db);
2021-10-26 22:59:08 +00:00
return db;
}
2021-10-26 22:59:08 +00:00
const INVALID_KEY = /[^0-9A-Fa-f]/;
2023-10-03 00:27:02 +00:00
function openAndSetUpSQLCipher(
filePath: string,
{ key, readonly }: { key: string; readonly: boolean }
) {
2021-10-26 22:59:08 +00:00
const match = INVALID_KEY.exec(key);
if (match) {
throw new Error(`setupSQLCipher: key '${key}' is not valid`);
}
2023-10-03 00:27:02 +00:00
const db = openAndMigrateDatabase(filePath, key, readonly);
2021-10-26 22:59:08 +00:00
// Because foreign key support is not enabled by default!
db.pragma('foreign_keys = ON');
2021-10-26 22:59:08 +00:00
return db;
}
// Module-level state. `logger` and the two paths are set by initialize();
// until then we fall back to the console logger.
let logger = consoleLogger;
let databaseFilePath: string | undefined;
let indexedDBPath: string | undefined;
2023-01-18 22:12:33 +00:00
// Forward SQLite's internal log messages (e.g. recovery/corruption notices)
// into our own logger.
SQL.setLogHandler((code, value) => {
  logger.warn(`Database log code=${code}: ${value}`);
});
2024-07-22 18:16:33 +00:00
export function initialize({
configDir,
key,
2024-07-22 18:16:33 +00:00
isPrimary,
logger: suppliedLogger,
}: {
2023-10-10 23:55:32 +00:00
appVersion: string;
configDir: string;
key: string;
2024-07-22 18:16:33 +00:00
isPrimary: boolean;
logger: LoggerType;
2024-07-22 18:16:33 +00:00
}): WritableDB {
if (!isString(configDir)) {
throw new Error('initialize: configDir is required!');
}
if (!isString(key)) {
throw new Error('initialize: key is required!');
}
logger = suppliedLogger;
indexedDBPath = join(configDir, 'IndexedDB');
const dbDir = join(configDir, 'sql');
mkdirSync(dbDir, { recursive: true });
databaseFilePath = join(dbDir, 'db.sqlite');
2024-07-22 18:16:33 +00:00
let db: WritableDB | undefined;
2019-08-19 22:26:45 +00:00
try {
2024-07-22 18:16:33 +00:00
db = openAndSetUpSQLCipher(databaseFilePath, {
2023-10-03 00:27:02 +00:00
key,
readonly: false,
});
// For profiling use:
// db.pragma('cipher_profile=\'sqlcipher.log\'');
2024-07-22 18:16:33 +00:00
// Only the first worker gets to upgrade the schema. The rest just folow.
if (isPrimary) {
updateSchema(db, logger);
}
2019-08-19 22:26:45 +00:00
// test database
2024-07-22 18:16:33 +00:00
getMessageCount(db);
return db;
} catch (error) {
logger.error('Database startup error:', error.stack);
2024-07-22 18:16:33 +00:00
db?.close();
2021-03-04 21:44:57 +00:00
throw error;
}
}
2024-07-22 18:16:33 +00:00
// Test-only bootstrap: silence `info` logging (keep warn/error) and run the
// schema migrations against the provided database.
export function setupTests(db: WritableDB): void {
  logger = {
    ...consoleLogger,
    info: noop,
  };
  updateSchema(db, logger);
}
2024-07-22 18:16:33 +00:00
// Close a read-only database connection.
function closeReadable(db: ReadableDB): void {
  db.close();
}
2024-07-22 18:16:33 +00:00
function closeWritable(db: WritableDB): void {
2023-05-04 17:59:02 +00:00
// SQLLite documentation suggests that we run `PRAGMA optimize` right
// before closing the database connection.
2024-07-22 18:16:33 +00:00
db.pragma('optimize');
2024-03-15 14:20:33 +00:00
2024-07-22 18:16:33 +00:00
db.close();
2024-03-15 14:20:33 +00:00
}
2024-07-22 18:16:33 +00:00
function removeDB(db: WritableDB): void {
try {
db.close();
} catch (error) {
logger.error('removeDB: Failed to close database:', error.stack);
2024-03-15 14:20:33 +00:00
}
if (!databaseFilePath) {
throw new Error(
'removeDB: Cannot erase database without a databaseFilePath!'
);
}
logger.warn('removeDB: Removing all database files');
rimraf.sync(databaseFilePath);
rimraf.sync(`${databaseFilePath}-shm`);
rimraf.sync(`${databaseFilePath}-wal`);
}
2024-07-22 18:16:33 +00:00
// Remove the legacy IndexedDB leveldb directories left over from before the
// SQL migration. One-shot: clears indexedDBPath so it can't run twice.
function removeIndexedDBFiles(_db: WritableDB): void {
  if (!indexedDBPath) {
    throw new Error(
      'removeIndexedDBFiles: Need to initialize and set indexedDBPath first!'
    );
  }

  rimraf.sync(join(indexedDBPath, '*.leveldb'));
  indexedDBPath = undefined;
}
// This is okay to use for queries that:
//
// - Don't modify persistent tables, but create and do work in temporary
// tables
// - Integrity checks
//
2024-07-22 18:16:33 +00:00
// Deliberate unsafe cast from a reader connection to a writer. The _reason
// literal both documents and restricts the allowed justifications.
function toUnsafeWritableDB(
  db: ReadableDB,
  _reason: 'only temp table use' | 'integrity check'
): WritableDB {
  return db as unknown as WritableDB;
}
2018-10-18 01:01:21 +00:00
const IDENTITY_KEYS_TABLE = 'identityKeys';
2024-07-22 18:16:33 +00:00
function createOrUpdateIdentityKey(
db: WritableDB,
2022-07-28 16:35:29 +00:00
data: StoredIdentityKeyType
2024-07-22 18:16:33 +00:00
): void {
return createOrUpdate(db, IDENTITY_KEYS_TABLE, data);
2018-10-18 01:01:21 +00:00
}
2024-07-22 18:16:33 +00:00
function getIdentityKeyById(
db: ReadableDB,
id: IdentityKeyIdType
2024-07-22 18:16:33 +00:00
): StoredIdentityKeyType | undefined {
return getById(db, IDENTITY_KEYS_TABLE, id);
2018-10-18 01:01:21 +00:00
}
2024-07-22 18:16:33 +00:00
function bulkAddIdentityKeys(
db: WritableDB,
2022-07-28 16:35:29 +00:00
array: Array<StoredIdentityKeyType>
2024-07-22 18:16:33 +00:00
): void {
return bulkAdd(db, IDENTITY_KEYS_TABLE, array);
2018-10-18 01:01:21 +00:00
}
2024-07-22 18:16:33 +00:00
function removeIdentityKeyById(db: WritableDB, id: IdentityKeyIdType): number {
return removeById(db, IDENTITY_KEYS_TABLE, id);
2018-10-18 01:01:21 +00:00
}
2024-07-22 18:16:33 +00:00
function removeAllIdentityKeys(db: WritableDB): number {
return removeAllFromTable(db, IDENTITY_KEYS_TABLE);
2018-10-18 01:01:21 +00:00
}
2024-07-22 18:16:33 +00:00
function getAllIdentityKeys(db: ReadableDB): Array<StoredIdentityKeyType> {
return getAllFromTable(db, IDENTITY_KEYS_TABLE);
}
2018-10-18 01:01:21 +00:00
const KYBER_PRE_KEYS_TABLE = 'kyberPreKeys';
2024-07-22 18:16:33 +00:00
function createOrUpdateKyberPreKey(
db: WritableDB,
data: StoredKyberPreKeyType
2024-07-22 18:16:33 +00:00
): void {
return createOrUpdate(db, KYBER_PRE_KEYS_TABLE, data);
}
2024-07-22 18:16:33 +00:00
function getKyberPreKeyById(
db: ReadableDB,
id: PreKeyIdType
2024-07-22 18:16:33 +00:00
): StoredKyberPreKeyType | undefined {
return getById(db, KYBER_PRE_KEYS_TABLE, id);
}
2024-07-22 18:16:33 +00:00
function bulkAddKyberPreKeys(
db: WritableDB,
array: Array<StoredKyberPreKeyType>
2024-07-22 18:16:33 +00:00
): void {
return bulkAdd(db, KYBER_PRE_KEYS_TABLE, array);
}
2024-07-22 18:16:33 +00:00
function removeKyberPreKeyById(
db: WritableDB,
id: PreKeyIdType | Array<PreKeyIdType>
2024-07-22 18:16:33 +00:00
): number {
return removeById(db, KYBER_PRE_KEYS_TABLE, id);
}
2024-07-22 18:16:33 +00:00
function removeKyberPreKeysByServiceId(
db: WritableDB,
serviceId: ServiceIdString
2024-07-22 18:16:33 +00:00
): void {
2023-08-16 20:54:39 +00:00
db.prepare<Query>(
'DELETE FROM kyberPreKeys WHERE ourServiceId IS $serviceId;'
).run({
serviceId,
});
}
2024-07-22 18:16:33 +00:00
function removeAllKyberPreKeys(db: WritableDB): number {
return removeAllFromTable(db, KYBER_PRE_KEYS_TABLE);
}
2024-07-22 18:16:33 +00:00
function getAllKyberPreKeys(db: ReadableDB): Array<StoredKyberPreKeyType> {
return getAllFromTable(db, KYBER_PRE_KEYS_TABLE);
}
2018-10-18 01:01:21 +00:00
const PRE_KEYS_TABLE = 'preKeys';
2024-07-22 18:16:33 +00:00
function createOrUpdatePreKey(db: WritableDB, data: StoredPreKeyType): void {
return createOrUpdate(db, PRE_KEYS_TABLE, data);
2018-10-18 01:01:21 +00:00
}
2024-07-22 18:16:33 +00:00
function getPreKeyById(
db: ReadableDB,
id: PreKeyIdType
2024-07-22 18:16:33 +00:00
): StoredPreKeyType | undefined {
return getById(db, PRE_KEYS_TABLE, id);
2018-10-18 01:01:21 +00:00
}
2024-07-22 18:16:33 +00:00
function bulkAddPreKeys(db: WritableDB, array: Array<StoredPreKeyType>): void {
return bulkAdd(db, PRE_KEYS_TABLE, array);
2018-10-18 01:01:21 +00:00
}
2024-07-22 18:16:33 +00:00
function removePreKeyById(
db: WritableDB,
id: PreKeyIdType | Array<PreKeyIdType>
2024-07-22 18:16:33 +00:00
): number {
return removeById(db, PRE_KEYS_TABLE, id);
2018-10-18 01:01:21 +00:00
}
2024-07-22 18:16:33 +00:00
function removePreKeysByServiceId(
db: WritableDB,
serviceId: ServiceIdString
2024-07-22 18:16:33 +00:00
): void {
2023-08-16 20:54:39 +00:00
db.prepare<Query>(
'DELETE FROM preKeys WHERE ourServiceId IS $serviceId;'
).run({
serviceId,
2022-07-28 16:35:29 +00:00
});
}
2024-07-22 18:16:33 +00:00
function removeAllPreKeys(db: WritableDB): number {
return removeAllFromTable(db, PRE_KEYS_TABLE);
2018-10-18 01:01:21 +00:00
}
2024-07-22 18:16:33 +00:00
function getAllPreKeys(db: ReadableDB): Array<StoredPreKeyType> {
return getAllFromTable(db, PRE_KEYS_TABLE);
}
2018-10-18 01:01:21 +00:00
const SIGNED_PRE_KEYS_TABLE = 'signedPreKeys';
2024-07-22 18:16:33 +00:00
function createOrUpdateSignedPreKey(
db: WritableDB,
2022-07-28 16:35:29 +00:00
data: StoredSignedPreKeyType
2024-07-22 18:16:33 +00:00
): void {
return createOrUpdate(db, SIGNED_PRE_KEYS_TABLE, data);
2018-10-18 01:01:21 +00:00
}
2024-07-22 18:16:33 +00:00
function getSignedPreKeyById(
db: ReadableDB,
id: SignedPreKeyIdType
2024-07-22 18:16:33 +00:00
): StoredSignedPreKeyType | undefined {
return getById(db, SIGNED_PRE_KEYS_TABLE, id);
2018-10-18 01:01:21 +00:00
}
2024-07-22 18:16:33 +00:00
function bulkAddSignedPreKeys(
db: WritableDB,
2022-07-28 16:35:29 +00:00
array: Array<StoredSignedPreKeyType>
2024-07-22 18:16:33 +00:00
): void {
return bulkAdd(db, SIGNED_PRE_KEYS_TABLE, array);
2018-10-18 01:01:21 +00:00
}
2024-07-22 18:16:33 +00:00
function removeSignedPreKeyById(
db: WritableDB,
id: SignedPreKeyIdType | Array<SignedPreKeyIdType>
2024-07-22 18:16:33 +00:00
): number {
return removeById(db, SIGNED_PRE_KEYS_TABLE, id);
2018-10-18 01:01:21 +00:00
}
2024-07-22 18:16:33 +00:00
function removeSignedPreKeysByServiceId(
db: WritableDB,
serviceId: ServiceIdString
2024-07-22 18:16:33 +00:00
): void {
2023-08-16 20:54:39 +00:00
db.prepare<Query>(
'DELETE FROM signedPreKeys WHERE ourServiceId IS $serviceId;'
).run({
serviceId,
2022-07-28 16:35:29 +00:00
});
}
2024-07-22 18:16:33 +00:00
function removeAllSignedPreKeys(db: WritableDB): number {
return removeAllFromTable(db, SIGNED_PRE_KEYS_TABLE);
2018-10-18 01:01:21 +00:00
}
2024-07-22 18:16:33 +00:00
function getAllSignedPreKeys(db: ReadableDB): Array<StoredSignedPreKeyType> {
const rows: JSONRows = db
.prepare<EmptyQuery>(
`
SELECT json
FROM signedPreKeys
ORDER BY id ASC;
`
)
.all();
return rows.map(row => jsonToObject(row.json));
}
2018-10-18 01:01:21 +00:00
const ITEMS_TABLE = 'items';
2024-07-22 18:16:33 +00:00
function createOrUpdateItem<K extends ItemKeyType>(
db: WritableDB,
2022-07-28 16:35:29 +00:00
data: StoredItemType<K>
2024-07-22 18:16:33 +00:00
): void {
return createOrUpdate(db, ITEMS_TABLE, data);
2018-10-18 01:01:21 +00:00
}
2024-07-22 18:16:33 +00:00
function getItemById<K extends ItemKeyType>(
db: ReadableDB,
id: K
2024-07-22 18:16:33 +00:00
): StoredItemType<K> | undefined {
return getById(db, ITEMS_TABLE, id);
2018-10-18 01:01:21 +00:00
}
2024-07-22 18:16:33 +00:00
function getAllItems(db: ReadableDB): StoredAllItemsType {
const rows: JSONRows = db
.prepare<EmptyQuery>('SELECT json FROM items ORDER BY id ASC;')
.all();
2021-10-26 22:59:08 +00:00
type RawItemType = { id: ItemKeyType; value: unknown };
const items = rows.map(row => jsonToObject<RawItemType>(row.json));
2021-10-26 22:59:08 +00:00
const result: Record<ItemKeyType, unknown> = Object.create(null);
for (const { id, value } of items) {
2021-10-26 22:59:08 +00:00
result[id] = value;
}
2022-07-28 16:35:29 +00:00
return result as unknown as StoredAllItemsType;
2018-10-18 01:01:21 +00:00
}
2024-07-22 18:16:33 +00:00
function removeItemById(
db: WritableDB,
id: ItemKeyType | Array<ItemKeyType>
2024-07-22 18:16:33 +00:00
): number {
return removeById(db, ITEMS_TABLE, id);
2018-10-18 01:01:21 +00:00
}
2024-07-22 18:16:33 +00:00
function removeAllItems(db: WritableDB): number {
return removeAllFromTable(db, ITEMS_TABLE);
}
2024-07-22 18:16:33 +00:00
function createOrUpdateSenderKey(db: WritableDB, key: SenderKeyType): void {
prepare(
db,
`
INSERT OR REPLACE INTO senderKeys (
id,
senderId,
distributionId,
data,
lastUpdatedDate
) values (
$id,
$senderId,
$distributionId,
$data,
$lastUpdatedDate
)
`
).run(key);
}
2024-07-22 18:16:33 +00:00
function getSenderKeyById(
db: ReadableDB,
id: SenderKeyIdType
2024-07-22 18:16:33 +00:00
): SenderKeyType | undefined {
const row = prepare(db, 'SELECT * FROM senderKeys WHERE id = $id').get({
id,
});
return row;
}
2024-07-22 18:16:33 +00:00
function removeAllSenderKeys(db: WritableDB): void {
prepare<EmptyQuery>(db, 'DELETE FROM senderKeys').run();
}
2024-07-22 18:16:33 +00:00
function getAllSenderKeys(db: ReadableDB): Array<SenderKeyType> {
const rows = prepare<EmptyQuery>(db, 'SELECT * FROM senderKeys').all();
return rows;
}
2024-07-22 18:16:33 +00:00
function removeSenderKeyById(db: WritableDB, id: SenderKeyIdType): void {
2021-05-25 22:40:04 +00:00
prepare(db, 'DELETE FROM senderKeys WHERE id = $id').run({ id });
}
2024-07-22 18:16:33 +00:00
function insertSentProto(
db: WritableDB,
proto: SentProtoType,
options: {
recipients: SentRecipientsType;
messageIds: SentMessagesType;
}
2024-07-22 18:16:33 +00:00
): number {
const { recipients, messageIds } = options;
// Note: we use `pluck` in this function to fetch only the first column of returned row.
return db.transaction(() => {
// 1. Insert the payload, fetching its primary key id
const info = prepare(
db,
`
INSERT INTO sendLogPayloads (
contentHint,
proto,
timestamp,
2022-08-15 21:53:33 +00:00
urgent,
hasPniSignatureMessage
) VALUES (
$contentHint,
$proto,
$timestamp,
2022-08-15 21:53:33 +00:00
$urgent,
$hasPniSignatureMessage
);
`
).run({
...proto,
urgent: proto.urgent ? 1 : 0,
2022-08-15 21:53:33 +00:00
hasPniSignatureMessage: proto.hasPniSignatureMessage ? 1 : 0,
});
const id = parseIntOrThrow(
info.lastInsertRowid,
'insertSentProto/lastInsertRowid'
);
// 2. Insert a record for each recipient device.
const recipientStatement = prepare(
db,
`
INSERT INTO sendLogRecipients (
payloadId,
2023-08-16 20:54:39 +00:00
recipientServiceId,
deviceId
) VALUES (
$id,
2023-08-16 20:54:39 +00:00
$recipientServiceId,
$deviceId
);
`
);
const recipientServiceIds = Object.keys(recipients);
for (const recipientServiceId of recipientServiceIds) {
strictAssert(
isServiceIdString(recipientServiceId),
'Recipient must be a service id'
);
const deviceIds = recipients[recipientServiceId];
for (const deviceId of deviceIds) {
recipientStatement.run({
id,
2023-08-16 20:54:39 +00:00
recipientServiceId,
deviceId,
});
}
}
// 2. Insert a record for each message referenced by this payload.
const messageStatement = prepare(
db,
`
INSERT INTO sendLogMessageIds (
payloadId,
messageId
) VALUES (
$id,
$messageId
);
`
);
for (const messageId of new Set(messageIds)) {
messageStatement.run({
id,
messageId,
});
}
return id;
})();
}
2024-07-22 18:16:33 +00:00
function deleteSentProtosOlderThan(db: WritableDB, timestamp: number): void {
prepare(
db,
`
DELETE FROM sendLogPayloads
WHERE
timestamp IS NULL OR
timestamp < $timestamp;
`
).run({
timestamp,
});
}
2024-07-22 18:16:33 +00:00
function deleteSentProtoByMessageId(db: WritableDB, messageId: string): void {
prepare(
db,
`
DELETE FROM sendLogPayloads WHERE id IN (
SELECT payloadId FROM sendLogMessageIds
WHERE messageId = $messageId
);
`
).run({
messageId,
});
}
2024-07-22 18:16:33 +00:00
function insertProtoRecipients(
db: WritableDB,
{
id,
recipientServiceId,
deviceIds,
}: {
id: number;
recipientServiceId: ServiceIdString;
deviceIds: Array<number>;
}
): void {
db.transaction(() => {
const statement = prepare(
db,
`
INSERT INTO sendLogRecipients (
payloadId,
2023-08-16 20:54:39 +00:00
recipientServiceId,
deviceId
) VALUES (
$id,
2023-08-16 20:54:39 +00:00
$recipientServiceId,
$deviceId
);
`
);
for (const deviceId of deviceIds) {
statement.run({
id,
2023-08-16 20:54:39 +00:00
recipientServiceId,
deviceId,
});
}
})();
}
2024-07-22 18:16:33 +00:00
function deleteSentProtoRecipient(
db: WritableDB,
2021-08-31 21:35:01 +00:00
options:
| DeleteSentProtoRecipientOptionsType
| ReadonlyArray<DeleteSentProtoRecipientOptionsType>
2024-07-22 18:16:33 +00:00
): DeleteSentProtoRecipientResultType {
2021-08-31 21:35:01 +00:00
const items = Array.isArray(options) ? options : [options];
// Note: we use `pluck` in this function to fetch only the first column of
// returned row.
2022-08-15 21:53:33 +00:00
return db.transaction(() => {
2023-08-16 20:54:39 +00:00
const successfulPhoneNumberShares = new Array<ServiceIdString>();
2022-08-15 21:53:33 +00:00
2021-08-31 21:35:01 +00:00
for (const item of items) {
const { timestamp, recipientServiceId, deviceId } = item;
2021-08-31 21:35:01 +00:00
// 1. Figure out what payload we're talking about.
const rows = prepare(
db,
`
2022-08-15 21:53:33 +00:00
SELECT sendLogPayloads.id, sendLogPayloads.hasPniSignatureMessage
FROM sendLogPayloads
2021-08-31 21:35:01 +00:00
INNER JOIN sendLogRecipients
ON sendLogRecipients.payloadId = sendLogPayloads.id
WHERE
sendLogPayloads.timestamp = $timestamp AND
2023-08-16 20:54:39 +00:00
sendLogRecipients.recipientServiceId = $recipientServiceId AND
2021-08-31 21:35:01 +00:00
sendLogRecipients.deviceId = $deviceId;
`
2023-08-16 20:54:39 +00:00
).all({ timestamp, recipientServiceId, deviceId });
2021-08-31 21:35:01 +00:00
if (!rows.length) {
continue;
}
if (rows.length > 1) {
logger.warn(
2021-08-31 21:35:01 +00:00
'deleteSentProtoRecipient: More than one payload matches ' +
`recipient and timestamp ${timestamp}. Using the first.`
);
}
2022-08-15 21:53:33 +00:00
const { id, hasPniSignatureMessage } = rows[0];
2021-08-31 21:35:01 +00:00
// 2. Delete the recipient/device combination in question.
prepare(
db,
`
DELETE FROM sendLogRecipients
WHERE
payloadId = $id AND
2023-08-16 20:54:39 +00:00
recipientServiceId = $recipientServiceId AND
2021-08-31 21:35:01 +00:00
deviceId = $deviceId;
`
2023-08-16 20:54:39 +00:00
).run({ id, recipientServiceId, deviceId });
2021-08-31 21:35:01 +00:00
// 3. See how many more recipient devices there were for this payload.
2022-08-15 21:53:33 +00:00
const remainingDevices = prepare(
db,
`
2023-01-17 21:07:21 +00:00
SELECT count(1) FROM sendLogRecipients
2023-08-16 20:54:39 +00:00
WHERE payloadId = $id AND recipientServiceId = $recipientServiceId;
`,
{ pluck: true }
2023-08-16 20:54:39 +00:00
).get({ id, recipientServiceId });
2022-08-15 21:53:33 +00:00
// 4. If there are no remaining devices for this recipient and we included
// the pni signature in the proto - return the recipient to the caller.
if (remainingDevices === 0 && hasPniSignatureMessage) {
logger.info(
'deleteSentProtoRecipient: ' +
`Successfully shared phone number with ${recipientServiceId} ` +
2022-08-15 21:53:33 +00:00
`through message ${timestamp}`
);
successfulPhoneNumberShares.push(recipientServiceId);
2022-08-15 21:53:33 +00:00
}
strictAssert(
isNumber(remainingDevices),
'deleteSentProtoRecipient: select count() returned non-number!'
);
// 5. See how many more recipients there were for this payload.
const remainingTotal = prepare(
2021-08-31 21:35:01 +00:00
db,
'SELECT count(1) FROM sendLogRecipients WHERE payloadId = $id;',
{ pluck: true }
).get({ id });
2022-08-15 21:53:33 +00:00
strictAssert(
isNumber(remainingTotal),
'deleteSentProtoRecipient: select count() returned non-number!'
);
2022-08-15 21:53:33 +00:00
if (remainingTotal > 0) {
2021-08-31 21:35:01 +00:00
continue;
}
2022-08-15 21:53:33 +00:00
// 6. Delete the entire payload if there are no more recipients left.
logger.info(
2021-08-31 21:35:01 +00:00
'deleteSentProtoRecipient: ' +
`Deleting proto payload for timestamp ${timestamp}`
);
2022-08-15 21:53:33 +00:00
2021-08-31 21:35:01 +00:00
prepare(db, 'DELETE FROM sendLogPayloads WHERE id = $id;').run({
id,
});
}
2022-08-15 21:53:33 +00:00
return { successfulPhoneNumberShares };
})();
}
2024-07-22 18:16:33 +00:00
function getSentProtoByRecipient(
db: WritableDB,
{
now,
recipientServiceId,
timestamp,
}: {
now: number;
recipientServiceId: ServiceIdString;
timestamp: number;
}
): SentProtoWithMessageIdsType | undefined {
const HOUR = 1000 * 60 * 60;
const oneDayAgo = now - HOUR * 24;
2024-07-22 18:16:33 +00:00
deleteSentProtosOlderThan(db, oneDayAgo);
2023-10-03 00:27:02 +00:00
const row = prepare(
db,
`
SELECT
sendLogPayloads.*,
GROUP_CONCAT(DISTINCT sendLogMessageIds.messageId) AS messageIds
FROM sendLogPayloads
INNER JOIN sendLogRecipients ON sendLogRecipients.payloadId = sendLogPayloads.id
LEFT JOIN sendLogMessageIds ON sendLogMessageIds.payloadId = sendLogPayloads.id
WHERE
sendLogPayloads.timestamp = $timestamp AND
2023-08-16 20:54:39 +00:00
sendLogRecipients.recipientServiceId = $recipientServiceId
GROUP BY sendLogPayloads.id;
`
).get({
timestamp,
2023-08-16 20:54:39 +00:00
recipientServiceId,
});
if (!row) {
return undefined;
}
const { messageIds } = row;
return {
...row,
urgent: isNumber(row.urgent) ? Boolean(row.urgent) : true,
2022-08-15 21:53:33 +00:00
hasPniSignatureMessage: isNumber(row.hasPniSignatureMessage)
? Boolean(row.hasPniSignatureMessage)
: true,
messageIds: messageIds ? messageIds.split(',') : [],
};
}
2024-07-22 18:16:33 +00:00
function removeAllSentProtos(db: WritableDB): void {
prepare<EmptyQuery>(db, 'DELETE FROM sendLogPayloads;').run();
}
2024-07-22 18:16:33 +00:00
function getAllSentProtos(db: ReadableDB): Array<SentProtoType> {
const rows = prepare<EmptyQuery>(db, 'SELECT * FROM sendLogPayloads;').all();
return rows.map(row => ({
...row,
urgent: isNumber(row.urgent) ? Boolean(row.urgent) : true,
2022-08-15 21:53:33 +00:00
hasPniSignatureMessage: isNumber(row.hasPniSignatureMessage)
? Boolean(row.hasPniSignatureMessage)
: true,
}));
}
2024-07-22 18:16:33 +00:00
function _getAllSentProtoRecipients(
db: ReadableDB
): Array<SentRecipientsDBType> {
const rows = prepare<EmptyQuery>(
db,
'SELECT * FROM sendLogRecipients;'
).all();
return rows;
}
2024-07-22 18:16:33 +00:00
function _getAllSentProtoMessageIds(db: ReadableDB): Array<SentMessageDBType> {
const rows = prepare<EmptyQuery>(
db,
'SELECT * FROM sendLogMessageIds;'
).all();
return rows;
}
2018-10-18 01:01:21 +00:00
const SESSIONS_TABLE = 'sessions';
2024-07-22 18:16:33 +00:00
function createOrUpdateSession(db: WritableDB, data: SessionType): void {
2023-08-16 20:54:39 +00:00
const { id, conversationId, ourServiceId, serviceId } = data;
2018-10-18 01:01:21 +00:00
if (!id) {
throw new Error(
'createOrUpdateSession: Provided data did not have a truthy id'
);
}
if (!conversationId) {
2018-10-18 01:01:21 +00:00
throw new Error(
'createOrUpdateSession: Provided data did not have a truthy conversationId'
2018-10-18 01:01:21 +00:00
);
}
prepare(
db,
`
INSERT OR REPLACE INTO sessions (
2018-10-18 01:01:21 +00:00
id,
conversationId,
2023-08-16 20:54:39 +00:00
ourServiceId,
serviceId,
2018-10-18 01:01:21 +00:00
json
) values (
$id,
$conversationId,
2023-08-16 20:54:39 +00:00
$ourServiceId,
$serviceId,
2018-10-18 01:01:21 +00:00
$json
)
`
).run({
id,
conversationId,
2023-08-16 20:54:39 +00:00
ourServiceId,
serviceId,
json: objectToJSON(data),
});
2018-10-18 01:01:21 +00:00
}
2024-07-22 18:16:33 +00:00
function createOrUpdateSessions(
db: WritableDB,
array: Array<SessionType>
2024-07-22 18:16:33 +00:00
): void {
db.transaction(() => {
for (const item of array) {
2024-07-22 18:16:33 +00:00
createOrUpdateSession(db, item);
}
})();
2019-09-26 19:56:31 +00:00
}
2024-07-22 18:16:33 +00:00
function commitDecryptResult(
db: WritableDB,
{
senderKeys,
sessions,
unprocessed,
}: {
senderKeys: Array<SenderKeyType>;
sessions: Array<SessionType>;
unprocessed: Array<UnprocessedType>;
}
): void {
db.transaction(() => {
for (const item of senderKeys) {
2024-07-22 18:16:33 +00:00
createOrUpdateSenderKey(db, item);
}
for (const item of sessions) {
2024-07-22 18:16:33 +00:00
createOrUpdateSession(db, item);
}
for (const item of unprocessed) {
2024-07-22 18:16:33 +00:00
saveUnprocessed(db, item);
}
})();
}
2024-07-22 18:16:33 +00:00
function bulkAddSessions(db: WritableDB, array: Array<SessionType>): void {
return bulkAdd(db, SESSIONS_TABLE, array);
2018-10-18 01:01:21 +00:00
}
2024-07-22 18:16:33 +00:00
function removeSessionById(db: WritableDB, id: SessionIdType): number {
return removeById(db, SESSIONS_TABLE, id);
2018-10-18 01:01:21 +00:00
}
2024-07-22 18:16:33 +00:00
function removeSessionsByConversation(
db: WritableDB,
conversationId: string
2024-07-22 18:16:33 +00:00
): void {
db.prepare<Query>(
`
DELETE FROM sessions
WHERE conversationId = $conversationId;
`
).run({
conversationId,
2018-10-18 01:01:21 +00:00
});
}
2024-07-22 18:16:33 +00:00
function removeSessionsByServiceId(
db: WritableDB,
serviceId: ServiceIdString
2024-07-22 18:16:33 +00:00
): void {
db.prepare<Query>(
`
DELETE FROM sessions
2023-08-16 20:54:39 +00:00
WHERE serviceId = $serviceId;
`
).run({
2023-08-16 20:54:39 +00:00
serviceId,
});
}
2024-07-22 18:16:33 +00:00
function removeAllSessions(db: WritableDB): number {
return removeAllFromTable(db, SESSIONS_TABLE);
}
2024-07-22 18:16:33 +00:00
function getAllSessions(db: ReadableDB): Array<SessionType> {
return getAllFromTable(db, SESSIONS_TABLE);
2018-10-18 01:01:21 +00:00
}
2021-10-26 22:59:08 +00:00
// Conversations
2018-10-18 01:01:21 +00:00
2024-07-22 18:16:33 +00:00
function getConversationCount(db: ReadableDB): number {
return getCountFromTable(db, 'conversations');
2018-10-18 01:01:21 +00:00
}
2021-10-26 22:59:08 +00:00
function getConversationMembersList({ members, membersV2 }: ConversationType) {
if (membersV2) {
2023-08-16 20:54:39 +00:00
return membersV2.map((item: GroupV2MemberType) => item.aci).join(' ');
2018-10-18 01:01:21 +00:00
}
2021-10-26 22:59:08 +00:00
if (members) {
return members.join(' ');
2018-09-21 01:47:19 +00:00
}
2021-10-26 22:59:08 +00:00
return null;
2018-09-21 01:47:19 +00:00
}
2024-07-22 18:16:33 +00:00
function saveConversation(db: WritableDB, data: ConversationType): void {
const {
active_at,
e164,
groupId,
id,
name,
profileFamilyName,
profileName,
profileLastFetchedAt,
type,
2023-08-16 20:54:39 +00:00
serviceId,
} = data;
2018-09-21 01:47:19 +00:00
2021-10-26 22:59:08 +00:00
const membersList = getConversationMembersList(data);
2020-09-09 02:25:05 +00:00
db.prepare<Query>(
`
INSERT INTO conversations (
id,
json,
e164,
2023-08-16 20:54:39 +00:00
serviceId,
groupId,
active_at,
type,
members,
name,
profileName,
profileFamilyName,
profileFullName,
profileLastFetchedAt
) values (
$id,
$json,
$e164,
2023-08-16 20:54:39 +00:00
$serviceId,
$groupId,
$active_at,
$type,
$members,
$name,
$profileName,
$profileFamilyName,
$profileFullName,
$profileLastFetchedAt
);
`
).run({
2018-09-21 01:47:19 +00:00
id,
json: objectToJSON(
2024-07-11 19:44:09 +00:00
omit(data, ['profileLastFetchedAt', 'unblurredAvatarUrl'])
),
2018-09-21 01:47:19 +00:00
e164: e164 || null,
2023-08-16 20:54:39 +00:00
serviceId: serviceId || null,
groupId: groupId || null,
active_at: active_at || null,
2018-09-21 01:47:19 +00:00
type,
members: membersList,
name: name || null,
profileName: profileName || null,
profileFamilyName: profileFamilyName || null,
profileFullName: combineNames(profileName, profileFamilyName) || null,
profileLastFetchedAt: profileLastFetchedAt || null,
});
2018-09-21 01:47:19 +00:00
}
2024-07-22 18:16:33 +00:00
function saveConversations(
db: WritableDB,
arrayOfConversations: Array<ConversationType>
2024-07-22 18:16:33 +00:00
): void {
db.transaction(() => {
for (const conversation of arrayOfConversations) {
2024-07-22 18:16:33 +00:00
saveConversation(db, conversation);
}
})();
2018-09-21 01:47:19 +00:00
}
2024-07-22 18:16:33 +00:00
function updateConversation(db: WritableDB, data: ConversationType): void {
const {
id,
active_at,
type,
name,
profileName,
profileFamilyName,
profileLastFetchedAt,
e164,
2023-08-16 20:54:39 +00:00
serviceId,
} = data;
2018-09-21 01:47:19 +00:00
2021-10-26 22:59:08 +00:00
const membersList = getConversationMembersList(data);
2020-09-09 02:25:05 +00:00
db.prepare(
`
UPDATE conversations SET
json = $json,
e164 = $e164,
2023-08-16 20:54:39 +00:00
serviceId = $serviceId,
active_at = $active_at,
type = $type,
members = $members,
name = $name,
profileName = $profileName,
profileFamilyName = $profileFamilyName,
profileFullName = $profileFullName,
profileLastFetchedAt = $profileLastFetchedAt
WHERE id = $id;
`
).run({
id,
json: objectToJSON(
2024-07-11 19:44:09 +00:00
omit(data, ['profileLastFetchedAt', 'unblurredAvatarUrl'])
),
e164: e164 || null,
2023-08-16 20:54:39 +00:00
serviceId: serviceId || null,
active_at: active_at || null,
type,
members: membersList,
name: name || null,
profileName: profileName || null,
profileFamilyName: profileFamilyName || null,
profileFullName: combineNames(profileName, profileFamilyName) || null,
profileLastFetchedAt: profileLastFetchedAt || null,
});
2018-09-21 01:47:19 +00:00
}
2021-03-04 21:44:57 +00:00
2024-07-22 18:16:33 +00:00
function updateConversations(
db: WritableDB,
array: Array<ConversationType>
2024-07-22 18:16:33 +00:00
): void {
db.transaction(() => {
for (const item of array) {
2024-07-22 18:16:33 +00:00
updateConversation(db, item);
}
})();
2019-09-26 19:56:31 +00:00
}
2018-09-21 01:47:19 +00:00
2024-07-22 18:16:33 +00:00
function removeConversations(db: WritableDB, ids: ReadonlyArray<string>): void {
// Our node interface doesn't seem to allow you to replace one single ? with an array
db.prepare<ArrayQuery>(
`
DELETE FROM conversations
WHERE id IN ( ${ids.map(() => '?').join(', ')} );
`
).run(ids);
}
2024-07-22 18:16:33 +00:00
function removeConversation(db: WritableDB, id: Array<string> | string): void {
2018-09-21 01:47:19 +00:00
if (!Array.isArray(id)) {
db.prepare<Query>('DELETE FROM conversations WHERE id = $id;').run({
id,
});
2018-09-21 01:47:19 +00:00
return;
}
if (!id.length) {
throw new Error('removeConversation: No ids to delete!');
}
2024-07-22 18:16:33 +00:00
batchMultiVarQuery(db, id, ids => removeConversations(db, ids));
2018-09-21 01:47:19 +00:00
}
2024-07-22 18:16:33 +00:00
function _removeAllConversations(db: WritableDB): void {
db.prepare<EmptyQuery>('DELETE from conversations;').run();
}
2024-07-22 18:16:33 +00:00
function getConversationById(
db: ReadableDB,
id: string
2024-07-22 18:16:33 +00:00
): ConversationType | undefined {
const row: { json: string } = db
.prepare<Query>('SELECT json FROM conversations WHERE id = $id;')
.get({ id });
2018-09-21 01:47:19 +00:00
if (!row) {
return undefined;
2018-09-21 01:47:19 +00:00
}
return jsonToObject(row.json);
}
2024-07-22 18:16:33 +00:00
function getAllConversations(db: ReadableDB): Array<ConversationType> {
const rows: ConversationRows = db
.prepare<EmptyQuery>(
`
SELECT json, profileLastFetchedAt
FROM conversations
ORDER BY id ASC;
`
)
.all();
return rows.map(row => rowToConversation(row));
2018-09-21 01:47:19 +00:00
}
2024-07-22 18:16:33 +00:00
function getAllConversationIds(db: ReadableDB): Array<string> {
const rows: Array<{ id: string }> = db
.prepare<EmptyQuery>(
`
SELECT id FROM conversations ORDER BY id ASC;
`
)
.all();
return rows.map(row => row.id);
2018-09-21 01:47:19 +00:00
}
2024-07-22 18:16:33 +00:00
function getAllGroupsInvolvingServiceId(
db: ReadableDB,
serviceId: ServiceIdString
2024-07-22 18:16:33 +00:00
): Array<ConversationType> {
const rows: ConversationRows = db
.prepare<Query>(
`
SELECT json, profileLastFetchedAt
FROM conversations WHERE
type = 'group' AND
2023-08-16 20:54:39 +00:00
members LIKE $serviceId
ORDER BY id ASC;
`
)
.all({
2023-08-16 20:54:39 +00:00
serviceId: `%${serviceId}%`,
});
2018-09-21 01:47:19 +00:00
return rows.map(row => rowToConversation(row));
2018-09-21 01:47:19 +00:00
}
2024-07-22 18:16:33 +00:00
function searchMessages(
db: ReadableDB,
{
query,
options,
conversationId,
contactServiceIdsMatchingQuery,
}: {
query: string;
options?: { limit?: number };
conversationId?: string;
contactServiceIdsMatchingQuery?: Array<ServiceIdString>;
}
): Array<ServerSearchResultMessageType> {
const { limit = conversationId ? 100 : 500 } = options ?? {};
2024-07-22 18:16:33 +00:00
const writable = toUnsafeWritableDB(db, 'only temp table use');
2024-07-22 18:16:33 +00:00
const normalizedQuery = writable
2023-12-04 17:38:40 +00:00
.signalTokenize(query)
.map(token => `"${token.replace(/"/g, '""')}"*`)
.join(' ');
2024-02-27 16:11:49 +00:00
// FTS5 is not happy about empty "MATCH" so short-circuit early.
if (!normalizedQuery) {
return [];
}
// sqlite queries with a join on a virtual table (like FTS5) are de-optimized
// and can't use indices for ordering results. Instead an in-memory index of
// the join rows is sorted on the fly, and this becomes substantially
// slower when there are large columns in it (like `messages.json`).
//
// Thus here we take an indirect approach and store `rowid`s in a temporary
// table for all messages that match the FTS query. Then we create another
// table to sort and limit the results, and finally join on it when fetch
// the snippets and json. The benefit of this is that the `ORDER BY` and
// `LIMIT` happen without virtual table and are thus covered by
// `messages_searchOrder` index.
2024-07-22 18:16:33 +00:00
return writable.transaction(() => {
writable.exec(
`
CREATE TEMP TABLE tmp_results(rowid INTEGER PRIMARY KEY ASC);
CREATE TEMP TABLE tmp_filtered_results(rowid INTEGER PRIMARY KEY ASC);
`
);
2024-07-22 18:16:33 +00:00
writable
.prepare<Query>(
`
INSERT INTO tmp_results (rowid)
SELECT
rowid
FROM
messages_fts
WHERE
messages_fts.body MATCH $query;
`
2024-07-22 18:16:33 +00:00
)
.run({ query: normalizedQuery });
2019-01-14 21:47:19 +00:00
if (conversationId === undefined) {
2024-07-22 18:16:33 +00:00
writable
.prepare<Query>(
`
INSERT INTO tmp_filtered_results (rowid)
SELECT
tmp_results.rowid
FROM
tmp_results
INNER JOIN
messages ON messages.rowid = tmp_results.rowid
ORDER BY messages.received_at DESC, messages.sent_at DESC
LIMIT $limit;
`
2024-07-22 18:16:33 +00:00
)
.run({ limit });
} else {
2024-07-22 18:16:33 +00:00
writable
.prepare<Query>(
`
INSERT INTO tmp_filtered_results (rowid)
SELECT
tmp_results.rowid
FROM
tmp_results
INNER JOIN
messages ON messages.rowid = tmp_results.rowid
WHERE
messages.conversationId = $conversationId
ORDER BY messages.received_at DESC, messages.sent_at DESC
LIMIT $limit;
`
2024-07-22 18:16:33 +00:00
)
.run({ conversationId, limit });
}
// The `MATCH` is necessary in order to for `snippet()` helper function to
// give us the right results. We can't call `snippet()` in the query above
// because it would bloat the temporary table with text data and we want
// to keep its size minimal for `ORDER BY` + `LIMIT` to be fast.
const ftsFragment = sqlFragment`
SELECT
messages.rowid,
messages.json,
messages.sent_at,
messages.received_at,
snippet(messages_fts, -1, ${SNIPPET_LEFT_PLACEHOLDER}, ${SNIPPET_RIGHT_PLACEHOLDER}, ${SNIPPET_TRUNCATION_PLACEHOLDER}, 10) AS ftsSnippet
FROM tmp_filtered_results
INNER JOIN messages_fts
ON messages_fts.rowid = tmp_filtered_results.rowid
INNER JOIN messages
ON messages.rowid = tmp_filtered_results.rowid
WHERE
2023-12-04 17:38:40 +00:00
messages_fts.body MATCH ${normalizedQuery}
ORDER BY messages.received_at DESC, messages.sent_at DESC
LIMIT ${limit}
`;
let result: Array<ServerSearchResultMessageType>;
2023-08-16 20:54:39 +00:00
if (!contactServiceIdsMatchingQuery?.length) {
const [sqlQuery, params] = sql`${ftsFragment};`;
2024-07-22 18:16:33 +00:00
result = writable.prepare(sqlQuery).all(params);
} else {
2024-07-22 18:16:33 +00:00
// If contactServiceIdsMatchingQuery is not empty, we due an OUTER JOIN
// between:
// 1) the messages that mention at least one of
// contactServiceIdsMatchingQuery, and
// 2) the messages that match all the search terms via FTS
//
2024-07-22 18:16:33 +00:00
// Note: this groups the results by rowid, so even if one message
// mentions multiple matching UUIDs, we only return one to be
// highlighted
const [sqlQuery, params] = sql`
2023-08-09 00:53:06 +00:00
SELECT
messages.rowid as rowid,
2023-08-09 00:53:06 +00:00
COALESCE(messages.json, ftsResults.json) as json,
COALESCE(messages.sent_at, ftsResults.sent_at) as sent_at,
COALESCE(messages.received_at, ftsResults.received_at) as received_at,
2023-08-09 00:53:06 +00:00
ftsResults.ftsSnippet,
2023-08-16 20:54:39 +00:00
mentionAci,
2023-08-09 00:53:06 +00:00
start as mentionStart,
length as mentionLength
FROM mentions
2023-08-09 00:53:06 +00:00
INNER JOIN messages
ON
messages.id = mentions.messageId
2023-08-16 20:54:39 +00:00
AND mentions.mentionAci IN (
2023-11-22 22:48:53 +00:00
${sqlJoin(contactServiceIdsMatchingQuery)}
2023-08-09 00:53:06 +00:00
)
AND ${
conversationId
? sqlFragment`messages.conversationId = ${conversationId}`
: '1 IS 1'
}
2023-08-09 00:53:06 +00:00
AND messages.isViewOnce IS NOT 1
AND messages.storyId IS NULL
FULL OUTER JOIN (
${ftsFragment}
2023-08-09 00:53:06 +00:00
) as ftsResults
USING (rowid)
GROUP BY rowid
ORDER BY received_at DESC, sent_at DESC
LIMIT ${limit};
`;
2024-07-22 18:16:33 +00:00
result = writable.prepare(sqlQuery).all(params);
}
2024-07-22 18:16:33 +00:00
writable.exec(
`
DROP TABLE tmp_results;
DROP TABLE tmp_filtered_results;
`
);
return result;
})();
2019-01-14 21:47:19 +00:00
}
2024-07-22 18:16:33 +00:00
function getStoryCount(db: ReadableDB, conversationId: string): number {
return db
.prepare<Query>(
`
2023-01-17 21:07:21 +00:00
SELECT count(1)
FROM messages
2024-07-22 18:16:33 +00:00
WHERE conversationId = $conversationId AND isStory = 1;
`
)
2021-10-26 22:59:08 +00:00
.pluck()
.get({ conversationId });
2021-10-26 22:59:08 +00:00
}
2024-07-22 18:16:33 +00:00
function getMessageCount(db: ReadableDB, conversationId?: string): number {
if (conversationId === undefined) {
return getCountFromTable(db, 'messages');
}
const count = db
2022-03-29 01:10:08 +00:00
.prepare<Query>(
`
2023-01-17 21:07:21 +00:00
SELECT count(1)
2022-03-29 01:10:08 +00:00
FROM messages
2024-07-22 18:16:33 +00:00
WHERE conversationId = $conversationId;
2022-03-29 01:10:08 +00:00
`
)
.pluck()
.get({ conversationId });
2024-07-22 18:16:33 +00:00
return count;
}
// Note: we really only use this in 1:1 conversations, where story replies are always
// shown, so this has no need to be story-aware.
2024-07-22 18:16:33 +00:00
function hasUserInitiatedMessages(
db: ReadableDB,
conversationId: string
): boolean {
2022-11-28 17:19:48 +00:00
const exists: number = db
.prepare<Query>(
`
2022-11-28 17:19:48 +00:00
SELECT EXISTS(
SELECT 1 FROM messages
2023-01-17 21:07:21 +00:00
INDEXED BY message_user_initiated
2022-11-28 17:19:48 +00:00
WHERE
2023-01-17 21:07:21 +00:00
conversationId IS $conversationId AND
isUserInitiatedMessage IS 1
2022-11-28 17:19:48 +00:00
);
`
)
2022-11-28 17:19:48 +00:00
.pluck()
.get({ conversationId });
2022-11-28 17:19:48 +00:00
return exists !== 0;
}
2024-07-22 18:16:33 +00:00
export function getMostRecentAddressableMessages(
db: ReadableDB,
conversationId: string,
limit = 5
): Array<MessageType> {
const [query, parameters] = sql`
SELECT json FROM messages
INDEXED BY messages_by_date_addressable
WHERE
conversationId IS ${conversationId} AND
isAddressableMessage = 1
ORDER BY received_at DESC, sent_at DESC
LIMIT ${limit};
`;
const rows = db.prepare(query).all(parameters);
return rows.map(row => jsonToObject(row.json));
}
2024-07-22 18:16:33 +00:00
export function getMostRecentAddressableNondisappearingMessages(
db: ReadableDB,
conversationId: string,
limit = 5
): Array<MessageType> {
const [query, parameters] = sql`
SELECT json FROM messages
INDEXED BY messages_by_date_addressable_nondisappearing
WHERE
expireTimer IS NULL AND
conversationId IS ${conversationId} AND
isAddressableMessage = 1
ORDER BY received_at DESC, sent_at DESC
LIMIT ${limit};
`;
const rows = db.prepare(query).all(parameters);
return rows.map(row => jsonToObject(row.json));
}
2024-07-22 18:16:33 +00:00
export function removeSyncTaskById(db: WritableDB, id: string): void {
const [query, parameters] = sql`
DELETE FROM syncTasks
WHERE id IS ${id}
`;
db.prepare(query).run(parameters);
}
2024-07-22 18:16:33 +00:00
export function saveSyncTasks(
db: WritableDB,
tasks: Array<SyncTaskType>
): void {
return db.transaction(() => {
2024-07-22 18:16:33 +00:00
tasks.forEach(task => saveSyncTask(db, task));
})();
}
2024-07-22 18:16:33 +00:00
function saveSyncTask(db: WritableDB, task: SyncTaskType): void {
const { id, attempts, createdAt, data, envelopeId, sentAt, type } = task;
const [query, parameters] = sql`
INSERT INTO syncTasks (
id,
attempts,
createdAt,
data,
envelopeId,
sentAt,
type
) VALUES (
${id},
${attempts},
${createdAt},
${objectToJSON(data)},
${envelopeId},
${sentAt},
${type}
)
`;
db.prepare(query).run(parameters);
}
2024-07-22 18:16:33 +00:00
export function getAllSyncTasks(db: WritableDB): Array<SyncTaskType> {
return db.transaction(() => {
const [selectAllQuery] = sql`
SELECT * FROM syncTasks ORDER BY createdAt ASC, sentAt ASC, id ASC
`;
const rows = db.prepare(selectAllQuery).all();
const tasks: Array<SyncTaskType> = rows.map(row => ({
...row,
data: jsonToObject(row.data),
}));
const [query] = sql`
UPDATE syncTasks
SET attempts = attempts + 1
`;
db.prepare(query).run();
const [toDelete, toReturn] = partition(tasks, task => {
if (
isNormalNumber(task.attempts) &&
task.attempts < MAX_SYNC_TASK_ATTEMPTS
) {
return false;
}
if (isMoreRecentThan(task.createdAt, durations.WEEK)) {
return false;
}
return true;
});
if (toDelete.length > 0) {
log.warn(`getAllSyncTasks: Removing ${toDelete.length} expired tasks`);
toDelete.forEach(task => {
2024-07-22 18:16:33 +00:00
removeSyncTaskById(db, task.id);
});
}
return toReturn;
})();
}
2024-07-22 18:16:33 +00:00
export function saveMessage(
db: WritableDB,
data: ReadonlyDeep<MessageType>,
2021-10-26 22:59:08 +00:00
options: {
alreadyInTransaction?: boolean;
2021-12-20 21:04:02 +00:00
forceSave?: boolean;
jobToInsert?: StoredJob;
ourAci: AciString;
2021-12-20 21:04:02 +00:00
}
): string {
2023-10-03 00:27:02 +00:00
const { alreadyInTransaction, forceSave, jobToInsert, ourAci } = options;
if (!alreadyInTransaction) {
return db.transaction(() => {
2024-07-22 18:16:33 +00:00
return saveMessage(db, data, {
...options,
alreadyInTransaction: true,
});
})();
}
const {
2019-01-14 21:47:19 +00:00
body,
conversationId,
2021-12-20 21:04:02 +00:00
groupV2Change,
hasAttachments,
hasFileAttachments,
hasVisualMediaAttachments,
id,
2019-06-26 19:33:13 +00:00
isErased,
2019-08-05 20:53:15 +00:00
isViewOnce,
mentionsMe,
received_at,
schemaVersion,
sent_at,
serverGuid,
source,
2023-08-16 20:54:39 +00:00
sourceServiceId,
sourceDevice,
storyId,
type,
readStatus,
expireTimer,
expirationStartTimestamp,
attachments,
} = data;
let { seenStatus } = data;
if (attachments) {
strictAssert(
attachments.every(
attachment =>
!attachment.data &&
!attachment.screenshotData &&
!attachment.screenshot?.data &&
!attachment.thumbnail?.data
),
'Attempting to save a message with binary attachment data'
);
}
if (readStatus === ReadStatus.Unread && seenStatus !== SeenStatus.Unseen) {
log.warn(
`saveMessage: Message ${id}/${type} is unread but had seenStatus=${seenStatus}. Forcing to UnseenStatus.Unseen.`
);
// eslint-disable-next-line no-param-reassign
data = {
...data,
seenStatus: SeenStatus.Unseen,
};
seenStatus = SeenStatus.Unseen;
}
const payload = {
id,
json: objectToJSON(data),
body: body || null,
conversationId,
expirationStartTimestamp: expirationStartTimestamp || null,
expireTimer: expireTimer || null,
hasAttachments: hasAttachments ? 1 : 0,
hasFileAttachments: hasFileAttachments ? 1 : 0,
hasVisualMediaAttachments: hasVisualMediaAttachments ? 1 : 0,
isChangeCreatedByUs: groupV2Change?.from === ourAci ? 1 : 0,
isErased: isErased ? 1 : 0,
isViewOnce: isViewOnce ? 1 : 0,
mentionsMe: mentionsMe ? 1 : 0,
received_at: received_at || null,
schemaVersion: schemaVersion || 0,
serverGuid: serverGuid || null,
sent_at: sent_at || null,
source: source || null,
2023-08-16 20:54:39 +00:00
sourceServiceId: sourceServiceId || null,
sourceDevice: sourceDevice || null,
storyId: storyId || null,
type: type || null,
readStatus: readStatus ?? null,
seenStatus: seenStatus ?? SeenStatus.NotApplicable,
};
if (id && !forceSave) {
prepare(
db,
`
UPDATE messages SET
id = $id,
json = $json,
body = $body,
conversationId = $conversationId,
expirationStartTimestamp = $expirationStartTimestamp,
expireTimer = $expireTimer,
hasAttachments = $hasAttachments,
hasFileAttachments = $hasFileAttachments,
hasVisualMediaAttachments = $hasVisualMediaAttachments,
2021-12-20 21:04:02 +00:00
isChangeCreatedByUs = $isChangeCreatedByUs,
isErased = $isErased,
isViewOnce = $isViewOnce,
mentionsMe = $mentionsMe,
received_at = $received_at,
schemaVersion = $schemaVersion,
serverGuid = $serverGuid,
sent_at = $sent_at,
source = $source,
2023-08-16 20:54:39 +00:00
sourceServiceId = $sourceServiceId,
sourceDevice = $sourceDevice,
storyId = $storyId,
type = $type,
readStatus = $readStatus,
seenStatus = $seenStatus
WHERE id = $id;
`
).run(payload);
if (jobToInsert) {
2024-07-22 18:16:33 +00:00
insertJob(db, jobToInsert);
}
return id;
}
const toCreate = {
...data,
id: id || generateUuid(),
};
prepare(
db,
`
INSERT INTO messages (
id,
json,
body,
conversationId,
expirationStartTimestamp,
expireTimer,
hasAttachments,
hasFileAttachments,
hasVisualMediaAttachments,
2021-12-20 21:04:02 +00:00
isChangeCreatedByUs,
isErased,
isViewOnce,
mentionsMe,
received_at,
schemaVersion,
serverGuid,
sent_at,
source,
2023-08-16 20:54:39 +00:00
sourceServiceId,
sourceDevice,
storyId,
type,
readStatus,
seenStatus
) values (
$id,
$json,
$body,
$conversationId,
$expirationStartTimestamp,
$expireTimer,
$hasAttachments,
$hasFileAttachments,
$hasVisualMediaAttachments,
2021-12-20 21:04:02 +00:00
$isChangeCreatedByUs,
$isErased,
$isViewOnce,
$mentionsMe,
$received_at,
$schemaVersion,
$serverGuid,
$sent_at,
$source,
2023-08-16 20:54:39 +00:00
$sourceServiceId,
$sourceDevice,
$storyId,
$type,
$readStatus,
$seenStatus
2021-07-09 21:38:51 +00:00
);
`
).run({
...payload,
id: toCreate.id,
json: objectToJSON(toCreate),
});
if (jobToInsert) {
2024-07-22 18:16:33 +00:00
insertJob(db, jobToInsert);
}
return toCreate.id;
}
2024-07-22 18:16:33 +00:00
function saveMessages(
db: WritableDB,
arrayOfMessages: ReadonlyArray<ReadonlyDeep<MessageType>>,
options: { forceSave?: boolean; ourAci: AciString }
2024-07-22 18:16:33 +00:00
): Array<string> {
2024-06-03 17:02:25 +00:00
return db.transaction(() => {
const result = new Array<string>();
for (const message of arrayOfMessages) {
2024-06-03 17:02:25 +00:00
result.push(
2024-07-22 18:16:33 +00:00
saveMessage(db, message, {
...options,
alreadyInTransaction: true,
})
);
}
2024-06-03 17:02:25 +00:00
return result;
})();
}
2024-07-22 18:16:33 +00:00
function removeMessage(db: WritableDB, id: string): void {
db.prepare<Query>('DELETE FROM messages WHERE id = $id;').run({ id });
2021-01-13 00:42:15 +00:00
}
2024-07-22 18:16:33 +00:00
function removeMessagesBatch(db: WritableDB, ids: ReadonlyArray<string>): void {
db.prepare<ArrayQuery>(
`
DELETE FROM messages
WHERE id IN ( ${ids.map(() => '?').join(', ')} );
`
).run(ids);
}
2024-07-22 18:16:33 +00:00
function removeMessages(db: WritableDB, ids: ReadonlyArray<string>): void {
batchMultiVarQuery(db, ids, batch => removeMessagesBatch(db, batch));
2022-02-16 18:36:21 +00:00
}
2024-07-22 18:16:33 +00:00
export function getMessageById(
db: ReadableDB,
2022-02-16 18:36:21 +00:00
id: string
): MessageType | undefined {
const row = db
.prepare<Query>('SELECT json FROM messages WHERE id = $id;')
.get({
id,
});
if (!row) {
return undefined;
}
return jsonToObject(row.json);
}
2024-07-22 18:16:33 +00:00
function getMessagesById(
db: ReadableDB,
2023-03-20 22:23:53 +00:00
messageIds: ReadonlyArray<string>
2024-07-22 18:16:33 +00:00
): Array<MessageType> {
return batchMultiVarQuery(
2021-10-26 22:59:08 +00:00
db,
messageIds,
(batch: ReadonlyArray<string>): Array<MessageType> => {
const query = db.prepare<ArrayQuery>(
`SELECT json FROM messages WHERE id IN (${Array(batch.length)
.fill('?')
.join(',')});`
);
const rows: JSONRows = query.all(batch);
return rows.map(row => jsonToObject(row.json));
}
);
}
2024-07-22 18:16:33 +00:00
function _getAllMessages(db: ReadableDB): Array<MessageType> {
const rows: JSONRows = db
.prepare<EmptyQuery>('SELECT json FROM messages ORDER BY id ASC;')
.all();
return rows.map(row => jsonToObject(row.json));
2018-09-21 01:47:19 +00:00
}
// Test/debug helper: wipes every message, then asks the FTS5 index that
// shadowed those rows to merge/compact itself ('optimize' command).
function _removeAllMessages(db: WritableDB): void {
  db.exec(`
    DELETE FROM messages;
    INSERT INTO messages_fts(messages_fts) VALUES('optimize');
  `);
}
2018-09-21 01:47:19 +00:00
2024-07-22 18:16:33 +00:00
function getAllMessageIds(db: ReadableDB): Array<string> {
const rows: Array<{ id: string }> = db
.prepare<EmptyQuery>('SELECT id FROM messages ORDER BY id ASC;')
.all();
return rows.map(row => row.id);
}
2024-07-22 18:16:33 +00:00
function getMessageBySender(
db: ReadableDB,
{
source,
sourceServiceId,
sourceDevice,
sent_at,
}: {
source?: string;
sourceServiceId?: ServiceIdString;
sourceDevice?: number;
sent_at: number;
}
): MessageType | undefined {
const rows: JSONRows = prepare(
db,
`
SELECT json FROM messages WHERE
2023-08-16 20:54:39 +00:00
(source = $source OR sourceServiceId = $sourceServiceId) AND
sourceDevice = $sourceDevice AND
sent_at = $sent_at
LIMIT 2;
`
).all({
source: source || null,
2023-08-16 20:54:39 +00:00
sourceServiceId: sourceServiceId || null,
sourceDevice: sourceDevice || null,
sent_at,
});
if (rows.length > 1) {
log.warn('getMessageBySender: More than one message found for', {
sent_at,
source,
2023-08-16 20:54:39 +00:00
sourceServiceId,
sourceDevice,
});
}
if (rows.length < 1) {
return undefined;
}
return jsonToObject(rows[0].json);
}
2022-04-20 23:33:38 +00:00
export function _storyIdPredicate(
storyId: string | undefined,
includeStoryReplies: boolean
2023-03-20 22:23:53 +00:00
): QueryFragment {
// This is unintuitive, but 'including story replies' means that we need replies to
// lots of different stories. So, we remove the storyId check with a clause that will
// always be true. We don't just return TRUE because we want to use our passed-in
// $storyId parameter.
if (includeStoryReplies && storyId === undefined) {
2023-03-20 22:23:53 +00:00
return sqlFragment`${storyId} IS NULL`;
}
2023-01-01 11:41:40 +00:00
// In contrast to: replies to a specific story
2023-03-20 22:23:53 +00:00
return sqlFragment`storyId IS ${storyId}`;
}
// Marks a conversation's unseen messages (up to `newestUnreadAt`) as
// read+seen and starts disappearing-message timers, all in one transaction.
// Returns, for each previously-unseen message, its prior readStatus plus the
// fields callers need to emit read syncs/receipts.
//
// Statement order matters: (1) set expirationStartTimestamp on qualifying
// incoming disappearing messages, (2) SELECT the still-unseen rows, (3) flip
// readStatus/seenStatus — the SELECT must run before (3) so we can report
// each message's original state.
function getUnreadByConversationAndMarkRead(
  db: WritableDB,
  {
    conversationId,
    includeStoryReplies,
    newestUnreadAt,
    storyId,
    readAt,
    now = Date.now(),
  }: {
    conversationId: string;
    includeStoryReplies: boolean;
    newestUnreadAt: number;
    storyId?: string;
    readAt?: number;
    now?: number;
  }
): GetUnreadByConversationAndMarkReadResultType {
  return db.transaction(() => {
    // Never start a timer in the future; use the earlier of now and readAt.
    const expirationStartTimestamp = Math.min(now, readAt ?? Infinity);
    const expirationJsonPatch = JSON.stringify({ expirationStartTimestamp });

    // (1) Start (or pull earlier) the expire timer on incoming disappearing
    // messages that haven't started expiring yet, or started later than this.
    const [updateExpirationQuery, updateExpirationParams] = sql`
      UPDATE messages
      INDEXED BY expiring_message_by_conversation_and_received_at
      SET
        expirationStartTimestamp = ${expirationStartTimestamp},
        json = json_patch(json, ${expirationJsonPatch})
      WHERE
        conversationId = ${conversationId} AND
        (${_storyIdPredicate(storyId, includeStoryReplies)}) AND
        isStory IS 0 AND
        type IS 'incoming' AND
        (
          expirationStartTimestamp IS NULL OR
          expirationStartTimestamp > ${expirationStartTimestamp}
        ) AND
        expireTimer > 0 AND
        received_at <= ${newestUnreadAt};
    `;

    db.prepare(updateExpirationQuery).run(updateExpirationParams);

    // (2) Capture the rows that are about to be marked seen, so the caller
    // can see their pre-update state.
    const [selectQuery, selectParams] = sql`
      SELECT id, json FROM messages
        WHERE
          conversationId = ${conversationId} AND
          seenStatus = ${SeenStatus.Unseen} AND
          isStory = 0 AND
          (${_storyIdPredicate(storyId, includeStoryReplies)}) AND
          received_at <= ${newestUnreadAt}
        ORDER BY received_at DESC, sent_at DESC;
    `;

    const rows = db.prepare(selectQuery).all(selectParams);

    // (3) Flip both the columns and the embedded JSON copy of the statuses.
    const statusJsonPatch = JSON.stringify({
      readStatus: ReadStatus.Read,
      seenStatus: SeenStatus.Seen,
    });

    const [updateStatusQuery, updateStatusParams] = sql`
      UPDATE messages
        SET
          readStatus = ${ReadStatus.Read},
          seenStatus = ${SeenStatus.Seen},
          json = json_patch(json, ${statusJsonPatch})
        WHERE
          conversationId = ${conversationId} AND
          seenStatus = ${SeenStatus.Unseen} AND
          isStory = 0 AND
          (${_storyIdPredicate(storyId, includeStoryReplies)}) AND
          received_at <= ${newestUnreadAt};
    `;

    db.prepare(updateStatusQuery).run(updateStatusParams);

    return rows.map(row => {
      const json = jsonToObject<MessageType>(row.json);
      return {
        // Status as it was before this call marked the message read.
        originalReadStatus: json.readStatus,
        readStatus: ReadStatus.Read,
        seenStatus: SeenStatus.Seen,
        ...pick(json, [
          'expirationStartTimestamp',
          'id',
          'sent_at',
          'source',
          'sourceServiceId',
          'type',
        ]),
      };
    });
  })();
}
// Finds unread reactions in a conversation (on messages up to
// `newestUnreadAt`, scoped to an optional story) and clears their unread
// flag, returning the reactions as they were before the update.
function getUnreadReactionsAndMarkRead(
  db: WritableDB,
  {
    conversationId,
    newestUnreadAt,
    storyId,
  }: {
    conversationId: string;
    newestUnreadAt: number;
    storyId?: string;
  }
): Array<ReactionResultType> {
  return db.transaction(() => {
    // Select before updating so we can hand back the unread set.
    const unreadMessages: Array<ReactionResultType> = db
      .prepare<Query>(
        `
        SELECT reactions.rowid, targetAuthorAci, targetTimestamp, messageId
        FROM reactions
        INDEXED BY reactions_unread
        JOIN messages on messages.id IS reactions.messageId
        WHERE
          reactions.conversationId IS $conversationId AND
          reactions.unread > 0 AND
          messages.received_at <= $newestUnreadAt AND
          messages.storyId IS $storyId
        ORDER BY messageReceivedAt DESC;
      `
      )
      .all({
        conversationId,
        newestUnreadAt,
        storyId: storyId || null,
      });

    // Clear the unread flag by rowid, batched to respect SQLite's
    // bound-variable limit.
    const idsToUpdate = unreadMessages.map(item => item.rowid);
    batchMultiVarQuery(db, idsToUpdate, (ids: ReadonlyArray<number>): void => {
      db.prepare<ArrayQuery>(
        `
        UPDATE reactions
        SET unread = 0
        WHERE rowid IN ( ${ids.map(() => '?').join(', ')} );
        `
      ).run(ids);
    });

    return unreadMessages;
  })();
}
// Marks all reactions targeting a given (author, timestamp) message as read.
// Returns the most recent previously-unread reaction (highest rowId), or
// undefined when none was unread.
function markReactionAsRead(
  db: WritableDB,
  targetAuthorServiceId: ServiceIdString,
  targetTimestamp: number
): ReactionType | undefined {
  return db.transaction(() => {
    // Capture the newest unread reaction before clearing the flags.
    const readReaction = db
      .prepare(
        `
          SELECT *
          FROM reactions
          WHERE
            targetAuthorAci = $targetAuthorAci AND
            targetTimestamp = $targetTimestamp AND
            unread = 1
          ORDER BY rowId DESC
          LIMIT 1;
        `
      )
      .get({
        targetAuthorAci: targetAuthorServiceId,
        targetTimestamp,
      });

    // Clear unread on every reaction for this target, not just the one
    // returned above.
    db.prepare(
      `
        UPDATE reactions SET
        unread = 0 WHERE
        targetAuthorAci = $targetAuthorAci AND
        targetTimestamp = $targetTimestamp;
      `
    ).run({
      targetAuthorAci: targetAuthorServiceId,
      targetTimestamp,
    });

    return readReaction;
  })();
}
2024-07-22 18:16:33 +00:00
function getReactionByTimestamp(
db: ReadableDB,
fromId: string,
timestamp: number
2024-07-22 18:16:33 +00:00
): ReactionType | undefined {
const [query, params] = sql`
SELECT * FROM reactions
WHERE fromId IS ${fromId} AND timestamp IS ${timestamp}
`;
return db.prepare(query).get(params);
}
// Inserts one reaction row. The stored `unread` column is 1/0, derived from
// the caller-provided ReactionReadStatus.
function addReaction(
  db: WritableDB,
  {
    conversationId,
    emoji,
    fromId,
    messageId,
    messageReceivedAt,
    targetAuthorAci,
    targetTimestamp,
    timestamp,
  }: ReactionType,
  { readStatus }: { readStatus: ReactionReadStatus }
): void {
  db.prepare(
    `INSERT INTO reactions (
      conversationId,
      emoji,
      fromId,
      messageId,
      messageReceivedAt,
      targetAuthorAci,
      targetTimestamp,
      timestamp,
      unread
    ) VALUES (
      $conversationId,
      $emoji,
      $fromId,
      $messageId,
      $messageReceivedAt,
      $targetAuthorAci,
      $targetTimestamp,
      $timestamp,
      $unread
    );`
  ).run({
    conversationId,
    emoji,
    fromId,
    messageId,
    messageReceivedAt,
    targetAuthorAci,
    targetTimestamp,
    timestamp,
    unread: readStatus === ReactionReadStatus.Unread ? 1 : 0,
  });
}
// Deletes the reaction matching all four identifying fields (emoji, sender,
// target author, target timestamp). Note the column is named targetAuthorAci
// while the parameter carries a generic service id.
function removeReactionFromConversation(
  db: WritableDB,
  {
    emoji,
    fromId,
    targetAuthorServiceId,
    targetTimestamp,
  }: {
    emoji: string;
    fromId: string;
    targetAuthorServiceId: ServiceIdString;
    targetTimestamp: number;
  }
): void {
  db.prepare(
    `DELETE FROM reactions WHERE
    emoji = $emoji AND
    fromId = $fromId AND
    targetAuthorAci = $targetAuthorAci AND
    targetTimestamp = $targetTimestamp;`
  ).run({
    emoji,
    fromId,
    targetAuthorAci: targetAuthorServiceId,
    targetTimestamp,
  });
}
2024-07-22 18:16:33 +00:00
function _getAllReactions(db: ReadableDB): Array<ReactionType> {
return db.prepare<EmptyQuery>('SELECT * from reactions;').all();
}
2024-07-22 18:16:33 +00:00
function _removeAllReactions(db: WritableDB): void {
db.prepare<EmptyQuery>('DELETE from reactions;').run();
}
// Direction of message pagination relative to an anchor position: Older
// pages toward the start of history, Newer toward the end.
enum AdjacentDirection {
  Older = 'Older',
  Newer = 'Newer',
}
// This function needs to pull story replies from all conversations, because when we send
// a story to one or more distribution lists, each reply to it will be in the sender's
// 1:1 conversation with us.
//
// Pages backward from the (receivedAt, sentAt) anchor: same-received_at
// messages with earlier sent_at first, then strictly earlier received_at,
// excluding `messageId` itself, newest first, capped at `limit`.
function getRecentStoryReplies(
  db: ReadableDB,
  storyId: string,
  {
    limit = 100,
    messageId,
    receivedAt = Number.MAX_VALUE,
    sentAt = Number.MAX_VALUE,
  }: GetRecentStoryRepliesOptionsType = {}
): Array<MessageTypeUnhydrated> {
  // Two filters: tie-break within the anchor's received_at, then everything
  // strictly older. UNION ALL keeps each branch's index-friendly ordering.
  const timeFilters = {
    first: sqlFragment`received_at = ${receivedAt} AND sent_at < ${sentAt}`,
    second: sqlFragment`received_at < ${receivedAt}`,
  };

  const createQuery = (timeFilter: QueryFragment): QueryFragment => sqlFragment`
    SELECT json FROM messages WHERE
      (${messageId} IS NULL OR id IS NOT ${messageId}) AND
      isStory IS 0 AND
      storyId IS ${storyId} AND
      (
        ${timeFilter}
      )
    ORDER BY received_at DESC, sent_at DESC
  `;

  const template = sqlFragment`
    SELECT first.json FROM (${createQuery(timeFilters.first)}) as first
    UNION ALL
    SELECT second.json FROM (${createQuery(timeFilters.second)}) as second
  `;

  const [query, params] = sql`${template} LIMIT ${limit}`;

  return db.prepare(query).all(params);
}
// Core pagination query: fetches up to `limit` non-story messages adjacent
// to a (receivedAt, sentAt) anchor in the given direction. Optionally
// restricts to messages with displayable visual-media attachments (used by
// the lightbox). Results are always returned oldest-first: the Older branch
// queries newest-first for index efficiency and reverses at the end.
function getAdjacentMessagesByConversation(
  db: ReadableDB,
  direction: AdjacentDirection,
  {
    conversationId,
    includeStoryReplies,
    limit = 100,
    messageId,
    receivedAt = direction === AdjacentDirection.Older ? Number.MAX_VALUE : 0,
    sentAt = direction === AdjacentDirection.Older ? Number.MAX_VALUE : 0,
    requireVisualMediaAttachments,
    storyId,
  }: AdjacentMessagesByConversationOptionsType
): Array<MessageTypeUnhydrated> {
  let timeFilters: { first: QueryFragment; second: QueryFragment };
  let timeOrder: QueryFragment;

  // First filter breaks ties at the anchor's received_at by sent_at; second
  // catches everything strictly beyond it.
  if (direction === AdjacentDirection.Older) {
    timeFilters = {
      first: sqlFragment`received_at = ${receivedAt} AND sent_at < ${sentAt}`,
      second: sqlFragment`received_at < ${receivedAt}`,
    };
    timeOrder = sqlFragment`DESC`;
  } else {
    timeFilters = {
      first: sqlFragment`received_at = ${receivedAt} AND sent_at > ${sentAt}`,
      second: sqlFragment`received_at > ${receivedAt}`,
    };
    timeOrder = sqlFragment`ASC`;
  }

  // Exclude the anchor message itself when paging older or when collecting
  // media (Newer paging without media may include it).
  const requireDifferentMessage =
    direction === AdjacentDirection.Older || requireVisualMediaAttachments;

  const createQuery = (timeFilter: QueryFragment): QueryFragment => sqlFragment`
    SELECT json FROM messages WHERE
      conversationId = ${conversationId} AND
      ${
        requireDifferentMessage
          ? sqlFragment`(${messageId} IS NULL OR id IS NOT ${messageId}) AND`
          : sqlFragment``
      }
      ${
        requireVisualMediaAttachments
          ? sqlFragment`hasVisualMediaAttachments IS 1 AND`
          : sqlFragment``
      }
      isStory IS 0 AND
      (${_storyIdPredicate(storyId, includeStoryReplies)}) AND
      (
        ${timeFilter}
      )
    ORDER BY received_at ${timeOrder}, sent_at ${timeOrder}
  `;

  let template = sqlFragment`
    SELECT first.json FROM (${createQuery(timeFilters.first)}) as first
    UNION ALL
    SELECT second.json FROM (${createQuery(timeFilters.second)}) as second
  `;

  // See `filterValidAttachments` in ts/state/ducks/lightbox.ts
  if (requireVisualMediaAttachments) {
    // Keep only messages with at least one attachment that has a thumbnail,
    // is not pending, and has no error — mirroring the UI-side filter.
    template = sqlFragment`
      SELECT json
      FROM (${template}) as messages
      WHERE
        (
          SELECT COUNT(*)
          FROM json_each(messages.json ->> 'attachments') AS attachment
          WHERE
            attachment.value ->> 'thumbnail' IS NOT NULL AND
            attachment.value ->> 'pending' IS NOT 1 AND
            attachment.value ->> 'error' IS NULL
        ) > 0
      LIMIT ${limit};
    `;
  } else {
    template = sqlFragment`${template} LIMIT ${limit}`;
  }

  const [query, params] = sql`${template}`;

  const results = db.prepare(query).all(params);

  // Older pages are fetched newest-first; flip to chronological order.
  if (direction === AdjacentDirection.Older) {
    results.reverse();
  }

  return results;
}
2024-07-22 18:16:33 +00:00
function getOlderMessagesByConversation(
db: ReadableDB,
2023-03-04 03:03:15 +00:00
options: AdjacentMessagesByConversationOptionsType
2024-07-22 18:16:33 +00:00
): Array<MessageTypeUnhydrated> {
return getAdjacentMessagesByConversation(
db,
2023-03-04 03:03:15 +00:00
AdjacentDirection.Older,
options
);
}
// Loads all story messages, optionally filtered by conversation and/or
// sender, oldest first. Each result is annotated with whether any replies
// exist and whether any of those replies are our own ('outgoing').
function getAllStories(
  db: ReadableDB,
  {
    conversationId,
    sourceServiceId,
  }: {
    conversationId?: string;
    sourceServiceId?: ServiceIdString;
  }
): GetAllStoriesResultType {
  const rows: ReadonlyArray<{
    json: string;
    hasReplies: number;
    hasRepliesFromSelf: number;
  }> = db
    .prepare<Query>(
      `
      SELECT
        json,
        (SELECT EXISTS(
          SELECT 1
          FROM messages as replies
          WHERE replies.storyId IS messages.id
        )) as hasReplies,
        (SELECT EXISTS(
          SELECT 1
          FROM messages AS selfReplies
          WHERE
            selfReplies.storyId IS messages.id AND
            selfReplies.type IS 'outgoing'
        )) as hasRepliesFromSelf
      FROM messages
      WHERE
        type IS 'story' AND
        ($conversationId IS NULL OR conversationId IS $conversationId) AND
        ($sourceServiceId IS NULL OR sourceServiceId IS $sourceServiceId)
      ORDER BY received_at ASC, sent_at ASC;
      `
    )
    .all({
      conversationId: conversationId || null,
      sourceServiceId: sourceServiceId || null,
    });

  // Convert the SQLite EXISTS integers into booleans.
  return rows.map(row => ({
    ...jsonToObject(row.json),
    hasReplies: row.hasReplies !== 0,
    hasRepliesFromSelf: row.hasRepliesFromSelf !== 0,
  }));
}
2024-07-22 18:16:33 +00:00
function getNewerMessagesByConversation(
db: ReadableDB,
2023-03-04 03:03:15 +00:00
options: AdjacentMessagesByConversationOptionsType
2024-07-22 18:16:33 +00:00
): Array<MessageTypeUnhydrated> {
return getAdjacentMessagesByConversation(
db,
2023-03-04 03:03:15 +00:00
AdjacentDirection.Newer,
options
);
}
function getOldestMessageForConversation(
2024-07-22 18:16:33 +00:00
db: ReadableDB,
conversationId: string,
{
storyId,
includeStoryReplies,
}: {
2023-03-04 03:03:15 +00:00
storyId?: string;
includeStoryReplies: boolean;
}
): MessageMetricsType | undefined {
2023-03-20 22:23:53 +00:00
const [query, params] = sql`
SELECT received_at, sent_at, id FROM messages WHERE
conversationId = ${conversationId} AND
isStory IS 0 AND
(${_storyIdPredicate(storyId, includeStoryReplies)})
ORDER BY received_at ASC, sent_at ASC
LIMIT 1;
2023-03-20 22:23:53 +00:00
`;
const row = db.prepare(query).get(params);
if (!row) {
return undefined;
}
return row;
}
function getNewestMessageForConversation(
2024-07-22 18:16:33 +00:00
db: ReadableDB,
conversationId: string,
{
storyId,
includeStoryReplies,
}: {
2023-03-04 03:03:15 +00:00
storyId?: string;
includeStoryReplies: boolean;
}
): MessageMetricsType | undefined {
2023-03-20 22:23:53 +00:00
const [query, params] = sql`
SELECT received_at, sent_at, id FROM messages WHERE
conversationId = ${conversationId} AND
isStory IS 0 AND
(${_storyIdPredicate(storyId, includeStoryReplies)})
ORDER BY received_at DESC, sent_at DESC
LIMIT 1;
2023-03-20 22:23:53 +00:00
`;
const row = db.prepare(query).get(params);
if (!row) {
return undefined;
}
return row;
}
2020-08-07 00:50:54 +00:00
2023-03-20 22:23:53 +00:00
/**
 * Exclusive (received_at, sent_at) bounds for getMessagesBetween, plus
 * whether story replies should be considered.
 */
export type GetMessagesBetweenOptions = Readonly<{
  after: { received_at: number; sent_at: number };
  before: { received_at: number; sent_at: number };
  includeStoryReplies: boolean;
}>;
// Returns the ids of non-story messages strictly between the `after` and
// `before` positions (exclusive on both ends), oldest first.
function getMessagesBetween(
  db: ReadableDB,
  conversationId: string,
  options: GetMessagesBetweenOptions
): Array<string> {
  // In the future we could accept this as an option, but for now we just
  // use it for the story predicate.
  const storyId = undefined;

  const { after, before, includeStoryReplies } = options;

  const [query, params] = sql`
    SELECT id
    FROM messages
    WHERE
      conversationId = ${conversationId} AND
      (${_storyIdPredicate(storyId, includeStoryReplies)}) AND
      isStory IS 0 AND
      (
        received_at > ${after.received_at}
        OR (received_at = ${after.received_at} AND sent_at > ${after.sent_at})
      ) AND (
        received_at < ${before.received_at}
        OR (received_at = ${before.received_at} AND sent_at < ${before.sent_at})
      )
    ORDER BY received_at ASC, sent_at ASC;
  `;

  const rows = db.prepare(query).all(params);
  return rows.map(row => row.id);
}
/**
 * Given a set of deleted message IDs, find a message in the conversation that
 * is close to the set. Searching from the last selected message as a starting
 * point.
 */
function getNearbyMessageFromDeletedSet(
  db: ReadableDB,
  {
    conversationId,
    lastSelectedMessage,
    deletedMessageIds,
    storyId,
    includeStoryReplies,
  }: GetNearbyMessageFromDeletedSetOptionsType
): string | null {
  // Finds the closest surviving incoming/outgoing message on one side of the
  // last-selected position; `after` chooses the scan direction.
  function runQuery(after: boolean) {
    const dir = after ? sqlFragment`ASC` : sqlFragment`DESC`;
    const compare = after ? sqlFragment`>` : sqlFragment`<`;
    const { received_at, sent_at } = lastSelectedMessage;

    const [query, params] = sql`
      SELECT id FROM messages WHERE
        conversationId = ${conversationId} AND
        (${_storyIdPredicate(storyId, includeStoryReplies)}) AND
        isStory IS 0 AND
        id NOT IN (${sqlJoin(deletedMessageIds)}) AND
        type IN ('incoming', 'outgoing')
        AND (
          (received_at = ${received_at} AND sent_at ${compare} ${sent_at}) OR
          received_at ${compare} ${received_at}
        )
      ORDER BY received_at ${dir}, sent_at ${dir}
      LIMIT 1
    `;

    return db.prepare(query).pluck().get(params);
  }

  // Prefer the nearest newer message; fall back to the nearest older one.
  const after = runQuery(true);
  if (after != null) {
    return after;
  }

  const before = runQuery(false);
  if (before != null) {
    return before;
  }

  return null;
}
// Returns the most recent message that counts as conversation "activity"
// (shouldAffectActivity flag), skipping sync-originated timer changes and
// group-leave events from others. Story replies are excluded unless
// `includeStoryReplies` is set (the storyId filter is spliced into the SQL
// text, not bound as a parameter).
function getLastConversationActivity(
  db: ReadableDB,
  {
    conversationId,
    includeStoryReplies,
  }: {
    conversationId: string;
    includeStoryReplies: boolean;
  }
): MessageType | undefined {
  const row = prepare(
    db,
    `
      SELECT json FROM messages
      INDEXED BY messages_activity
      WHERE
        conversationId IS $conversationId AND
        shouldAffectActivity IS 1 AND
        isTimerChangeFromSync IS 0 AND
        ${includeStoryReplies ? '' : 'storyId IS NULL AND'}
        isGroupLeaveEventFromOther IS 0
      ORDER BY received_at DESC, sent_at DESC
      LIMIT 1;
      `
  ).get({
    conversationId,
  });

  if (!row) {
    return undefined;
  }

  return jsonToObject(row.json);
}
// Returns the most recent message suitable for the conversation-list preview
// (shouldAffectPreview flag), skipping group-leave events from others and —
// unless includeStoryReplies — story replies. The outer WHERE drops messages
// that have already expired as of now.
function getLastConversationPreview(
  db: ReadableDB,
  {
    conversationId,
    includeStoryReplies,
  }: {
    conversationId: string;
    includeStoryReplies: boolean;
  }
): MessageType | undefined {
  type Row = Readonly<{
    json: string;
  }>;

  // Pick the partial index that matches the storyId filter below.
  const index = includeStoryReplies
    ? 'messages_preview'
    : 'messages_preview_without_story';

  const row: Row | undefined = prepare(
    db,
    `
      SELECT json FROM (
        SELECT json, expiresAt FROM messages
        INDEXED BY ${index}
        WHERE
          conversationId IS $conversationId AND
          shouldAffectPreview IS 1 AND
          isGroupLeaveEventFromOther IS 0
          ${includeStoryReplies ? '' : 'AND storyId IS NULL'}
        ORDER BY received_at DESC, sent_at DESC
      )
      WHERE likely(expiresAt > $now)
      LIMIT 1
    `
  ).get({
    conversationId,
    now: Date.now(),
  });

  return row ? jsonToObject(row.json) : undefined;
}
2024-07-22 18:16:33 +00:00
function getConversationMessageStats(
db: ReadableDB,
{
conversationId,
includeStoryReplies,
}: {
conversationId: string;
includeStoryReplies: boolean;
}
): ConversationMessageStatsType {
return db.transaction(() => {
return {
2024-07-22 18:16:33 +00:00
activity: getLastConversationActivity(db, {
conversationId,
includeStoryReplies,
}),
2024-07-22 18:16:33 +00:00
preview: getLastConversationPreview(db, {
conversationId,
includeStoryReplies,
}),
2024-07-22 18:16:33 +00:00
hasUserInitiatedMessages: hasUserInitiatedMessages(db, conversationId),
};
})();
}
2024-07-22 18:16:33 +00:00
function getLastConversationMessage(
db: ReadableDB,
{
conversationId,
}: {
conversationId: string;
}
): MessageType | undefined {
const row = db
.prepare<Query>(
`
2023-01-27 17:47:24 +00:00
SELECT json FROM messages WHERE
conversationId = $conversationId
ORDER BY received_at DESC, sent_at DESC
LIMIT 1;
`
)
.get({
conversationId,
});
if (!row) {
return undefined;
}
return jsonToObject(row.json);
}
function getOldestUnseenMessageForConversation(
2024-07-22 18:16:33 +00:00
db: ReadableDB,
conversationId: string,
{
storyId,
includeStoryReplies,
}: {
2023-03-04 03:03:15 +00:00
storyId?: string;
includeStoryReplies: boolean;
}
): MessageMetricsType | undefined {
2023-03-20 22:23:53 +00:00
const [query, params] = sql`
SELECT received_at, sent_at, id FROM messages WHERE
conversationId = ${conversationId} AND
seenStatus = ${SeenStatus.Unseen} AND
isStory IS 0 AND
(${_storyIdPredicate(storyId, includeStoryReplies)})
ORDER BY received_at ASC, sent_at ASC
LIMIT 1;
`;
const row = db.prepare(query).get(params);
if (!row) {
return undefined;
}
return row;
}
2024-07-22 18:16:33 +00:00
function getOldestUnreadMentionOfMeForConversation(
db: ReadableDB,
conversationId: string,
options: {
storyId?: string;
includeStoryReplies: boolean;
}
): MessageMetricsType | undefined {
const [query, params] = sql`
SELECT received_at, sent_at, id FROM messages WHERE
conversationId = ${conversationId} AND
readStatus = ${ReadStatus.Unread} AND
mentionsMe IS 1 AND
isStory IS 0 AND
(${_storyIdPredicate(options.storyId, options.includeStoryReplies)})
ORDER BY received_at ASC, sent_at ASC
LIMIT 1;
`;
return db.prepare(query).get(params);
}
2024-07-22 18:16:33 +00:00
function getTotalUnreadForConversation(
db: ReadableDB,
conversationId: string,
{
storyId,
includeStoryReplies,
}: {
2023-03-04 03:03:15 +00:00
storyId: string | undefined;
includeStoryReplies: boolean;
}
): number {
2023-03-20 22:23:53 +00:00
const [query, params] = sql`
SELECT count(1)
FROM messages
WHERE
conversationId = ${conversationId} AND
readStatus = ${ReadStatus.Unread} AND
isStory IS 0 AND
(${_storyIdPredicate(storyId, includeStoryReplies)})
`;
const row = db.prepare(query).pluck().get(params);
2023-01-17 21:07:21 +00:00
return row;
}
2024-07-22 18:16:33 +00:00
function getTotalUnreadMentionsOfMeForConversation(
db: ReadableDB,
conversationId: string,
{
storyId,
includeStoryReplies,
}: {
storyId?: string;
includeStoryReplies: boolean;
}
): number {
const [query, params] = sql`
SELECT count(1)
FROM messages
WHERE
conversationId = ${conversationId} AND
readStatus = ${ReadStatus.Unread} AND
mentionsMe IS 1 AND
isStory IS 0 AND
(${_storyIdPredicate(storyId, includeStoryReplies)})
`;
const row = db.prepare(query).pluck().get(params);
return row;
}
2024-07-22 18:16:33 +00:00
function getTotalUnseenForConversation(
db: ReadableDB,
conversationId: string,
{
storyId,
includeStoryReplies,
}: {
2023-03-04 03:03:15 +00:00
storyId?: string;
includeStoryReplies: boolean;
}
): number {
2023-03-20 22:23:53 +00:00
const [query, params] = sql`
SELECT count(1)
FROM messages
WHERE
2023-03-20 22:23:53 +00:00
conversationId = ${conversationId} AND
seenStatus = ${SeenStatus.Unseen} AND
isStory IS 0 AND
(${_storyIdPredicate(storyId, includeStoryReplies)})
2023-03-20 22:23:53 +00:00
`;
const row = db.prepare(query).pluck().get(params);
2023-01-17 21:07:21 +00:00
return row;
}
2024-07-22 18:16:33 +00:00
function getMessageMetricsForConversation(
db: ReadableDB,
options: {
conversationId: string;
storyId?: string;
includeStoryReplies: boolean;
}
): ConversationMetricsType {
2023-03-04 03:03:15 +00:00
const { conversationId } = options;
2024-07-22 18:16:33 +00:00
const oldest = getOldestMessageForConversation(db, conversationId, options);
const newest = getNewestMessageForConversation(db, conversationId, options);
const oldestUnseen = getOldestUnseenMessageForConversation(
2024-07-22 18:16:33 +00:00
db,
conversationId,
options
);
2024-07-22 18:16:33 +00:00
const totalUnseen = getTotalUnseenForConversation(
db,
conversationId,
options
);
return {
oldest: oldest ? pick(oldest, ['received_at', 'sent_at', 'id']) : undefined,
newest: newest ? pick(newest, ['received_at', 'sent_at', 'id']) : undefined,
oldestUnseen: oldestUnseen
? pick(oldestUnseen, ['received_at', 'sent_at', 'id'])
: undefined,
totalUnseen,
};
}
2024-07-22 18:16:33 +00:00
function getConversationRangeCenteredOnMessage(
db: ReadableDB,
2023-03-04 03:03:15 +00:00
options: AdjacentMessagesByConversationOptionsType
2024-07-22 18:16:33 +00:00
): GetConversationRangeCenteredOnMessageResultType<MessageTypeUnhydrated> {
return db.transaction(() => {
return {
2024-07-22 18:16:33 +00:00
older: getAdjacentMessagesByConversation(
db,
2023-03-04 03:03:15 +00:00
AdjacentDirection.Older,
options
),
2024-07-22 18:16:33 +00:00
newer: getAdjacentMessagesByConversation(
db,
2023-03-04 03:03:15 +00:00
AdjacentDirection.Newer,
options
),
2024-07-22 18:16:33 +00:00
metrics: getMessageMetricsForConversation(db, options),
};
})();
}
2024-07-22 18:16:33 +00:00
function getAllCallHistory(db: ReadableDB): ReadonlyArray<CallHistoryDetails> {
2023-08-09 00:53:06 +00:00
const [query] = sql`
SELECT * FROM callsHistory;
`;
return db.prepare(query).all();
}
// Clears call history up to the timestamp identified by `target`: deletes
// the matching call-history messages (plus those for unused ad-hoc call
// links) and marks the call rows themselves Deleted. Returns the ids of the
// deleted messages so callers can update in-memory state.
function clearCallHistory(
  db: WritableDB,
  target: CallLogEventTarget
): ReadonlyArray<string> {
  return db.transaction(() => {
    // Resolve the target to a concrete cutoff timestamp.
    const timestamp = getMessageTimestampForCallLogEventTarget(db, target);

    const [selectCallIdsQuery, selectCallIdsParams] = sql`
      SELECT callsHistory.callId
      FROM callsHistory
      WHERE
        -- Prior calls
        (callsHistory.timestamp <= ${timestamp})
        -- Unused call links
        OR (
          callsHistory.mode IS ${CALL_MODE_ADHOC} AND
          callsHistory.status IS ${CALL_STATUS_PENDING}
        );
    `;

    const callIds = db
      .prepare(selectCallIdsQuery)
      .pluck()
      .all(selectCallIdsParams);

    // Delete the call-history messages batch-by-batch (bound-variable
    // limit), collecting deleted ids via RETURNING.
    let deletedMessageIds: ReadonlyArray<string> = [];

    batchMultiVarQuery(db, callIds, (ids: ReadonlyArray<string>): void => {
      const [deleteMessagesQuery, deleteMessagesParams] = sql`
        DELETE FROM messages
        WHERE messages.type IS 'call-history'
        AND messages.callId IN (${sqlJoin(ids)})
        RETURNING id;
      `;

      const batchDeletedMessageIds = db
        .prepare(deleteMessagesQuery)
        .pluck()
        .all(deleteMessagesParams);

      deletedMessageIds = deletedMessageIds.concat(batchDeletedMessageIds);
    });

    // Tombstone the call rows rather than removing them; timestamp is reset
    // to the time of deletion.
    const [clearCallsHistoryQuery, clearCallsHistoryParams] = sql`
      UPDATE callsHistory
      SET
        status = ${DirectCallStatus.Deleted},
        timestamp = ${Date.now()}
      WHERE callsHistory.timestamp <= ${timestamp};
    `;

    try {
      db.prepare(clearCallsHistoryQuery).run(clearCallsHistoryParams);
    } catch (error) {
      logger.error(error, error.message);
      throw error;
    }

    return deletedMessageIds;
  })();
}
2024-07-22 18:16:33 +00:00
function markCallHistoryDeleted(db: WritableDB, callId: string): void {
const [query, params] = sql`
UPDATE callsHistory
SET
status = ${DirectCallStatus.Deleted},
timestamp = ${Date.now()}
WHERE callId = ${callId};
`;
db.prepare(query).run(params);
}
// Removes every call-history message whose underlying call record has been
// marked Deleted, inside a transaction.
function cleanupCallHistoryMessages(db: WritableDB): void {
  return db.transaction(() => {
    const [query, params] = sql`
      DELETE FROM messages
      WHERE messages.id IN (
        SELECT messages.id FROM messages
        LEFT JOIN callsHistory ON callsHistory.callId IS messages.callId
        WHERE messages.type IS 'call-history'
        AND callsHistory.status IS ${CALL_STATUS_DELETED}
      )
    `;
    db.prepare(query).run(params);
  })();
}
2024-07-22 18:16:33 +00:00
function getCallHistoryMessageByCallId(
db: ReadableDB,
options: {
conversationId: string;
callId: string;
}
): MessageType | undefined {
2023-08-09 00:53:06 +00:00
const [query, params] = sql`
SELECT json
FROM messages
WHERE conversationId = ${options.conversationId}
AND type = 'call-history'
AND callId = ${options.callId}
`;
const row = db.prepare(query).get(params);
if (row == null) {
return;
}
return jsonToObject(row.json);
}
2024-07-22 18:16:33 +00:00
// Fetches a single call-history row by (callId, peerId) and validates it
// against the zod schema. Returns undefined when no row matches.
// Note: `IS` (rather than `=`) is NULL-safe in SQLite.
function getCallHistory(
  db: ReadableDB,
  callId: string,
  peerId: ServiceIdString | string
): CallHistoryDetails | undefined {
  const [query, params] = sql`
    SELECT * FROM callsHistory
    WHERE callId IS ${callId}
    AND peerId IS ${peerId};
  `;

  const row = db.prepare(query).get(params);

  if (row == null) {
    return;
  }

  return callHistoryDetailsSchema.parse(row);
}
// Pre-rendered SQL literals for values that never change at runtime;
// sqlConstant inlines them into query text instead of binding parameters.
const SEEN_STATUS_UNSEEN = sqlConstant(SeenStatus.Unseen);
const SEEN_STATUS_SEEN = sqlConstant(SeenStatus.Seen);
const CALL_STATUS_MISSED = sqlConstant(CallStatusValue.Missed);
const CALL_STATUS_DELETED = sqlConstant(CallStatusValue.Deleted);
const CALL_STATUS_PENDING = sqlConstant(CallStatusValue.Pending);
// NOTE(review): despite the CALL_STATUS_ prefix this wraps
// CallDirection.Incoming and is compared against callsHistory.direction
// below — a rename to CALL_DIRECTION_INCOMING would be clearer.
const CALL_STATUS_INCOMING = sqlConstant(CallDirection.Incoming);
const CALL_MODE_ADHOC = sqlConstant(CallMode.Adhoc);
// Window used to group related calls together in the call log.
const FOUR_HOURS_IN_MS = sqlConstant(4 * 60 * 60 * 1000);
2024-07-22 18:16:33 +00:00
// Counts unseen call-history messages for missed *incoming* calls — the
// number shown on the calls-tab badge. `pluck()` makes better-sqlite3 return
// just the first column (the count) instead of a row object.
function getCallHistoryUnreadCount(db: ReadableDB): number {
  const [query, params] = sql`
    SELECT count(*) FROM messages
    LEFT JOIN callsHistory ON callsHistory.callId = messages.callId
    WHERE messages.type IS 'call-history'
      AND messages.seenStatus IS ${SEEN_STATUS_UNSEEN}
      AND callsHistory.status IS ${CALL_STATUS_MISSED}
      AND callsHistory.direction IS ${CALL_STATUS_INCOMING}
  `;
  const row = db.prepare(query).pluck().get(params);
  return row;
}
2024-07-22 18:16:33 +00:00
// Marks the call-history message for one call as seen. Updates both the
// indexed `seenStatus` column and the denormalized copy inside the message's
// JSON blob (via SQLite's json_patch) so the two stay in sync.
function markCallHistoryRead(db: WritableDB, callId: string): void {
  const jsonPatch = JSON.stringify({
    seenStatus: SeenStatus.Seen,
  });

  const [query, params] = sql`
    UPDATE messages
    SET
      seenStatus = ${SEEN_STATUS_SEEN},
      json = json_patch(json, ${jsonPatch})
    WHERE type IS 'call-history'
      AND callId IS ${callId}
  `;

  db.prepare(query).run(params);
}
2024-07-10 21:10:08 +00:00
// Resolves a CallLogEventTarget to the sent_at timestamp of its call-history
// message. If the target lacks a callId/peerId, first resolves the most
// recent call at or before target.timestamp (optionally scoped to peerId).
// Falls back to target.timestamp whenever the call or its message is missing.
function getMessageTimestampForCallLogEventTarget(
  db: ReadableDB,
  target: CallLogEventTarget
): number {
  let { callId, peerId } = target;
  const { timestamp } = target;

  if (callId == null || peerId == null) {
    const predicate =
      peerId != null
        ? sqlFragment`callsHistory.peerId IS ${target.peerId}`
        : sqlFragment`TRUE`;

    // Get the most recent call history timestamp for the target.timestamp
    const [selectQuery, selectParams] = sql`
      SELECT callsHistory.callId, callsHistory.peerId
      FROM callsHistory
      WHERE ${predicate}
        AND callsHistory.timestamp <= ${timestamp}
      ORDER BY callsHistory.timestamp DESC
      LIMIT 1
    `;

    const row = db.prepare(selectQuery).get(selectParams);
    if (row == null) {
      // NOTE(review): log label says 'getTimestampForCallLogEventTarget' but
      // the function is named getMessageTimestampForCallLogEventTarget —
      // likely a stale name; left as-is since it's a runtime string.
      log.warn('getTimestampForCallLogEventTarget: Target call not found');
      return timestamp;
    }

    callId = row.callId as string;
    peerId = row.peerId as AciString;
  }

  // For call-history messages, conversationId holds the call's peerId.
  const [selectQuery, selectParams] = sql`
    SELECT messages.sent_at
    FROM messages
    WHERE messages.type IS 'call-history'
      AND messages.conversationId IS ${peerId}
      AND messages.callId IS ${callId}
    LIMIT 1
  `;

  const messageTimestamp = db.prepare(selectQuery).pluck().get(selectParams);
  if (messageTimestamp == null) {
    log.warn(
      'getTimestampForCallLogEventTarget: Target call message not found'
    );
  }

  return messageTimestamp ?? target.timestamp;
}
2024-07-22 18:16:33 +00:00
// Marks all unseen call-history messages sent at or before the target's
// resolved timestamp as seen. When `inConversation` is true the update is
// limited to the target's conversation (peerId), which must then be present.
export function markAllCallHistoryRead(
  db: WritableDB,
  target: CallLogEventTarget,
  inConversation = false
): void {
  if (inConversation) {
    strictAssert(target.peerId, 'peerId is required');
  }

  db.transaction(() => {
    const jsonPatch = JSON.stringify({
      seenStatus: SeenStatus.Seen,
    });

    // Resolve the cutoff inside the transaction so it is consistent with
    // the update below.
    const timestamp = getMessageTimestampForCallLogEventTarget(db, target);

    const predicate = inConversation
      ? sqlFragment`messages.conversationId IS ${target.peerId}`
      : sqlFragment`TRUE`;

    const [updateQuery, updateParams] = sql`
      UPDATE messages
      SET
        seenStatus = ${SEEN_STATUS_SEEN},
        json = json_patch(json, ${jsonPatch})
      WHERE messages.type IS 'call-history'
        AND ${predicate}
        AND messages.seenStatus IS ${SEEN_STATUS_UNSEEN}
        AND messages.sent_at <= ${timestamp}
    `;

    db.prepare(updateQuery).run(updateParams);
  })();
}
2024-07-22 18:16:33 +00:00
// Convenience wrapper around markAllCallHistoryRead scoped to a single
// conversation; the target must carry a peerId.
function markAllCallHistoryReadInConversation(
  db: WritableDB,
  target: CallLogEventTarget
): void {
  strictAssert(target.peerId, 'peerId is required');
  markAllCallHistoryRead(db, target, true);
}
2024-07-22 18:16:33 +00:00
// Shared implementation behind getCallHistoryGroups/getCallHistoryGroupsCount.
// Groups related calls (same peer, type, direction, status) that fall within
// a FOUR_HOURS_IN_MS window under a single "parent" call, mirroring the
// grouping logic used on Android. When filtering by conversations or call
// links it builds a TEMP table of allowed peers, which is why this takes a
// WritableDB. Returns either a count (isCount = true) or raw group rows.
function getCallHistoryGroupData(
  db: WritableDB,
  isCount: boolean,
  filter: CallHistoryFilter,
  pagination: CallHistoryPagination
): unknown {
  return db.transaction(() => {
    const { limit, offset } = pagination;
    const { status, conversationIds, callLinkRoomIds } = filter;

    const isUsingTempTable = conversationIds != null || callLinkRoomIds != null;
    if (isUsingTempTable) {
      // One row per allowed peer; each column corresponds to one of the
      // possible peerId representations joined against below.
      const [createTempTable] = sql`
        CREATE TEMP TABLE temp_callHistory_filtered_peers (
          conversationId TEXT,
          serviceId TEXT,
          groupId TEXT,
          callLinkRoomId TEXT
        );
      `;

      db.exec(createTempTable);

      if (conversationIds != null) {
        strictAssert(conversationIds.length > 0, "can't filter by empty array");

        // Batched to stay under SQLite's bound-variable limit.
        batchMultiVarQuery(db, conversationIds, ids => {
          const idList = sqlJoin(ids.map(id => sqlFragment`${id}`));

          const [insertQuery, insertParams] = sql`
            INSERT INTO temp_callHistory_filtered_peers
              (conversationId, serviceId, groupId)
            SELECT id, serviceId, groupId
            FROM conversations
            WHERE conversations.id IN (${idList});
          `;

          db.prepare(insertQuery).run(insertParams);
        });
      }

      if (callLinkRoomIds != null) {
        strictAssert(callLinkRoomIds.length > 0, "can't filter by empty array");

        batchMultiVarQuery(db, callLinkRoomIds, ids => {
          const idList = sqlJoin(ids.map(id => sqlFragment`(${id})`));

          const [insertQuery, insertParams] = sql`
            INSERT INTO temp_callHistory_filtered_peers
              (callLinkRoomId)
            VALUES ${idList};
          `;

          db.prepare(insertQuery).run(insertParams);
        });
      }
    }

    // peerId can be a conversation id (legacy), a serviceId, groupId, or call
    // link roomId
    const innerJoin = isUsingTempTable
      ? sqlFragment`
          INNER JOIN temp_callHistory_filtered_peers ON (
            temp_callHistory_filtered_peers.conversationId IS c.peerId
            OR temp_callHistory_filtered_peers.serviceId IS c.peerId
            OR temp_callHistory_filtered_peers.groupId IS c.peerId
            OR temp_callHistory_filtered_peers.callLinkRoomId IS c.peerId
          )
        `
      : sqlFragment``;

    // "All" still excludes soft-deleted calls; "Missed" additionally requires
    // an incoming, missed call.
    const filterClause =
      status === CallHistoryFilterStatus.All
        ? sqlFragment`status IS NOT ${CALL_STATUS_DELETED}`
        : sqlFragment`
            direction IS ${CALL_STATUS_INCOMING} AND
            status IS ${CALL_STATUS_MISSED} AND status IS NOT ${CALL_STATUS_DELETED}
          `;

    // limit of 0 means "no pagination" (used by the count path).
    const offsetLimit =
      limit > 0 ? sqlFragment`LIMIT ${limit} OFFSET ${offset}` : sqlFragment``;

    // COUNT(*) OVER(): As a result of GROUP BY in the query (to limit adhoc
    // call history to the single latest call), COUNT(*) changes to counting
    // each group's counts rather than the total number of rows. Example: Say
    // we have 2 group calls (A and B) and 10 adhoc calls on a single link.
    // COUNT(*) ... GROUP BY returns [1, 1, 10] corresponding with callId A,
    // callId B, adhoc peerId (the GROUP conditions). However we want COUNT(*)
    // to do the normal thing and return total rows (so in the example above
    // we want 3). COUNT(*) OVER achieves this.
    const projection = isCount
      ? sqlFragment`COUNT(*) OVER() AS count`
      : sqlFragment`peerId, ringerId, mode, type, direction, status, timestamp, possibleChildren, inPeriod`;

    const [query, params] = sql`
      SELECT
        ${projection}
      FROM (
        -- 1. 'callAndGroupInfo': This section collects metadata to determine the
        -- parent and children of each call. We can identify the real parents of calls
        -- within the query, but we need to build the children at runtime.
        WITH callAndGroupInfo AS (
          SELECT
            *,
            -- 1a. 'possibleParent': This identifies the first call that _could_ be
            -- considered the current call's parent. Note: The 'possibleParent' is not
            -- necessarily the true parent if there is another call between them that
            -- isn't a part of the group.
            (
              SELECT callId
              FROM callsHistory
              WHERE
                callsHistory.direction IS c.direction
                AND callsHistory.type IS c.type
                AND callsHistory.peerId IS c.peerId
                AND (callsHistory.timestamp - ${FOUR_HOURS_IN_MS}) <= c.timestamp
                AND callsHistory.timestamp >= c.timestamp
                -- Tracking Android & Desktop separately to make the queries easier to compare
                -- Android Constraints:
                AND (
                  (callsHistory.status IS c.status AND callsHistory.status IS ${CALL_STATUS_MISSED}) OR
                  (callsHistory.status IS NOT ${CALL_STATUS_MISSED} AND c.status IS NOT ${CALL_STATUS_MISSED})
                )
                -- Desktop Constraints:
                AND callsHistory.status IS c.status
                AND ${filterClause}
              ORDER BY timestamp DESC
            ) as possibleParent,
            -- 1b. 'possibleChildren': This identifies all possible calls that can
            -- be grouped with the current call. Note: This current call is not
            -- necessarily the parent, and not all possible children will end up as
            -- children as they might have another parent
            (
              SELECT JSON_GROUP_ARRAY(
                JSON_OBJECT(
                  'callId', callId,
                  'timestamp', timestamp
                )
              )
              FROM callsHistory
              WHERE
                callsHistory.direction IS c.direction
                AND callsHistory.type IS c.type
                AND callsHistory.peerId IS c.peerId
                AND (c.timestamp - ${FOUR_HOURS_IN_MS}) <= callsHistory.timestamp
                AND c.timestamp >= callsHistory.timestamp
                -- Tracking Android & Desktop separately to make the queries easier to compare
                -- Android Constraints:
                AND (
                  (callsHistory.status IS c.status AND callsHistory.status IS ${CALL_STATUS_MISSED}) OR
                  (callsHistory.status IS NOT ${CALL_STATUS_MISSED} AND c.status IS NOT ${CALL_STATUS_MISSED})
                )
                -- Desktop Constraints:
                AND callsHistory.status IS c.status
                AND ${filterClause}
              ORDER BY timestamp DESC
            ) as possibleChildren,
            -- 1c. 'inPeriod': This identifies all calls in a time period after the
            -- current call. They may or may not be a part of the group.
            (
              SELECT GROUP_CONCAT(callId)
              FROM callsHistory
              WHERE
                (c.timestamp - ${FOUR_HOURS_IN_MS}) <= callsHistory.timestamp
                AND c.timestamp >= callsHistory.timestamp
                AND ${filterClause}
            ) AS inPeriod
          FROM callsHistory AS c
          ${innerJoin}
          WHERE
            ${filterClause}
          ORDER BY timestamp DESC
        )
        -- 2. 'isParent': We need to identify the true parent of the group in cases
        -- where the previous call is not a part of the group.
        SELECT
          *,
          CASE
            WHEN LAG (possibleParent, 1, 0) OVER (
              -- Note: This is an optimization assuming that we've already got 'timestamp DESC' ordering
              -- from the query above. If we find that ordering isn't always correct, we can uncomment this:
              -- ORDER BY timestamp DESC
            ) != possibleParent THEN callId
            ELSE possibleParent
          END AS parent
        FROM callAndGroupInfo
      ) AS parentCallAndGroupInfo
      WHERE parent = parentCallAndGroupInfo.callId
      GROUP BY
        CASE
          -- By spec, limit adhoc call history to the most recent call
          WHEN mode IS ${CALL_MODE_ADHOC} THEN peerId
          ELSE callId
        END
      ORDER BY parentCallAndGroupInfo.timestamp DESC
      ${offsetLimit};
    `;

    const result = isCount
      ? db.prepare(query).pluck(true).get(params)
      : db.prepare(query).all(params);

    if (isUsingTempTable) {
      const [dropTempTableQuery] = sql`
        DROP TABLE temp_callHistory_filtered_peers;
      `;

      db.exec(dropTempTableQuery);
    }

    return result;
  })();
}
// Validates the COUNT(*) result returned by getCallHistoryGroupData.
const countSchema = z.number().int().nonnegative();
2024-07-22 18:16:33 +00:00
// Returns the total number of call-history groups matching the filter.
// Uses limit: 0 to disable pagination in the shared query.
function getCallHistoryGroupsCount(
  db: ReadableDB,
  filter: CallHistoryFilter
): number {
  // getCallHistoryGroupData creates a temporary table and thus requires
  // write access.
  const writable = toUnsafeWritableDB(db, 'only temp table use');

  const result = getCallHistoryGroupData(writable, true, filter, {
    limit: 0,
    offset: 0,
  });

  // COUNT(*) OVER() yields no row at all when nothing matches.
  if (result == null) {
    return 0;
  }
  return countSchema.parse(result);
}
// Raw row shape returned by getCallHistoryGroupData(isCount = false):
// a group without its children, plus two serialized columns —
// `possibleChildren` (JSON array) and `inPeriod` (comma-separated callIds).
const groupsDataSchema = z.array(
  callHistoryGroupSchema.omit({ children: true }).extend({
    possibleChildren: z.string(),
    inPeriod: z.string(),
  })
);

// Shape of each entry inside the parsed `possibleChildren` JSON.
const possibleChildrenSchema = z.array(
  callHistoryDetailsSchema.pick({
    callId: true,
    timestamp: true,
  })
);
2024-07-22 18:16:33 +00:00
function getCallHistoryGroups(
db: ReadableDB,
2023-08-09 00:53:06 +00:00
filter: CallHistoryFilter,
pagination: CallHistoryPagination
2024-07-22 18:16:33 +00:00
): Array<CallHistoryGroup> {
// getCallHistoryGroupData creates a temporary table and thus requires
2023-10-03 00:27:02 +00:00
// write access.
2024-07-22 18:16:33 +00:00
const writable = toUnsafeWritableDB(db, 'only temp table use');
2023-08-09 00:53:06 +00:00
const groupsData = groupsDataSchema.parse(
2024-07-22 18:16:33 +00:00
getCallHistoryGroupData(writable, false, filter, pagination)
2023-08-09 00:53:06 +00:00
);
const taken = new Set<string>();
return groupsData
.map(groupData => {
return {
...groupData,
possibleChildren: possibleChildrenSchema.parse(
JSON.parse(groupData.possibleChildren)
),
inPeriod: new Set(groupData.inPeriod.split(',')),
};
})
.reverse()
.map(group => {
2024-05-22 16:24:27 +00:00
const { possibleChildren, inPeriod, type, ...rest } = group;
2023-08-09 00:53:06 +00:00
const children = [];
for (const child of possibleChildren) {
if (!taken.has(child.callId) && inPeriod.has(child.callId)) {
children.push(child);
taken.add(child.callId);
2024-05-22 16:24:27 +00:00
if (type === CallType.Adhoc) {
// By spec, limit adhoc call history to the most recent call
break;
}
2023-08-09 00:53:06 +00:00
}
}
2024-05-22 16:24:27 +00:00
return callHistoryGroupSchema.parse({ ...rest, type, children });
2023-08-09 00:53:06 +00:00
})
.reverse();
}
2024-07-22 18:16:33 +00:00
// Upserts one callsHistory row keyed by callId (INSERT OR REPLACE), writing
// every column from the validated CallHistoryDetails object.
function saveCallHistory(
  db: WritableDB,
  callHistory: CallHistoryDetails
): void {
  const [insertQuery, insertParams] = sql`
    INSERT OR REPLACE INTO callsHistory (
      callId,
      peerId,
      ringerId,
      mode,
      type,
      direction,
      status,
      timestamp
    ) VALUES (
      ${callHistory.callId},
      ${callHistory.peerId},
      ${callHistory.ringerId},
      ${callHistory.mode},
      ${callHistory.type},
      ${callHistory.direction},
      ${callHistory.status},
      ${callHistory.timestamp}
    );
  `;

  db.prepare(insertQuery).run(insertParams);
}
2024-07-22 18:16:33 +00:00
// Returns true when a group-call call-history message for the given era
// already exists in the conversation. Reads the legacy
// `callHistoryDetails` fields embedded in the message JSON.
function hasGroupCallHistoryMessage(
  db: ReadableDB,
  conversationId: string,
  eraId: string
): boolean {
  // EXISTS(...) evaluates to 0 or 1; pluck() returns that scalar directly.
  const exists: number = db
    .prepare<Query>(
      `
      SELECT EXISTS(
        SELECT 1 FROM messages
        WHERE conversationId = $conversationId
        AND type = 'call-history'
        AND json_extract(json, '$.callHistoryDetails.callMode') = 'Group'
        AND json_extract(json, '$.callHistoryDetails.eraId') = $eraId
      );
      `
    )
    .pluck()
    .get({
      conversationId,
      eraId,
    });

  return exists !== 0;
}
2024-07-22 18:16:33 +00:00
// Sets status = Missed on the given calls. Internal helper: expects to run
// inside a transaction (see markCallHistoryMissed). Batched to stay under
// SQLite's bound-variable limit.
function _markCallHistoryMissed(
  db: WritableDB,
  callIds: ReadonlyArray<string>
) {
  batchMultiVarQuery(db, callIds, batch => {
    const [updateQuery, updateParams] = sql`
      UPDATE callsHistory
      SET status = ${sqlConstant(GroupCallStatus.Missed)}
      WHERE callId IN (${sqlJoin(batch)})
    `;
    return db.prepare(updateQuery).run(updateParams);
  });
}
2024-07-22 18:16:33 +00:00
// Public wrapper around _markCallHistoryMissed that runs all batches in a
// single transaction.
function markCallHistoryMissed(
  db: WritableDB,
  callIds: ReadonlyArray<string>
): void {
  return db.transaction(() => _markCallHistoryMissed(db, callIds))();
}
// Minimal identifying info for a ringing call whose final state is unknown
// (e.g. after a crash); see getRecentStaleRingsAndMarkOlderMissed.
export type MaybeStaleCallHistory = Readonly<
  Pick<CallHistoryDetails, 'callId' | 'peerId'>
>;
2024-07-22 18:16:33 +00:00
// Finds group calls still stuck in the Ringing state. Keeps the most recent
// ring per peer (up to 10 peers total) and returns those so callers can
// check whether they are still active; everything else is marked Missed.
function getRecentStaleRingsAndMarkOlderMissed(
  db: WritableDB
): ReadonlyArray<MaybeStaleCallHistory> {
  return db.transaction(() => {
    const [selectQuery, selectParams] = sql`
      SELECT callId, peerId FROM callsHistory
      WHERE
        type = ${sqlConstant(CallType.Group)} AND
        status = ${sqlConstant(GroupCallStatus.Ringing)}
      ORDER BY timestamp DESC
    `;

    const ringingCalls = db.prepare(selectQuery).all(selectParams);

    const seen = new Set<string>();
    // Rows are newest-first, so the first call seen for each peer is its
    // latest ring; cap the "keep" set at 10 distinct peers.
    const [latestCalls, pastCalls] = partition(ringingCalls, result => {
      if (seen.size >= 10) {
        return false;
      }
      if (seen.has(result.peerId)) {
        return false;
      }
      seen.add(result.peerId);
      return true;
    });

    _markCallHistoryMissed(
      db,
      pastCalls.map(result => result.callId)
    );

    // These are returned so we can peek them.
    return latestCalls;
  })();
}
2024-07-22 18:16:33 +00:00
// Moves every message from an obsolete conversation id to its replacement
// (used e.g. when conversations merge). Besides re-pointing conversationId,
// it rewrites per-conversation send state both on the message itself and in
// each edit-history revision. Pages through rows to bound memory use.
export function migrateConversationMessages(
  db: WritableDB,
  obsoleteId: string,
  currentId: string
): void {
  const PAGE_SIZE = 1000;

  const getPage = db.prepare(`
    SELECT
      rowid,
      json -> '$.sendStateByConversationId' AS sendStateJson,
      json -> '$.editHistory' AS editHistoryJson
    FROM messages
    WHERE conversationId IS $obsoleteId
    ORDER BY rowid
    LIMIT $pageSize OFFSET $offset`);

  const updateOne = db.prepare(`
    UPDATE messages
    SET
      conversationId = $currentId,
      json = json_patch(json, $patch)
    WHERE
      rowid IS $rowid
  `);

  db.transaction(() => {
    // eslint-disable-next-line no-constant-condition
    for (let offset = 0; true; offset += PAGE_SIZE) {
      const parts: Array<{
        rowid: number;
        sendStateJson?: string;
        editHistoryJson?: string;
      }> = getPage.all({ obsoleteId, pageSize: PAGE_SIZE, offset });

      for (const { rowid, sendStateJson, editHistoryJson } of parts) {
        const editHistory = JSON.parse(editHistoryJson || '[]') as Array<{
          sendStateByConversationId?: Record<string, unknown>;
        }>;
        const sendState = JSON.parse(sendStateJson || '{}');
        const patch = {
          conversationId: currentId,
          // json_patch treats null as "delete this key", so the obsolete
          // entry is removed and its value re-keyed under currentId.
          sendStateByConversationId: {
            [obsoleteId]: null,
            [currentId]: sendState[obsoleteId],
          },
          // Unlike above here we have to provide the full object with all
          // existing properties because arrays can't be patched and can only
          // be replaced.
          editHistory: editHistory.map(
            ({ sendStateByConversationId, ...rest }) => {
              const existingState = sendStateByConversationId?.[obsoleteId];
              if (!existingState) {
                return rest;
              }

              return {
                ...rest,
                sendStateByConversationId: {
                  ...sendStateByConversationId,
                  [obsoleteId]: undefined,
                  [currentId]: existingState,
                },
              };
            }
          ),
        };

        updateOne.run({
          rowid,
          patch: JSON.stringify(patch),
          currentId,
        });
      }

      // A short page means we've consumed every matching row.
      if (parts.length < PAGE_SIZE) {
        break;
      }
    }
  })();
}
2024-07-22 18:16:33 +00:00
// Finds messages by sent_at, matching both the message's own timestamp and
// any of its edit revisions (via the edited_messages table); UNION dedupes.
function getMessagesBySentAt(
  db: ReadableDB,
  sentAt: number
): Array<MessageType> {
  const [query, params] = sql`
    SELECT messages.json, received_at, sent_at FROM edited_messages
    INNER JOIN messages ON
      messages.id = edited_messages.messageId
    WHERE edited_messages.sentAt = ${sentAt}
    UNION
    SELECT json, received_at, sent_at FROM messages
    WHERE sent_at = ${sentAt}
    ORDER BY messages.received_at DESC, messages.sent_at DESC;
  `;

  const rows = db.prepare(query).all(params);

  return rows.map(row => jsonToObject(row.json));
}
2024-07-22 18:16:33 +00:00
// Returns every message whose disappearing-message timer has elapsed
// (expiresAt at or before now), soonest-expiring first.
function getExpiredMessages(db: ReadableDB): Array<MessageType> {
  const now = Date.now();

  const rows: JSONRows = db
    .prepare<Query>(
      `
      SELECT json FROM messages WHERE
        expiresAt <= $now
      ORDER BY expiresAt ASC;
      `
    )
    .all({ now });

  return rows.map(row => jsonToObject(row.json));
}
2024-07-22 18:16:33 +00:00
// Finds messages with a disappearing-message timer whose countdown was never
// started (expirationStartTimestamp missing) even though it should have been:
// all outgoing messages, and incoming ones already read/viewed (or with an
// unknown read status). Uses a dedicated partial index for the lookup.
function getMessagesUnexpectedlyMissingExpirationStartTimestamp(
  db: ReadableDB
): Array<MessageType> {
  const rows: JSONRows = db
    .prepare<EmptyQuery>(
      `
      SELECT json FROM messages
      INDEXED BY messages_unexpectedly_missing_expiration_start_timestamp
      WHERE
        expireTimer > 0 AND
        expirationStartTimestamp IS NULL AND
        (
          type IS 'outgoing' OR
          (type IS 'incoming' AND (
            readStatus = ${ReadStatus.Read} OR
            readStatus = ${ReadStatus.Viewed} OR
            readStatus IS NULL
          ))
        );
      `
    )
    .all();

  return rows.map(row => jsonToObject(row.json));
}
2024-07-22 18:16:33 +00:00
// Returns the earliest expiresAt across all messages, or undefined when
// there is nothing to expire (no rows, or only sentinel MAX_SAFE_INTEGER
// values used for "never expires").
function getSoonestMessageExpiry(db: ReadableDB): undefined | number {
  // Note: we use `pluck` to only get the first column.
  const result: null | number = db
    .prepare<EmptyQuery>(
      `
      SELECT MIN(expiresAt)
      FROM messages;
      `
    )
    .pluck(true)
    .get();

  if (result != null && result >= Number.MAX_SAFE_INTEGER) {
    return undefined;
  }

  return result || undefined;
}
2024-07-22 18:16:33 +00:00
// Returns the receive timestamp of the oldest view-once message not yet
// erased — i.e. the next candidate to age out — or undefined if none exist.
function getNextTapToViewMessageTimestampToAgeOut(
  db: ReadableDB
): undefined | number {
  const row = db
    .prepare<EmptyQuery>(
      `
      SELECT json FROM messages
      WHERE
        isViewOnce = 1
        AND (isErased IS NULL OR isErased != 1)
      ORDER BY received_at ASC, sent_at ASC
      LIMIT 1;
      `
    )
    .get();

  if (!row) {
    return undefined;
  }

  const data = jsonToObject<MessageType>(row.json);
  // Prefer the wall-clock ms timestamp; fall back to received_at for
  // messages written before received_at_ms existed.
  const result = data.received_at_ms || data.received_at;
  return isNormalNumber(result) ? result : undefined;
}
2024-07-22 18:16:33 +00:00
// Returns view-once messages older than 30 days that were never erased —
// these have aged out and their content must be deleted.
function getTapToViewMessagesNeedingErase(db: ReadableDB): Array<MessageType> {
  const THIRTY_DAYS_AGO = Date.now() - 30 * 24 * 60 * 60 * 1000;

  const rows: JSONRows = db
    .prepare<Query>(
      `
      SELECT json
      FROM messages
      WHERE
        isViewOnce = 1
        AND (isErased IS NULL OR isErased != 1)
        AND received_at <= $THIRTY_DAYS_AGO
      ORDER BY received_at ASC, sent_at ASC;
      `
    )
    .all({
      THIRTY_DAYS_AGO,
    });

  return rows.map(row => jsonToObject(row.json));
}
// Unprocessed envelopes that failed decryption this many times are dropped
// during cleanup (see getAllUnprocessedIds).
const MAX_UNPROCESSED_ATTEMPTS = 10;
2021-09-17 23:11:24 +00:00
2024-07-22 18:16:33 +00:00
// Upserts one unprocessed (not-yet-decrypted) envelope, normalizing optional
// fields to NULL and booleans to 0/1. Returns the envelope id.
// Throws when the envelope has no id.
function saveUnprocessed(db: WritableDB, data: UnprocessedType): string {
  const {
    id,
    timestamp,
    receivedAtCounter,
    version,
    attempts,
    envelope,
    source,
    sourceServiceId,
    sourceDevice,
    serverGuid,
    serverTimestamp,
    decrypted,
    urgent,
    story,
  } = data;
  if (!id) {
    throw new Error('saveUnprocessed: id was falsey');
  }

  prepare(
    db,
    `
    INSERT OR REPLACE INTO unprocessed (
      id,
      timestamp,
      receivedAtCounter,
      version,
      attempts,
      envelope,
      source,
      sourceServiceId,
      sourceDevice,
      serverGuid,
      serverTimestamp,
      decrypted,
      urgent,
      story
    ) values (
      $id,
      $timestamp,
      $receivedAtCounter,
      $version,
      $attempts,
      $envelope,
      $source,
      $sourceServiceId,
      $sourceDevice,
      $serverGuid,
      $serverTimestamp,
      $decrypted,
      $urgent,
      $story
    );
    `
  ).run({
    id,
    timestamp,
    receivedAtCounter: receivedAtCounter ?? null,
    version,
    attempts,
    envelope: envelope || null,
    source: source || null,
    sourceServiceId: sourceServiceId || null,
    sourceDevice: sourceDevice || null,
    serverGuid: serverGuid || null,
    serverTimestamp: serverTimestamp || null,
    decrypted: decrypted || null,
    // Parenthesized as (urgent || !isBoolean(urgent)): missing/non-boolean
    // urgent defaults to 1 (treat as urgent).
    urgent: urgent || !isBoolean(urgent) ? 1 : 0,
    story: story ? 1 : 0,
  });

  return id;
}
2024-07-22 18:16:33 +00:00
// Updates the mutable fields of one unprocessed envelope (typically after a
// decryption attempt), normalizing missing values to NULL.
function updateUnprocessedWithData(
  db: WritableDB,
  id: string,
  data: UnprocessedUpdateType
): void {
  const {
    source,
    sourceServiceId,
    sourceDevice,
    serverGuid,
    serverTimestamp,
    decrypted,
  } = data;

  prepare(
    db,
    `
    UPDATE unprocessed SET
      source = $source,
      sourceServiceId = $sourceServiceId,
      sourceDevice = $sourceDevice,
      serverGuid = $serverGuid,
      serverTimestamp = $serverTimestamp,
      decrypted = $decrypted
    WHERE id = $id;
    `
  ).run({
    id,
    source: source || null,
    sourceServiceId: sourceServiceId || null,
    sourceDevice: sourceDevice || null,
    serverGuid: serverGuid || null,
    serverTimestamp: serverTimestamp || null,
    decrypted: decrypted || null,
  });
}
2024-07-22 18:16:33 +00:00
// Applies updateUnprocessedWithData to many envelopes inside a single
// transaction (all-or-nothing).
function updateUnprocessedsWithData(
  db: WritableDB,
  arrayOfUnprocessed: Array<{ id: string; data: UnprocessedUpdateType }>
): void {
  db.transaction(() => {
    for (const { id, data } of arrayOfUnprocessed) {
      updateUnprocessedWithData(db, id, data);
    }
  })();
}
2024-07-22 18:16:33 +00:00
function getUnprocessedById(
db: ReadableDB,
id: string
2024-07-22 18:16:33 +00:00
): UnprocessedType | undefined {
const row = db
.prepare<Query>('SELECT * FROM unprocessed WHERE id = $id;')
.get({
id,
});
return {
...row,
urgent: isNumber(row.urgent) ? Boolean(row.urgent) : true,
story: Boolean(row.story),
};
}
2024-07-22 18:16:33 +00:00
// Number of envelopes still awaiting processing.
function getUnprocessedCount(db: ReadableDB): number {
  return getCountFromTable(db, 'unprocessed');
}
2024-07-22 18:16:33 +00:00
// Returns all unprocessed envelope ids in receive order, after first pruning
// envelopes older than a month and envelopes that exceeded the retry cap.
// Takes WritableDB because of that cleanup.
function getAllUnprocessedIds(db: WritableDB): Array<string> {
  log.info('getAllUnprocessedIds');
  return db.transaction(() => {
    // cleanup first
    const { changes: deletedStaleCount } = db
      .prepare<Query>('DELETE FROM unprocessed WHERE timestamp < $monthAgo')
      .run({
        monthAgo: Date.now() - durations.MONTH,
      });

    if (deletedStaleCount !== 0) {
      // NOTE(review): log label names 'getAllUnprocessedAndIncrementAttempts'
      // (an older function); presumably left over from a rename — verify.
      logger.warn(
        'getAllUnprocessedAndIncrementAttempts: ' +
          `deleting ${deletedStaleCount} old unprocessed envelopes`
      );
    }

    const { changes: deletedInvalidCount } = db
      .prepare<Query>(
        `
        DELETE FROM unprocessed
        WHERE attempts >= $MAX_UNPROCESSED_ATTEMPTS
        `
      )
      .run({ MAX_UNPROCESSED_ATTEMPTS });

    if (deletedInvalidCount !== 0) {
      logger.warn(
        'getAllUnprocessedAndIncrementAttempts: ' +
          `deleting ${deletedInvalidCount} invalid unprocessed envelopes`
      );
    }

    return db
      .prepare<EmptyQuery>(
        `
        SELECT id
        FROM unprocessed
        ORDER BY receivedAtCounter ASC
        `
      )
      .pluck()
      .all();
  })();
}
2024-07-22 18:16:33 +00:00
// Bumps the attempt counter on each envelope, then returns the envelopes in
// receive order with 0/1 columns converted back to booleans. Both the update
// and the select are batched to respect SQLite's bound-variable limit.
function getUnprocessedByIdsAndIncrementAttempts(
  db: WritableDB,
  ids: ReadonlyArray<string>
): Array<UnprocessedType> {
  log.info('getUnprocessedByIdsAndIncrementAttempts', { totalIds: ids.length });

  batchMultiVarQuery(db, ids, batch => {
    return db
      .prepare<ArrayQuery>(
        `
        UPDATE unprocessed
        SET attempts = attempts + 1
        WHERE id IN (${batch.map(() => '?').join(', ')})
        `
      )
      .run(batch);
  });

  return batchMultiVarQuery(db, ids, batch => {
    return db
      .prepare<ArrayQuery>(
        `
        SELECT *
        FROM unprocessed
        WHERE id IN (${batch.map(() => '?').join(', ')})
        ORDER BY receivedAtCounter ASC;
        `
      )
      .all(batch)
      .map(row => ({
        ...row,
        // Missing/non-numeric urgent defaults to true.
        urgent: isNumber(row.urgent) ? Boolean(row.urgent) : true,
        story: Boolean(row.story),
      }));
  });
}
2024-07-22 18:16:33 +00:00
// Deletes a batch of unprocessed envelopes by id. Callers are expected to
// keep `ids` within SQLite's bound-variable limit (see removeUnprocessed).
function removeUnprocesseds(db: WritableDB, ids: ReadonlyArray<string>): void {
  log.info('removeUnprocesseds', { totalIds: ids.length });
  db.prepare<ArrayQuery>(
    `
    DELETE FROM unprocessed
    WHERE id IN ( ${ids.map(() => '?').join(', ')} );
    `
  ).run(ids);
}
2024-07-22 18:16:33 +00:00
// Deletes one unprocessed envelope, or a list of them (batched to respect
// SQLite's bound-variable limit). An empty array is a harmless no-op.
function removeUnprocessed(db: WritableDB, id: string | Array<string>): void {
  log.info('removeUnprocessedSync', { id });
  if (!Array.isArray(id)) {
    prepare(db, 'DELETE FROM unprocessed WHERE id = $id;').run({ id });

    return;
  }

  // This can happen normally due to flushing of `cacheRemoveBatcher` in
  // MessageReceiver.
  if (!id.length) {
    return;
  }

  batchMultiVarQuery(db, id, batch => removeUnprocesseds(db, batch));
}
2024-07-22 18:16:33 +00:00
// Drops every pending envelope (used when clearing local state).
function removeAllUnprocessed(db: WritableDB): void {
  db.prepare<EmptyQuery>('DELETE FROM unprocessed;').run();
}
// Attachment Downloads
// Attachment Downloads

// Looks up one download job by its composite key
// (messageId, attachmentType, digest).
// NOTE(review): declared to return AttachmentDownloadJobType, but .get()
// yields undefined when no row matches — callers should treat the result as
// possibly undefined.
function getAttachmentDownloadJob(
  db: ReadableDB,
  job: Pick<
    AttachmentDownloadJobType,
    'messageId' | 'attachmentType' | 'digest'
  >
): AttachmentDownloadJobType {
  const [query, params] = sql`
    SELECT * FROM attachment_downloads
    WHERE
      messageId = ${job.messageId}
    AND
      attachmentType = ${job.attachmentType}
    AND
      digest = ${job.digest};
  `;

  return db.prepare(query).get(params);
}
2024-07-22 18:16:33 +00:00
function getNextAttachmentDownloadJobs(
db: WritableDB,
{
limit = 3,
prioritizeMessageIds,
timestamp = Date.now(),
maxLastAttemptForPrioritizedMessages,
}: {
limit: number;
prioritizeMessageIds?: Array<string>;
timestamp?: number;
maxLastAttemptForPrioritizedMessages?: number;
}
): Array<AttachmentDownloadJobType> {
let priorityJobs = [];
// First, try to get jobs for prioritized messages (e.g. those currently user-visible)
if (prioritizeMessageIds?.length) {
const [priorityQuery, priorityParams] = sql`
SELECT * FROM attachment_downloads
-- very few rows will match messageIds, so in this case we want to optimize
-- the WHERE clause rather than the ORDER BY
INDEXED BY attachment_downloads_active_messageId
WHERE
active = 0
AND
-- for priority messages, we want to retry based on the last attempt, rather than retryAfter
(lastAttemptTimestamp is NULL OR lastAttemptTimestamp <= ${
maxLastAttemptForPrioritizedMessages ?? timestamp - durations.HOUR
})
AND
messageId IN (${sqlJoin(prioritizeMessageIds)})
2024-05-20 18:15:39 +00:00
-- for priority messages, let's load them oldest first; this helps, e.g. for stories where we
-- want the oldest one first
ORDER BY receivedAt ASC
LIMIT ${limit}
`;
priorityJobs = db.prepare(priorityQuery).all(priorityParams);
}
// Next, get any other jobs, sorted by receivedAt
const numJobsRemaining = limit - priorityJobs.length;
let standardJobs = [];
if (numJobsRemaining > 0) {
const [query, params] = sql`
SELECT * FROM attachment_downloads
WHERE
active = 0
AND
(retryAfter is NULL OR retryAfter <= ${timestamp})
ORDER BY receivedAt DESC
LIMIT ${numJobsRemaining}
`;
standardJobs = db.prepare(query).all(params);
}
const allJobs = priorityJobs.concat(standardJobs);
const INNER_ERROR = 'jsonToObject or SchemaParse error';
try {
return allJobs.map(row => {
try {
return attachmentDownloadJobSchema.parse({
...row,
active: Boolean(row.active),
attachment: jsonToObject(row.attachmentJson),
});
} catch (error) {
logger.error(
`getNextAttachmentDownloadJobs: Error with job for message ${row.messageId}, deleting.`
);
2024-07-22 18:16:33 +00:00
removeAttachmentDownloadJob(db, row);
throw new Error(error);
}
});
} catch (error) {
if ('message' in error && error.message === INNER_ERROR) {
2024-07-22 18:16:33 +00:00
return getNextAttachmentDownloadJobs(db, {
limit,
prioritizeMessageIds,
timestamp,
maxLastAttemptForPrioritizedMessages,
});
}
throw error;
}
}
2024-07-22 18:16:33 +00:00
// Upserts one download job, serializing the attachment pointer to JSON and
// storing booleans as 0/1.
function saveAttachmentDownloadJob(
  db: WritableDB,
  job: AttachmentDownloadJobType
): void {
  const [query, params] = sql`
    INSERT OR REPLACE INTO attachment_downloads (
      messageId,
      attachmentType,
      digest,
      receivedAt,
      sentAt,
      contentType,
      size,
      active,
      attempts,
      retryAfter,
      lastAttemptTimestamp,
      attachmentJson
    ) VALUES (
      ${job.messageId},
      ${job.attachmentType},
      ${job.digest},
      ${job.receivedAt},
      ${job.sentAt},
      ${job.contentType},
      ${job.size},
      ${job.active ? 1 : 0},
      ${job.attempts},
      ${job.retryAfter},
      ${job.lastAttemptTimestamp},
      ${objectToJSON(job.attachment)}
    );
  `;
  db.prepare(query).run(params);
}
2024-07-22 18:16:33 +00:00
// Clears the in-flight flag on all download jobs, e.g. after a restart when
// no job can actually still be running.
function resetAttachmentDownloadActive(db: WritableDB): void {
  db.prepare<EmptyQuery>(
    `
    UPDATE attachment_downloads
    SET active = 0
    WHERE active != 0;
    `
  ).run();
}
2024-07-22 18:16:33 +00:00
// Deletes one download job by its composite key
// (messageId, attachmentType, digest).
function removeAttachmentDownloadJob(
  db: WritableDB,
  job: AttachmentDownloadJobType
): void {
  const [query, params] = sql`
    DELETE FROM attachment_downloads
    WHERE
      messageId = ${job.messageId}
    AND
      attachmentType = ${job.attachmentType}
    AND
      digest = ${job.digest};
  `;

  db.prepare(query).run(params);
}
2024-05-29 23:46:43 +00:00
// Backup Attachments
2024-07-22 18:16:33 +00:00
function clearAllAttachmentBackupJobs(db: WritableDB): void {
db.prepare('DELETE FROM attachment_backup_jobs;').run();
}
2024-07-22 18:16:33 +00:00
function markAllAttachmentBackupJobsInactive(db: WritableDB): void {
2024-05-29 23:46:43 +00:00
db.prepare<EmptyQuery>(
`
UPDATE attachment_backup_jobs
SET active = 0;
`
).run();
}
2024-07-22 18:16:33 +00:00
function saveAttachmentBackupJob(
db: WritableDB,
2024-05-29 23:46:43 +00:00
job: AttachmentBackupJobType
2024-07-22 18:16:33 +00:00
): void {
2024-05-29 23:46:43 +00:00
const [query, params] = sql`
INSERT OR REPLACE INTO attachment_backup_jobs (
active,
attempts,
data,
lastAttemptTimestamp,
2024-06-26 00:58:38 +00:00
mediaName,
2024-05-29 23:46:43 +00:00
receivedAt,
retryAfter,
type
) VALUES (
${job.active ? 1 : 0},
${job.attempts},
${objectToJSON(job.data)},
${job.lastAttemptTimestamp},
${job.mediaName},
${job.receivedAt},
${job.retryAfter},
${job.type}
);
`;
db.prepare(query).run(params);
}
2024-07-22 18:16:33 +00:00
function getNextAttachmentBackupJobs(
db: WritableDB,
{
limit,
timestamp = Date.now(),
}: {
limit: number;
timestamp?: number;
}
): Array<AttachmentBackupJobType> {
2024-05-29 23:46:43 +00:00
const [query, params] = sql`
SELECT * FROM attachment_backup_jobs
WHERE
active = 0
AND
(retryAfter is NULL OR retryAfter <= ${timestamp})
ORDER BY
-- type is "standard" or "thumbnail"; we prefer "standard" jobs
type ASC, receivedAt DESC
2024-05-29 23:46:43 +00:00
LIMIT ${limit}
`;
const rows = db.prepare(query).all(params);
return rows
.map(row => {
const parseResult = attachmentBackupJobSchema.safeParse({
...row,
active: Boolean(row.active),
data: jsonToObject(row.data),
});
if (!parseResult.success) {
const redactedMediaName = redactGenericText(row.mediaName);
logger.error(
`getNextAttachmentBackupJobs: invalid data, removing. mediaName: ${redactedMediaName}`,
Errors.toLogFormat(parseResult.error)
);
2024-07-22 18:16:33 +00:00
removeAttachmentBackupJob(db, { mediaName: row.mediaName });
2024-05-29 23:46:43 +00:00
return null;
}
return parseResult.data;
})
.filter(isNotNil);
}
2024-07-22 18:16:33 +00:00
function removeAttachmentBackupJob(
db: WritableDB,
2024-05-29 23:46:43 +00:00
job: Pick<AttachmentBackupJobType, 'mediaName'>
): void {
const [query, params] = sql`
2024-07-22 18:16:33 +00:00
DELETE FROM attachment_backup_jobs
WHERE
mediaName = ${job.mediaName};
`;
2024-05-29 23:46:43 +00:00
db.prepare(query).run(params);
}
// Attachments on backup CDN
2024-07-22 18:16:33 +00:00
function clearAllBackupCdnObjectMetadata(db: WritableDB): void {
2024-05-29 23:46:43 +00:00
db.prepare('DELETE FROM backup_cdn_object_metadata;').run();
}
2024-07-22 18:16:33 +00:00
function saveBackupCdnObjectMetadata(
db: WritableDB,
2024-05-29 23:46:43 +00:00
storedMediaObjects: Array<BackupCdnMediaObjectType>
2024-07-22 18:16:33 +00:00
): void {
2024-05-29 23:46:43 +00:00
db.transaction(() => {
for (const obj of storedMediaObjects) {
2024-07-22 18:16:33 +00:00
const { mediaId, cdnNumber, sizeOnBackupCdn } = obj;
const [query, params] = sql`
INSERT OR REPLACE INTO backup_cdn_object_metadata
(
mediaId,
cdnNumber,
sizeOnBackupCdn
) VALUES (
${mediaId},
${cdnNumber},
${sizeOnBackupCdn}
);
`;
db.prepare(query).run(params);
2024-05-29 23:46:43 +00:00
}
})();
}
2024-07-22 18:16:33 +00:00
function getBackupCdnObjectMetadata(
db: ReadableDB,
2024-05-29 23:46:43 +00:00
mediaId: string
2024-07-22 18:16:33 +00:00
): BackupCdnMediaObjectType | undefined {
2024-07-24 00:31:40 +00:00
const [query, params] =
sql`SELECT * from backup_cdn_object_metadata WHERE mediaId = ${mediaId}`;
2024-05-29 23:46:43 +00:00
return db.prepare(query).get(params);
}
// Stickers
2024-07-22 18:16:33 +00:00
function createOrUpdateStickerPack(
db: WritableDB,
pack: StickerPackType
): void {
const {
attemptedStatus,
author,
coverStickerId,
createdAt,
downloadAttempts,
id,
installedAt,
key,
lastUsed,
status,
stickerCount,
title,
2022-08-03 17:10:49 +00:00
storageID,
storageVersion,
storageUnknownFields,
storageNeedsSync,
} = pack;
if (!id) {
throw new Error(
'createOrUpdateStickerPack: Provided data did not have a truthy id'
);
}
2022-08-03 17:10:49 +00:00
let { position } = pack;
// Assign default position
if (!isNumber(position)) {
position = db
.prepare<EmptyQuery>(
`
SELECT IFNULL(MAX(position) + 1, 0)
FROM sticker_packs
`
)
.pluck()
.get();
}
const row = db
.prepare<Query>(
`
SELECT id
FROM sticker_packs
WHERE id = $id;
`
)
2022-08-03 17:10:49 +00:00
.get({ id });
const payload = {
2021-07-09 19:36:10 +00:00
attemptedStatus: attemptedStatus ?? null,
author,
coverStickerId,
createdAt: createdAt || Date.now(),
downloadAttempts: downloadAttempts || 1,
id,
2021-07-09 19:36:10 +00:00
installedAt: installedAt ?? null,
key,
lastUsed: lastUsed || null,
status,
stickerCount,
title,
2022-08-03 17:10:49 +00:00
position: position ?? 0,
storageID: storageID ?? null,
storageVersion: storageVersion ?? null,
storageUnknownFields: storageUnknownFields ?? null,
storageNeedsSync: storageNeedsSync ? 1 : 0,
};
2022-08-03 17:10:49 +00:00
if (row) {
db.prepare<Query>(
`
UPDATE sticker_packs SET
attemptedStatus = $attemptedStatus,
author = $author,
coverStickerId = $coverStickerId,
createdAt = $createdAt,
downloadAttempts = $downloadAttempts,
installedAt = $installedAt,
key = $key,
lastUsed = $lastUsed,
status = $status,
stickerCount = $stickerCount,
2022-08-03 17:10:49 +00:00
title = $title,
position = $position,
storageID = $storageID,
storageVersion = $storageVersion,
storageUnknownFields = $storageUnknownFields,
storageNeedsSync = $storageNeedsSync
WHERE id = $id;
`
).run(payload);
return;
}
db.prepare<Query>(
`
INSERT INTO sticker_packs (
attemptedStatus,
author,
coverStickerId,
createdAt,
downloadAttempts,
id,
installedAt,
key,
lastUsed,
status,
stickerCount,
2022-08-03 17:10:49 +00:00
title,
position,
storageID,
storageVersion,
storageUnknownFields,
storageNeedsSync
) values (
$attemptedStatus,
$author,
$coverStickerId,
$createdAt,
$downloadAttempts,
$id,
$installedAt,
$key,
$lastUsed,
$status,
$stickerCount,
2022-08-03 17:10:49 +00:00
$title,
$position,
$storageID,
$storageVersion,
$storageUnknownFields,
$storageNeedsSync
)
`
).run(payload);
}
2024-07-22 18:16:33 +00:00
function updateStickerPackStatus(
db: WritableDB,
id: string,
status: StickerPackStatusType,
options?: { timestamp: number }
2022-08-03 17:10:49 +00:00
): void {
const timestamp = options ? options.timestamp || Date.now() : Date.now();
const installedAt = status === 'installed' ? timestamp : null;
db.prepare<Query>(
`
UPDATE sticker_packs
SET status = $status, installedAt = $installedAt
WHERE id = $id;
`
).run({
id,
status,
installedAt,
});
}
2024-07-22 18:16:33 +00:00
function updateStickerPackInfo(
db: WritableDB,
{
id,
storageID,
storageVersion,
storageUnknownFields,
storageNeedsSync,
uninstalledAt,
}: StickerPackInfoType
): void {
2022-08-03 17:10:49 +00:00
if (uninstalledAt) {
db.prepare<Query>(
`
UPDATE uninstalled_sticker_packs
SET
storageID = $storageID,
storageVersion = $storageVersion,
storageUnknownFields = $storageUnknownFields,
storageNeedsSync = $storageNeedsSync
WHERE id = $id;
`
).run({
id,
storageID: storageID ?? null,
storageVersion: storageVersion ?? null,
storageUnknownFields: storageUnknownFields ?? null,
storageNeedsSync: storageNeedsSync ? 1 : 0,
});
} else {
db.prepare<Query>(
`
UPDATE sticker_packs
SET
storageID = $storageID,
storageVersion = $storageVersion,
storageUnknownFields = $storageUnknownFields,
storageNeedsSync = $storageNeedsSync
WHERE id = $id;
`
).run({
id,
storageID: storageID ?? null,
storageVersion: storageVersion ?? null,
storageUnknownFields: storageUnknownFields ?? null,
storageNeedsSync: storageNeedsSync ? 1 : 0,
});
}
}
2024-07-22 18:16:33 +00:00
function clearAllErrorStickerPackAttempts(db: WritableDB): void {
db.prepare<EmptyQuery>(
`
UPDATE sticker_packs
SET downloadAttempts = 0
WHERE status = 'error';
`
).run();
2021-01-27 22:39:45 +00:00
}
2024-07-22 18:16:33 +00:00
function createOrUpdateSticker(db: WritableDB, sticker: StickerType): void {
2024-07-11 19:44:09 +00:00
const {
emoji,
height,
id,
isCoverOnly,
lastUsed,
packId,
path,
width,
version,
localKey,
size,
} = sticker;
if (!isNumber(id)) {
throw new Error(
'createOrUpdateSticker: Provided data did not have a numeric id'
);
}
if (!packId) {
throw new Error(
'createOrUpdateSticker: Provided data did not have a truthy id'
);
}
db.prepare<Query>(
`
INSERT OR REPLACE INTO stickers (
emoji,
height,
id,
isCoverOnly,
lastUsed,
packId,
path,
2024-07-11 19:44:09 +00:00
width,
version,
localKey,
size
) values (
$emoji,
$height,
$id,
$isCoverOnly,
$lastUsed,
$packId,
$path,
2024-07-11 19:44:09 +00:00
$width,
$version,
$localKey,
$size
)
`
).run({
2021-07-09 19:36:10 +00:00
emoji: emoji ?? null,
height,
id,
isCoverOnly: isCoverOnly ? 1 : 0,
lastUsed: lastUsed || null,
packId,
path,
width,
2024-07-11 19:44:09 +00:00
version: version || 1,
localKey: localKey || null,
size: size || null,
});
}
2024-07-22 18:16:33 +00:00
function createOrUpdateStickers(
db: WritableDB,
2024-07-11 19:44:09 +00:00
stickers: ReadonlyArray<StickerType>
2024-07-22 18:16:33 +00:00
): void {
2024-07-11 19:44:09 +00:00
db.transaction(() => {
for (const sticker of stickers) {
2024-07-22 18:16:33 +00:00
createOrUpdateSticker(db, sticker);
2024-07-11 19:44:09 +00:00
}
})();
}
2024-07-22 18:16:33 +00:00
function updateStickerLastUsed(
db: WritableDB,
packId: string,
stickerId: number,
lastUsed: number
2024-07-22 18:16:33 +00:00
): void {
db.prepare<Query>(
`
UPDATE stickers
SET lastUsed = $lastUsed
WHERE id = $id AND packId = $packId;
`
).run({
id: stickerId,
packId,
lastUsed,
});
db.prepare<Query>(
`
UPDATE sticker_packs
SET lastUsed = $lastUsed
WHERE id = $id;
`
).run({
id: packId,
lastUsed,
});
}
2024-07-22 18:16:33 +00:00
function addStickerPackReference(
db: WritableDB,
messageId: string,
packId: string
2024-07-22 18:16:33 +00:00
): void {
if (!messageId) {
throw new Error(
'addStickerPackReference: Provided data did not have a truthy messageId'
);
}
if (!packId) {
throw new Error(
'addStickerPackReference: Provided data did not have a truthy packId'
);
}
db.prepare<Query>(
`
INSERT OR REPLACE INTO sticker_references (
messageId,
packId
) values (
$messageId,
$packId
)
`
2021-04-09 00:50:25 +00:00
).run({
messageId,
packId,
});
}
2024-07-22 18:16:33 +00:00
function deleteStickerPackReference(
db: WritableDB,
messageId: string,
packId: string
2024-07-22 18:16:33 +00:00
): ReadonlyArray<string> | undefined {
if (!messageId) {
throw new Error(
'addStickerPackReference: Provided data did not have a truthy messageId'
);
}
if (!packId) {
throw new Error(
'addStickerPackReference: Provided data did not have a truthy packId'
);
}
2024-07-22 18:16:33 +00:00
return db.transaction(() => {
// We use an immediate transaction here to immediately acquire an exclusive lock,
// which would normally only happen when we did our first write.
// We need this to ensure that our five queries are all atomic, with no
// other changes happening while we do it:
// 1. Delete our target messageId/packId references
// 2. Check the number of references still pointing at packId
// 3. If that number is zero, get pack from sticker_packs database
// 4. If it's not installed, then grab all of its sticker paths
// 5. If it's not installed, then sticker pack (which cascades to all
// stickers and references)
db.prepare<Query>(
`
DELETE FROM sticker_references
WHERE messageId = $messageId AND packId = $packId;
`
2024-07-22 18:16:33 +00:00
).run({
messageId,
packId,
});
2024-07-22 18:16:33 +00:00
const count = db
.prepare<Query>(
`
2023-01-17 21:07:21 +00:00
SELECT count(1) FROM sticker_references
WHERE packId = $packId;
`
2024-07-22 18:16:33 +00:00
)
.pluck()
.get({ packId });
if (count > 0) {
return undefined;
}
2024-07-22 18:16:33 +00:00
const packRow: { status: StickerPackStatusType } = db
.prepare<Query>(
`
SELECT status FROM sticker_packs
WHERE id = $packId;
`
2024-07-22 18:16:33 +00:00
)
.get({ packId });
if (!packRow) {
logger.warn('deleteStickerPackReference: did not find referenced pack');
return undefined;
}
const { status } = packRow;
2024-07-22 18:16:33 +00:00
if (status === 'installed') {
return undefined;
}
2024-07-22 18:16:33 +00:00
const stickerPathRows: Array<{ path: string }> = db
.prepare<Query>(
`
SELECT path FROM stickers
WHERE packId = $packId;
`
2024-07-22 18:16:33 +00:00
)
.all({
packId,
});
db.prepare<Query>(
`
DELETE FROM sticker_packs
WHERE id = $packId;
`
2024-07-22 18:16:33 +00:00
).run({
packId,
});
2024-07-22 18:16:33 +00:00
return (stickerPathRows || []).map(row => row.path);
})();
}
2024-07-22 18:16:33 +00:00
function deleteStickerPack(db: WritableDB, packId: string): Array<string> {
if (!packId) {
throw new Error(
'deleteStickerPack: Provided data did not have a truthy packId'
);
}
2024-07-22 18:16:33 +00:00
return db.transaction(() => {
// We use an immediate transaction here to immediately acquire an exclusive lock,
// which would normally only happen when we did our first write.
2024-07-22 18:16:33 +00:00
// We need this to ensure that our two queries are atomic, with no other changes
// happening while we do it:
// 1. Grab all of target pack's sticker paths
// 2. Delete sticker pack (which cascades to all stickers and references)
2024-07-22 18:16:33 +00:00
const stickerPathRows: Array<{ path: string }> = db
.prepare<Query>(
`
SELECT path FROM stickers
WHERE packId = $packId;
`
2024-07-22 18:16:33 +00:00
)
.all({
packId,
});
db.prepare<Query>(
`
DELETE FROM sticker_packs
WHERE id = $packId;
`
2024-07-22 18:16:33 +00:00
).run({ packId });
2024-07-22 18:16:33 +00:00
return (stickerPathRows || []).map(row => row.path);
})();
}
2024-07-22 18:16:33 +00:00
function getStickerCount(db: ReadableDB): number {
return getCountFromTable(db, 'stickers');
}
2024-07-22 18:16:33 +00:00
function getAllStickerPacks(db: ReadableDB): Array<StickerPackType> {
const rows = db
.prepare<EmptyQuery>(
`
SELECT * FROM sticker_packs
2022-08-03 17:10:49 +00:00
ORDER BY position ASC, id ASC
`
)
.all();
2023-05-04 18:36:17 +00:00
return rows.map(row => {
return {
...row,
// The columns have STRING type so if they have numeric value, sqlite
// will return integers.
author: String(row.author),
title: String(row.title),
};
});
2022-08-03 17:10:49 +00:00
}
2024-07-22 18:16:33 +00:00
function addUninstalledStickerPack(
db: WritableDB,
2023-10-03 00:27:02 +00:00
pack: UninstalledStickerPackType
): void {
2022-08-03 17:10:49 +00:00
db.prepare<Query>(
`
INSERT OR REPLACE INTO uninstalled_sticker_packs
(
id, uninstalledAt, storageID, storageVersion, storageUnknownFields,
storageNeedsSync
)
VALUES
(
$id, $uninstalledAt, $storageID, $storageVersion, $unknownFields,
$storageNeedsSync
)
`
).run({
id: pack.id,
uninstalledAt: pack.uninstalledAt,
storageID: pack.storageID ?? null,
storageVersion: pack.storageVersion ?? null,
unknownFields: pack.storageUnknownFields ?? null,
storageNeedsSync: pack.storageNeedsSync ? 1 : 0,
});
}
2024-07-22 18:16:33 +00:00
function removeUninstalledStickerPack(db: WritableDB, packId: string): void {
2022-08-03 17:10:49 +00:00
db.prepare<Query>(
'DELETE FROM uninstalled_sticker_packs WHERE id IS $id'
).run({ id: packId });
}
2024-07-22 18:16:33 +00:00
function getUninstalledStickerPacks(
db: ReadableDB
): Array<UninstalledStickerPackType> {
2022-08-03 17:10:49 +00:00
const rows = db
.prepare<EmptyQuery>(
'SELECT * FROM uninstalled_sticker_packs ORDER BY id ASC'
)
.all();
return rows || [];
}
2024-07-22 18:16:33 +00:00
function getInstalledStickerPacks(db: ReadableDB): Array<StickerPackType> {
2022-08-03 17:10:49 +00:00
// If sticker pack has a storageID - it is being downloaded and about to be
// installed so we better sync it back to storage service if asked.
const rows = db
.prepare<EmptyQuery>(
`
SELECT *
FROM sticker_packs
WHERE
2023-01-18 22:12:33 +00:00
status IS 'installed' OR
2022-08-03 17:10:49 +00:00
storageID IS NOT NULL
ORDER BY id ASC
`
)
.all();
return rows || [];
}
2024-07-22 18:16:33 +00:00
function getStickerPackInfo(
db: ReadableDB,
2022-08-03 17:10:49 +00:00
packId: string
2024-07-22 18:16:33 +00:00
): StickerPackInfoType | undefined {
2022-08-03 17:10:49 +00:00
return db.transaction(() => {
const uninstalled = db
.prepare<Query>(
`
SELECT * FROM uninstalled_sticker_packs
WHERE id IS $packId
`
)
.get({ packId });
if (uninstalled) {
return uninstalled as UninstalledStickerPackType;
}
const installed = db
.prepare<Query>(
`
SELECT
id, key, position, storageID, storageVersion, storageUnknownFields
FROM sticker_packs
WHERE id IS $packId
`
)
.get({ packId });
if (installed) {
return installed as InstalledStickerPackType;
}
return undefined;
})();
}
2024-07-22 18:16:33 +00:00
function installStickerPack(
db: WritableDB,
2022-08-03 17:10:49 +00:00
packId: string,
timestamp: number
2024-07-22 18:16:33 +00:00
): void {
2022-08-03 17:10:49 +00:00
return db.transaction(() => {
const status = 'installed';
2024-07-22 18:16:33 +00:00
updateStickerPackStatus(db, packId, status, { timestamp });
2022-08-03 17:10:49 +00:00
2024-07-22 18:16:33 +00:00
removeUninstalledStickerPack(db, packId);
2022-08-03 17:10:49 +00:00
})();
}
2024-07-22 18:16:33 +00:00
function uninstallStickerPack(
db: WritableDB,
2022-08-03 17:10:49 +00:00
packId: string,
timestamp: number
2024-07-22 18:16:33 +00:00
): void {
2022-08-03 17:10:49 +00:00
return db.transaction(() => {
const status = 'downloaded';
2024-07-22 18:16:33 +00:00
updateStickerPackStatus(db, packId, status);
2022-08-03 17:10:49 +00:00
db.prepare<Query>(
`
UPDATE sticker_packs SET
storageID = NULL,
storageVersion = NULL,
storageUnknownFields = NULL,
storageNeedsSync = 0
WHERE id = $packId;
`
).run({ packId });
2024-07-22 18:16:33 +00:00
addUninstalledStickerPack(db, {
2022-08-03 17:10:49 +00:00
id: packId,
uninstalledAt: timestamp,
storageNeedsSync: true,
});
})();
}
2024-07-22 18:16:33 +00:00
function getAllStickers(db: ReadableDB): Array<StickerType> {
const rows = db
.prepare<EmptyQuery>(
`
SELECT * FROM stickers
ORDER BY packId ASC, id ASC
`
)
.all();
return (rows || []).map(row => rowToSticker(row));
}
2024-07-22 18:16:33 +00:00
function getRecentStickers(
db: ReadableDB,
{ limit }: { limit?: number } = {}
): Array<StickerType> {
// Note: we avoid 'IS NOT NULL' here because it does seem to bypass our index
const rows = db
.prepare<Query>(
`
SELECT stickers.* FROM stickers
JOIN sticker_packs on stickers.packId = sticker_packs.id
WHERE stickers.lastUsed > 0 AND sticker_packs.status = 'installed'
ORDER BY stickers.lastUsed DESC
LIMIT $limit
`
)
.all({
limit: limit || 24,
});
return (rows || []).map(row => rowToSticker(row));
}
2019-05-24 23:58:27 +00:00
// Emojis
2024-07-22 18:16:33 +00:00
function updateEmojiUsage(
db: WritableDB,
shortName: string,
timeUsed: number = Date.now()
2024-07-22 18:16:33 +00:00
): void {
db.transaction(() => {
const rows = db
.prepare<Query>(
`
SELECT * FROM emojis
WHERE shortName = $shortName;
`
)
.get({
shortName,
});
if (rows) {
db.prepare<Query>(
`
UPDATE emojis
SET lastUsage = $timeUsed
WHERE shortName = $shortName;
`
).run({ shortName, timeUsed });
} else {
db.prepare<Query>(
`
INSERT INTO emojis(shortName, lastUsage)
VALUES ($shortName, $timeUsed);
`
).run({ shortName, timeUsed });
2019-05-24 23:58:27 +00:00
}
})();
2019-05-24 23:58:27 +00:00
}
2024-07-22 18:16:33 +00:00
function getRecentEmojis(db: ReadableDB, limit = 32): Array<EmojiType> {
const rows = db
.prepare<Query>(
`
SELECT *
FROM emojis
ORDER BY lastUsage DESC
LIMIT $limit;
`
)
.all({ limit });
2019-05-24 23:58:27 +00:00
return rows || [];
}
2024-07-22 18:16:33 +00:00
function getAllBadges(db: ReadableDB): Array<BadgeType> {
2021-11-02 23:01:13 +00:00
const [badgeRows, badgeImageFileRows] = db.transaction(() => [
db.prepare<EmptyQuery>('SELECT * FROM badges').all(),
db.prepare<EmptyQuery>('SELECT * FROM badgeImageFiles').all(),
])();
const badgeImagesByBadge = new Map<
string,
Array<undefined | BadgeImageType>
>();
for (const badgeImageFileRow of badgeImageFileRows) {
const { badgeId, order, localPath, url, theme } = badgeImageFileRow;
const badgeImages = badgeImagesByBadge.get(badgeId) || [];
badgeImages[order] = {
...(badgeImages[order] || {}),
[parseBadgeImageTheme(theme)]: {
localPath: dropNull(localPath),
url,
},
};
badgeImagesByBadge.set(badgeId, badgeImages);
}
return badgeRows.map(badgeRow => ({
id: badgeRow.id,
category: parseBadgeCategory(badgeRow.category),
name: badgeRow.name,
descriptionTemplate: badgeRow.descriptionTemplate,
images: (badgeImagesByBadge.get(badgeRow.id) || []).filter(isNotNil),
}));
}
// This should match the logic in the badges Redux reducer.
2024-07-22 18:16:33 +00:00
function updateOrCreateBadges(
db: WritableDB,
2021-11-02 23:01:13 +00:00
badges: ReadonlyArray<BadgeType>
2024-07-22 18:16:33 +00:00
): void {
2021-11-02 23:01:13 +00:00
const insertBadge = prepare<Query>(
db,
`
INSERT OR REPLACE INTO badges (
id,
category,
name,
descriptionTemplate
) VALUES (
$id,
$category,
$name,
$descriptionTemplate
);
`
);
const getImageFilesForBadge = prepare<Query>(
db,
'SELECT url, localPath FROM badgeImageFiles WHERE badgeId = $badgeId'
);
const insertBadgeImageFile = prepare<Query>(
db,
`
INSERT INTO badgeImageFiles (
badgeId,
'order',
url,
localPath,
theme
) VALUES (
$badgeId,
$order,
$url,
$localPath,
$theme
);
`
);
db.transaction(() => {
badges.forEach(badge => {
const { id: badgeId } = badge;
const oldLocalPaths = new Map<string, string>();
for (const { url, localPath } of getImageFilesForBadge.all({ badgeId })) {
if (localPath) {
oldLocalPaths.set(url, localPath);
}
}
insertBadge.run({
id: badgeId,
category: badge.category,
name: badge.name,
descriptionTemplate: badge.descriptionTemplate,
});
for (const [order, image] of badge.images.entries()) {
for (const [theme, imageFile] of Object.entries(image)) {
insertBadgeImageFile.run({
badgeId,
localPath:
imageFile.localPath || oldLocalPaths.get(imageFile.url) || null,
order,
theme,
url: imageFile.url,
});
}
}
});
})();
}
2024-07-22 18:16:33 +00:00
function badgeImageFileDownloaded(
db: WritableDB,
2021-11-02 23:01:13 +00:00
url: string,
localPath: string
2024-07-22 18:16:33 +00:00
): void {
2021-11-02 23:01:13 +00:00
prepare<Query>(
db,
'UPDATE badgeImageFiles SET localPath = $localPath WHERE url = $url'
).run({ url, localPath });
}
2024-07-22 18:16:33 +00:00
function getAllBadgeImageFileLocalPaths(db: ReadableDB): Set<string> {
2021-11-02 23:01:13 +00:00
const localPaths = db
.prepare<EmptyQuery>(
'SELECT localPath FROM badgeImageFiles WHERE localPath IS NOT NULL'
)
.pluck()
.all();
return new Set(localPaths);
}
2024-07-22 18:16:33 +00:00
function runCorruptionChecks(db: ReadableDB): void {
let writable: WritableDB;
try {
2024-07-22 18:16:33 +00:00
writable = toUnsafeWritableDB(db, 'integrity check');
} catch (error) {
logger.error(
'runCorruptionChecks: not running the check, no writable instance',
Errors.toLogFormat(error)
);
return;
}
try {
2024-07-22 18:16:33 +00:00
const result = writable.pragma('integrity_check');
if (result.length === 1 && result.at(0)?.integrity_check === 'ok') {
logger.info('runCorruptionChecks: general integrity is ok');
} else {
logger.error('runCorruptionChecks: general integrity is not ok', result);
}
} catch (error) {
logger.error(
'runCorruptionChecks: general integrity check error',
Errors.toLogFormat(error)
);
}
try {
2024-07-22 18:16:33 +00:00
writable.exec(
"INSERT INTO messages_fts(messages_fts) VALUES('integrity-check')"
);
logger.info('runCorruptionChecks: FTS5 integrity ok');
} catch (error) {
logger.error(
'runCorruptionChecks: FTS5 integrity check error.',
Errors.toLogFormat(error)
);
}
}
type StoryDistributionForDatabase = Readonly<
{
2022-07-01 00:52:03 +00:00
allowsReplies: 0 | 1;
deletedAtTimestamp: number | null;
isBlockList: 0 | 1;
senderKeyInfoJson: string | null;
storageID: string | null;
storageVersion: number | null;
2022-07-01 00:52:03 +00:00
storageNeedsSync: 0 | 1;
} & Omit<
StoryDistributionType,
| 'allowsReplies'
| 'deletedAtTimestamp'
| 'isBlockList'
| 'senderKeyInfo'
| 'storageID'
| 'storageVersion'
2022-07-01 00:52:03 +00:00
| 'storageNeedsSync'
>
>;
function hydrateStoryDistribution(
fromDatabase: StoryDistributionForDatabase
): StoryDistributionType {
return {
...omit(fromDatabase, 'senderKeyInfoJson'),
2022-07-01 00:52:03 +00:00
allowsReplies: Boolean(fromDatabase.allowsReplies),
deletedAtTimestamp: fromDatabase.deletedAtTimestamp || undefined,
isBlockList: Boolean(fromDatabase.isBlockList),
senderKeyInfo: fromDatabase.senderKeyInfoJson
? JSON.parse(fromDatabase.senderKeyInfoJson)
: undefined,
storageID: fromDatabase.storageID || undefined,
storageVersion: fromDatabase.storageVersion || undefined,
2022-07-01 00:52:03 +00:00
storageNeedsSync: Boolean(fromDatabase.storageNeedsSync),
storageUnknownFields: fromDatabase.storageUnknownFields || undefined,
};
}
function freezeStoryDistribution(
story: StoryDistributionType
): StoryDistributionForDatabase {
return {
...omit(story, 'senderKeyInfo'),
2022-07-01 00:52:03 +00:00
allowsReplies: story.allowsReplies ? 1 : 0,
deletedAtTimestamp: story.deletedAtTimestamp || null,
isBlockList: story.isBlockList ? 1 : 0,
senderKeyInfoJson: story.senderKeyInfo
? JSON.stringify(story.senderKeyInfo)
: null,
storageID: story.storageID || null,
storageVersion: story.storageVersion || null,
2022-07-01 00:52:03 +00:00
storageNeedsSync: story.storageNeedsSync ? 1 : 0,
storageUnknownFields: story.storageUnknownFields || null,
};
}
2024-07-22 18:16:33 +00:00
function _getAllStoryDistributions(
db: ReadableDB
): Array<StoryDistributionType> {
const storyDistributions = db
.prepare<EmptyQuery>('SELECT * FROM storyDistributions;')
.all();
return storyDistributions.map(hydrateStoryDistribution);
}
2024-07-22 18:16:33 +00:00
function _getAllStoryDistributionMembers(
db: ReadableDB
): Array<StoryDistributionMemberType> {
return db
.prepare<EmptyQuery>('SELECT * FROM storyDistributionMembers;')
.all();
}
2024-07-22 18:16:33 +00:00
function _deleteAllStoryDistributions(db: WritableDB): void {
db.prepare<EmptyQuery>('DELETE FROM storyDistributions;').run();
}
2024-07-22 18:16:33 +00:00
function createNewStoryDistribution(
db: WritableDB,
distribution: StoryDistributionWithMembersType
2024-07-22 18:16:33 +00:00
): void {
strictAssert(
distribution.name,
'Distribution list does not have a valid name'
);
db.transaction(() => {
const payload = freezeStoryDistribution(distribution);
prepare(
db,
`
INSERT INTO storyDistributions(
id,
name,
2022-07-01 00:52:03 +00:00
deletedAtTimestamp,
allowsReplies,
isBlockList,
senderKeyInfoJson,
storageID,
storageVersion,
storageUnknownFields,
storageNeedsSync
) VALUES (
$id,
$name,
2022-07-01 00:52:03 +00:00
$deletedAtTimestamp,
$allowsReplies,
$isBlockList,
$senderKeyInfoJson,
$storageID,
$storageVersion,
$storageUnknownFields,
$storageNeedsSync
);
`
).run(payload);
const { id: listId, members } = distribution;
const memberInsertStatement = prepare(
db,
`
INSERT OR REPLACE INTO storyDistributionMembers (
listId,
2023-08-16 20:54:39 +00:00
serviceId
) VALUES (
$listId,
2023-08-16 20:54:39 +00:00
$serviceId
);
`
);
2023-08-16 20:54:39 +00:00
for (const serviceId of members) {
memberInsertStatement.run({
listId,
2023-08-16 20:54:39 +00:00
serviceId,
});
}
})();
}
2024-07-22 18:16:33 +00:00
function getAllStoryDistributionsWithMembers(
db: ReadableDB
): Array<StoryDistributionWithMembersType> {
const allDistributions = _getAllStoryDistributions(db);
const allMembers = _getAllStoryDistributionMembers(db);
const byListId = groupBy(allMembers, member => member.listId);
return allDistributions.map(list => ({
...list,
2023-08-16 20:54:39 +00:00
members: (byListId[list.id] || []).map(member => member.serviceId),
}));
}
2024-07-22 18:16:33 +00:00
function getStoryDistributionWithMembers(
db: ReadableDB,
2022-03-04 21:14:52 +00:00
id: string
2024-07-22 18:16:33 +00:00
): StoryDistributionWithMembersType | undefined {
const storyDistribution: StoryDistributionForDatabase | undefined = prepare(
2022-03-04 21:14:52 +00:00
db,
'SELECT * FROM storyDistributions WHERE id = $id;'
).get({
id,
});
if (!storyDistribution) {
return undefined;
}
const members = prepare(
db,
'SELECT * FROM storyDistributionMembers WHERE listId = $id;'
).all({
id,
});
return {
...hydrateStoryDistribution(storyDistribution),
2023-08-16 20:54:39 +00:00
members: members.map(({ serviceId }) => serviceId),
2022-03-04 21:14:52 +00:00
};
}
2024-07-22 18:16:33 +00:00
function modifyStoryDistribution(
db: WritableDB,
distribution: StoryDistributionType
2022-07-01 00:52:03 +00:00
): void {
2024-07-22 18:16:33 +00:00
const payload = freezeStoryDistribution(distribution);
if (payload.deletedAtTimestamp) {
strictAssert(
!payload.name,
'Attempt to delete distribution list but still has a name'
);
} else {
strictAssert(
payload.name,
'Cannot clear distribution list name without deletedAtTimestamp set'
);
}
prepare(
db,
`
UPDATE storyDistributions
SET
name = $name,
2022-07-01 00:52:03 +00:00
deletedAtTimestamp = $deletedAtTimestamp,
allowsReplies = $allowsReplies,
isBlockList = $isBlockList,
senderKeyInfoJson = $senderKeyInfoJson,
storageID = $storageID,
storageVersion = $storageVersion,
storageUnknownFields = $storageUnknownFields,
storageNeedsSync = $storageNeedsSync
WHERE id = $id
`
).run(payload);
}
2024-07-22 18:16:33 +00:00
function modifyStoryDistributionMembers(
db: WritableDB,
listId: string,
{
toAdd,
toRemove,
}: { toAdd: Array<ServiceIdString>; toRemove: Array<ServiceIdString> }
2024-07-22 18:16:33 +00:00
): void {
2022-07-01 00:52:03 +00:00
const memberInsertStatement = prepare(
db,
`
INSERT OR REPLACE INTO storyDistributionMembers (
listId,
2023-08-16 20:54:39 +00:00
serviceId
2022-07-01 00:52:03 +00:00
) VALUES (
$listId,
2023-08-16 20:54:39 +00:00
$serviceId
2022-07-01 00:52:03 +00:00
);
`
);
2023-08-16 20:54:39 +00:00
for (const serviceId of toAdd) {
2022-07-01 00:52:03 +00:00
memberInsertStatement.run({
listId,
2023-08-16 20:54:39 +00:00
serviceId,
2022-07-01 00:52:03 +00:00
});
}
2023-08-16 20:54:39 +00:00
batchMultiVarQuery(
db,
toRemove,
(serviceIds: ReadonlyArray<ServiceIdString>) => {
2023-11-22 22:48:53 +00:00
const serviceIdSet = sqlJoin(serviceIds);
2023-08-16 20:54:39 +00:00
const [sqlQuery, sqlParams] = sql`
DELETE FROM storyDistributionMembers
WHERE listId = ${listId} AND serviceId IN (${serviceIdSet});
`;
db.prepare(sqlQuery).run(sqlParams);
}
);
2022-07-01 00:52:03 +00:00
}
2024-07-22 18:16:33 +00:00
function modifyStoryDistributionWithMembers(
db: WritableDB,
2022-07-01 00:52:03 +00:00
distribution: StoryDistributionType,
{
toAdd,
toRemove,
}: { toAdd: Array<ServiceIdString>; toRemove: Array<ServiceIdString> }
2024-07-22 18:16:33 +00:00
): void {
2022-07-01 00:52:03 +00:00
if (toAdd.length || toRemove.length) {
db.transaction(() => {
2024-07-22 18:16:33 +00:00
modifyStoryDistribution(db, distribution);
modifyStoryDistributionMembers(db, distribution.id, { toAdd, toRemove });
2022-07-01 00:52:03 +00:00
})();
} else {
2024-07-22 18:16:33 +00:00
modifyStoryDistribution(db, distribution);
2022-07-01 00:52:03 +00:00
}
}
2024-07-22 18:16:33 +00:00
function deleteStoryDistribution(
db: WritableDB,
id: StoryDistributionIdString
2024-07-22 18:16:33 +00:00
): void {
db.prepare<Query>('DELETE FROM storyDistributions WHERE id = $id;').run({
id,
});
}
2024-07-22 18:16:33 +00:00
function _getAllStoryReads(db: ReadableDB): Array<StoryReadType> {
return db.prepare<EmptyQuery>('SELECT * FROM storyReads;').all();
}
2024-07-22 18:16:33 +00:00
function _deleteAllStoryReads(db: WritableDB): void {
db.prepare<EmptyQuery>('DELETE FROM storyReads;').run();
}
2024-07-22 18:16:33 +00:00
function addNewStoryRead(db: WritableDB, read: StoryReadType): void {
prepare(
db,
`
INSERT OR REPLACE INTO storyReads(
authorId,
conversationId,
storyId,
storyReadDate
) VALUES (
$authorId,
$conversationId,
$storyId,
$storyReadDate
);
`
).run(read);
}
2024-07-22 18:16:33 +00:00
// Fetches the most recent story reads by a given author, optionally limited
// to a single conversation, newest first. When no limit is provided (or a
// falsy limit is passed), the 5 newest reads are returned.
function getLastStoryReadsForAuthor(
  db: ReadableDB,
  {
    authorId,
    conversationId,
    limit: initialLimit,
  }: {
    authorId: ServiceIdString;
    conversationId?: string;
    limit?: number;
  }
): Array<StoryReadType> {
  const limit = initialLimit || 5;

  const selectStatement = db.prepare<Query>(
    `
    SELECT * FROM storyReads
    WHERE
      authorId = $authorId AND
      ($conversationId IS NULL OR conversationId = $conversationId)
    ORDER BY storyReadDate DESC
    LIMIT $limit;
    `
  );

  return selectStatement.all({
    authorId,
    conversationId: conversationId || null,
    limit,
  });
}
2024-07-22 18:16:33 +00:00
function countStoryReadsByConversation(
db: ReadableDB,
2022-03-29 01:10:08 +00:00
conversationId: string
2024-07-22 18:16:33 +00:00
): number {
2022-03-29 01:10:08 +00:00
return db
.prepare<Query>(
`
2023-01-17 21:07:21 +00:00
SELECT count(1) FROM storyReads
2022-03-29 01:10:08 +00:00
WHERE conversationId = $conversationId;
`
)
.pluck()
.get({ conversationId });
}
2018-10-18 01:01:21 +00:00
// All data in database
// Wipes every table in the database inside a single transaction, so either
// everything is deleted or nothing is. The messages delete trigger is
// dropped first (per-row trigger work would make the bulk DELETE slow) and
// re-created afterwards.
function removeAll(db: WritableDB): void {
  db.transaction(() => {
    db.exec(`
      --- Remove messages delete trigger for performance
      DROP TRIGGER messages_on_delete;

      DELETE FROM attachment_downloads;
      DELETE FROM attachment_backup_jobs;
      DELETE FROM backup_cdn_object_metadata;
      DELETE FROM badgeImageFiles;
      DELETE FROM badges;
      DELETE FROM callLinks;
      DELETE FROM callsHistory;
      DELETE FROM conversations;
      DELETE FROM emojis;
      DELETE FROM groupCallRingCancellations;
      DELETE FROM groupSendCombinedEndorsement;
      DELETE FROM groupSendMemberEndorsement;
      DELETE FROM identityKeys;
      DELETE FROM items;
      DELETE FROM jobs;
      DELETE FROM kyberPreKeys;
      DELETE FROM messages_fts;
      DELETE FROM messages;
      DELETE FROM preKeys;
      DELETE FROM reactions;
      DELETE FROM senderKeys;
      DELETE FROM sendLogMessageIds;
      DELETE FROM sendLogPayloads;
      DELETE FROM sendLogRecipients;
      DELETE FROM sessions;
      DELETE FROM signedPreKeys;
      DELETE FROM sticker_packs;
      DELETE FROM sticker_references;
      DELETE FROM stickers;
      DELETE FROM storyDistributionMembers;
      DELETE FROM storyDistributions;
      DELETE FROM storyReads;
      DELETE FROM syncTasks;
      DELETE FROM unprocessed;
      DELETE FROM uninstalled_sticker_packs;

      INSERT INTO messages_fts(messages_fts) VALUES('optimize');

      --- Re-create the messages delete trigger
      --- See migration 45
      CREATE TRIGGER messages_on_delete AFTER DELETE ON messages BEGIN
        DELETE FROM messages_fts WHERE rowid = old.rowid;
        DELETE FROM sendLogPayloads WHERE id IN (
          SELECT payloadId FROM sendLogMessageIds
          WHERE messageId = old.id
        );
        DELETE FROM reactions WHERE rowid IN (
          SELECT rowid FROM reactions
          WHERE messageId = old.id
        );
        DELETE FROM storyReads WHERE storyId = old.storyId;
      END;
    `);
  })();
}
// Anything that isn't user-visible data
// Clears device configuration state — keys, sessions, jobs, send logs,
// endorsements, sync tasks — while keeping user-visible data such as
// messages and conversations. In the `items` table, only the whitelisted
// UI keys (STORAGE_UI_KEYS) are preserved. Finally, per-conversation
// sender-key and storage-service fields are stripped from the stored JSON.
// All steps run in one transaction.
function removeAllConfiguration(db: WritableDB): void {
  db.transaction(() => {
    db.exec(
      `
      DELETE FROM attachment_backup_jobs;
      DELETE FROM backup_cdn_object_metadata;
      DELETE FROM groupSendCombinedEndorsement;
      DELETE FROM groupSendMemberEndorsement;
      DELETE FROM jobs;
      DELETE FROM kyberPreKeys;
      DELETE FROM preKeys;
      DELETE FROM senderKeys;
      DELETE FROM sendLogMessageIds;
      DELETE FROM sendLogPayloads;
      DELETE FROM sendLogRecipients;
      DELETE FROM sessions;
      DELETE FROM signedPreKeys;
      DELETE FROM syncTasks;
      DELETE FROM unprocessed;
      `
    );

    // Delete every item except the whitelisted UI preference keys.
    const itemIds: ReadonlyArray<string> = db
      .prepare<EmptyQuery>('SELECT id FROM items')
      .pluck(true)
      .all();

    const allowedSet = new Set<string>(STORAGE_UI_KEYS);
    for (const id of itemIds) {
      if (!allowedSet.has(id)) {
        removeById(db, 'items', id);
      }
    }

    db.exec(
      `
      UPDATE conversations
      SET
        json = json_remove(
          json,
          '$.senderKeyInfo',
          '$.storageID',
          '$.needsStorageServiceSync',
          '$.storageUnknownFields'
        );
      UPDATE storyDistributions SET senderKeyInfoJson = NULL;
      `
    );
  })();
}
2024-07-22 18:16:33 +00:00
// Clears storage-service sync state (storage IDs, versions, unknown fields,
// needs-sync flags) from conversations, sticker packs, uninstalled sticker
// packs, and story distribution lists.
function eraseStorageServiceState(db: WritableDB): void {
  db.exec(`
    -- Conversations
    UPDATE conversations
    SET
      json = json_remove(json, '$.storageID', '$.needsStorageServiceSync', '$.storageUnknownFields');

    -- Stickers
    UPDATE sticker_packs
    SET
      storageID = null,
      storageVersion = null,
      storageUnknownFields = null,
      storageNeedsSync = 0;

    UPDATE uninstalled_sticker_packs
    SET
      storageID = null,
      storageVersion = null,
      storageUnknownFields = null,
      storageNeedsSync = 0;

    -- Story Distribution Lists
    UPDATE storyDistributions
    SET
      storageID = null,
      storageVersion = null,
      storageUnknownFields = null,
      storageNeedsSync = 0;
  `);
}
const MAX_MESSAGE_MIGRATION_ATTEMPTS = 5;
2024-07-22 18:16:33 +00:00
function getMessagesNeedingUpgrade(
db: ReadableDB,
limit: number,
{ maxVersion }: { maxVersion: number }
2024-07-22 18:16:33 +00:00
): Array<MessageType> {
const rows: JSONRows = db
.prepare<Query>(
`
SELECT json
FROM messages
WHERE
(schemaVersion IS NULL OR schemaVersion < $maxVersion) AND
IFNULL(
json_extract(json, '$.schemaMigrationAttempts'),
0
) < $maxAttempts
LIMIT $limit;
`
)
.all({
maxVersion,
maxAttempts: MAX_MESSAGE_MIGRATION_ATTEMPTS,
limit,
});
return rows.map(row => jsonToObject(row.json));
}
2024-07-22 18:16:33 +00:00
// Returns the newest `limit` non-story messages in a conversation that have
// visual media attachments, newest first.
function getMessagesWithVisualMediaAttachments(
  db: ReadableDB,
  conversationId: string,
  { limit }: { limit: number }
): Array<MessageType> {
  const rows: JSONRows = db
    .prepare<Query>(
      `
      SELECT json FROM messages
      INDEXED BY messages_hasVisualMediaAttachments
      WHERE
        isStory IS 0 AND
        storyId IS NULL AND
        conversationId = $conversationId AND
        -- Note that this check has to use 'IS' to utilize
        -- 'messages_hasVisualMediaAttachments' INDEX
        hasVisualMediaAttachments IS 1
      ORDER BY received_at DESC, sent_at DESC
      LIMIT $limit;
      `
    )
    .all({
      conversationId,
      limit,
    });

  return rows.map(row => jsonToObject(row.json));
}
2024-07-22 18:16:33 +00:00
function getMessagesWithFileAttachments(
db: ReadableDB,
conversationId: string,
{ limit }: { limit: number }
2024-07-22 18:16:33 +00:00
): Array<MessageType> {
const rows = db
.prepare<Query>(
`
SELECT json FROM messages WHERE
isStory IS 0 AND
storyId IS NULL AND
conversationId = $conversationId AND
hasFileAttachments = 1
ORDER BY received_at DESC, sent_at DESC
LIMIT $limit;
`
)
.all({
conversationId,
limit,
});
return map(rows, row => jsonToObject(row.json));
}
2024-07-22 18:16:33 +00:00
// Returns the serverGuids of the newest incoming messages in a conversation,
// capped at 3, for spam reporting.
function getMessageServerGuidsForSpam(
  db: ReadableDB,
  conversationId: string
): Array<string> {
  // The server's maximum is 3, which is why you see `LIMIT 3` in this query. Note that we
  // use `pluck` here to only get the first column!
  return db
    .prepare<Query>(
      `
      SELECT serverGuid
      FROM messages
      WHERE conversationId = $conversationId
        AND type = 'incoming'
        AND serverGuid IS NOT NULL
      ORDER BY received_at DESC, sent_at DESC
      LIMIT 3;
      `
    )
    .pluck(true)
    .all({ conversationId });
}
function getExternalFilesForMessage(message: MessageType): Array<string> {
const { attachments, contact, quote, preview, sticker } = message;
const files: Array<string> = [];
forEach(attachments, attachment => {
const {
path: file,
thumbnail,
screenshot,
thumbnailFromBackup,
} = attachment;
if (file) {
files.push(file);
}
if (thumbnail && thumbnail.path) {
files.push(thumbnail.path);
}
if (screenshot && screenshot.path) {
files.push(screenshot.path);
}
if (thumbnailFromBackup && thumbnailFromBackup.path) {
files.push(thumbnailFromBackup.path);
}
});
if (quote && quote.attachments && quote.attachments.length) {
forEach(quote.attachments, attachment => {
const { thumbnail } = attachment;
if (thumbnail && thumbnail.path) {
files.push(thumbnail.path);
}
});
}
if (contact && contact.length) {
forEach(contact, item => {
const { avatar } = item;
if (avatar && avatar.avatar && avatar.avatar.path) {
files.push(avatar.avatar.path);
}
});
}
2019-01-16 03:03:56 +00:00
if (preview && preview.length) {
forEach(preview, item => {
const { image } = item;
if (image && image.path) {
files.push(image.path);
}
});
}
if (sticker && sticker.data && sticker.data.path) {
files.push(sticker.data.path);
if (sticker.data.thumbnail && sticker.data.thumbnail.path) {
files.push(sticker.data.thumbnail.path);
}
}
return files;
}
function getExternalFilesForConversation(
conversation: Pick<ConversationType, 'avatar' | 'profileAvatar'>
): Array<string> {
2018-09-21 01:47:19 +00:00
const { avatar, profileAvatar } = conversation;
const files: Array<string> = [];
2018-09-21 01:47:19 +00:00
if (avatar && avatar.path) {
files.push(avatar.path);
}
if (profileAvatar && profileAvatar.path) {
files.push(profileAvatar.path);
}
return files;
}
function getExternalDraftFilesForConversation(
conversation: Pick<ConversationType, 'draftAttachments'>
): Array<string> {
2019-08-07 00:40:25 +00:00
const draftAttachments = conversation.draftAttachments || [];
const files: Array<string> = [];
2019-08-07 00:40:25 +00:00
forEach(draftAttachments, attachment => {
2021-08-30 21:32:56 +00:00
if (attachment.pending) {
return;
}
2019-08-07 00:40:25 +00:00
const { path: file, screenshotPath } = attachment;
if (file) {
files.push(file);
}
if (screenshotPath) {
files.push(screenshotPath);
}
});
return files;
}
2024-07-22 18:16:33 +00:00
// Returns one page of attachment file paths referenced by messages, driven by
// the pageMessages cursor machinery. Call repeatedly with the returned cursor
// until it reports done, then call finishGetKnownMessageAttachments.
function getKnownMessageAttachments(
  db: ReadableDB,
  cursor?: MessageAttachmentsCursorType
): GetKnownMessageAttachmentsResultType {
  // MessageAttachmentsCursorType and PageMessagesCursorType are distinct
  // branded aliases of the same underlying cursor; the double cast converts
  // between them.
  const innerCursor = cursor as MessageCursorType | undefined as
    | PageMessagesCursorType
    | undefined;
  const result = new Set<string>();

  const { messages, cursor: newCursor } = pageMessages(db, innerCursor);

  for (const message of messages) {
    const externalFiles = getExternalFilesForMessage(message);
    forEach(externalFiles, file => result.add(file));
  }

  return {
    attachments: Array.from(result),
    // Re-brand the page cursor as an attachments cursor for the caller.
    cursor: newCursor as MessageCursorType as MessageAttachmentsCursorType,
  };
}
2024-07-22 18:16:33 +00:00
function finishGetKnownMessageAttachments(
db: ReadableDB,
2024-03-15 14:20:33 +00:00
cursor: MessageAttachmentsCursorType
2024-07-22 18:16:33 +00:00
): void {
2024-03-15 14:20:33 +00:00
const innerCursor = cursor as MessageCursorType as PageMessagesCursorType;
2024-07-22 18:16:33 +00:00
finishPageMessages(db, innerCursor);
2024-03-15 14:20:33 +00:00
}
2024-07-22 18:16:33 +00:00
// Iterates all messages in chunks of 1000 using a session-scoped temp table
// of pending rowids. On the first call the temp table is seeded with every
// existing rowid, and temp triggers enqueue rows that are inserted or whose
// json is updated during the iteration, so such rows are (re)visited. Callers
// must keep calling with the returned cursor until `done` is true, then call
// finishPageMessages to drop the temp table and triggers.
function pageMessages(
  db: ReadableDB,
  cursor?: PageMessagesCursorType
): PageMessagesResultType {
  // Creating temp tables/triggers needs a writable handle even though the
  // messages table itself is only read.
  const writable = toUnsafeWritableDB(db, 'only temp table use');
  const chunkSize = 1000;

  return writable.transaction(() => {
    let count = cursor?.count ?? 0;

    strictAssert(!cursor?.done, 'pageMessages: iteration cannot be restarted');

    let runId: string;
    if (cursor === undefined) {
      // First page: create the per-run temp table and triggers. The random
      // runId namespaces them so concurrent iterations don't collide.
      runId = randomBytes(8).toString('hex');

      const total = getMessageCount(db);
      logger.info(
        `pageMessages(${runId}): ` +
          `Starting iteration through ${total} messages`
      );

      writable.exec(
        `
        CREATE TEMP TABLE tmp_${runId}_updated_messages
          (rowid INTEGER PRIMARY KEY ASC);

        INSERT INTO tmp_${runId}_updated_messages (rowid)
        SELECT rowid FROM messages ORDER BY rowid ASC;

        CREATE TEMP TRIGGER tmp_${runId}_message_updates
        UPDATE OF json ON messages
        BEGIN
          INSERT OR IGNORE INTO tmp_${runId}_updated_messages (rowid)
          VALUES (NEW.rowid);
        END;

        CREATE TEMP TRIGGER tmp_${runId}_message_inserts
        AFTER INSERT ON messages
        BEGIN
          INSERT OR IGNORE INTO tmp_${runId}_updated_messages (rowid)
          VALUES (NEW.rowid);
        END;
        `
      );
    } else {
      ({ runId } = cursor);
    }

    // Atomically consume up to chunkSize pending rowids from the queue.
    const rowids: Array<number> = writable
      .prepare<Query>(
        `
        DELETE FROM tmp_${runId}_updated_messages
        RETURNING rowid
        ORDER BY rowid ASC
        LIMIT $chunkSize;
        `
      )
      .pluck()
      .all({ chunkSize });

    // Fetch the message JSON in batches to stay under SQLite's bound-variable
    // limit.
    const messages = batchMultiVarQuery(
      writable,
      rowids,
      (batch: ReadonlyArray<number>): Array<MessageType> => {
        const query = writable.prepare<ArrayQuery>(
          `SELECT json FROM messages WHERE rowid IN (${Array(batch.length)
            .fill('?')
            .join(',')});`
        );
        const rows: JSONRows = query.all(batch);
        return rows.map(row => jsonToObject(row.json));
      }
    );

    count += messages.length;
    // A short page means the pending-rowid queue is drained.
    const done = rowids.length < chunkSize;
    const newCursor: MessageCursorType = { runId, count, done };

    return {
      messages,
      cursor: newCursor as PageMessagesCursorType,
    };
  })();
}
2024-07-22 18:16:33 +00:00
// Drops the temp table and triggers created by pageMessages for this run.
// Logs a warning when called before the iteration reached `done`.
function finishPageMessages(
  db: ReadableDB,
  { runId, count, done }: PageMessagesCursorType
): void {
  // Dropping temp objects requires a writable handle.
  const writable = toUnsafeWritableDB(db, 'only temp table use');

  const logId = `finishPageMessages(${runId})`;
  if (!done) {
    logger.warn(`${logId}: iteration not finished`);
  }

  logger.info(`${logId}: reached the end after processing ${count} messages`);
  writable.exec(`
    DROP TABLE tmp_${runId}_updated_messages;
    DROP TRIGGER tmp_${runId}_message_updates;
    DROP TRIGGER tmp_${runId}_message_inserts;
  `);
}
2024-07-22 18:16:33 +00:00
function getKnownConversationAttachments(db: ReadableDB): Array<string> {
const result = new Set<string>();
const chunkSize = 500;
2021-10-26 22:59:08 +00:00
let complete = false;
let id = '';
2018-09-21 01:47:19 +00:00
2024-07-22 18:16:33 +00:00
const conversationTotal = getConversationCount(db);
logger.info(
'getKnownConversationAttachments: About to iterate through ' +
`${conversationTotal}`
2018-09-21 01:47:19 +00:00
);
2021-10-05 16:36:07 +00:00
const fetchConversations = db.prepare<Query>(
`
SELECT json FROM conversations
WHERE id > $id
ORDER BY id ASC
LIMIT $chunkSize;
`
);
2018-09-21 01:47:19 +00:00
while (!complete) {
2021-10-05 16:36:07 +00:00
const rows = fetchConversations.all({
id,
chunkSize,
});
2018-09-21 01:47:19 +00:00
const conversations: Array<ConversationType> = map(rows, row =>
jsonToObject(row.json)
);
conversations.forEach(conversation => {
2018-09-21 01:47:19 +00:00
const externalFiles = getExternalFilesForConversation(conversation);
externalFiles.forEach(file => result.add(file));
2018-09-21 01:47:19 +00:00
});
const lastMessage: ConversationType | undefined = last(conversations);
2018-09-21 01:47:19 +00:00
if (lastMessage) {
({ id } = lastMessage);
}
complete = conversations.length < chunkSize;
}
logger.info('getKnownConversationAttachments: Done processing');
2018-09-21 01:47:19 +00:00
return Array.from(result);
}
2024-07-22 18:16:33 +00:00
function removeKnownStickers(
db: WritableDB,
allStickers: ReadonlyArray<string>
2024-07-22 18:16:33 +00:00
): Array<string> {
const lookup: Dictionary<boolean> = fromPairs(
map(allStickers, file => [file, true])
);
const chunkSize = 50;
2024-07-22 18:16:33 +00:00
const total = getStickerCount(db);
logger.info(
`removeKnownStickers: About to iterate through ${total} stickers`
);
let count = 0;
let complete = false;
let rowid = 0;
while (!complete) {
const rows: Array<{ rowid: number; path: string }> = db
.prepare<Query>(
`
SELECT rowid, path FROM stickers
WHERE rowid > $rowid
ORDER BY rowid ASC
LIMIT $chunkSize;
`
)
.all({
rowid,
chunkSize,
});
const files: Array<string> = rows.map(row => row.path);
files.forEach(file => {
delete lookup[file];
});
const lastSticker = last(rows);
if (lastSticker) {
({ rowid } = lastSticker);
}
complete = rows.length < chunkSize;
count += rows.length;
}
logger.info(`removeKnownStickers: Done processing ${count} stickers`);
return Object.keys(lookup);
}
2019-08-07 00:40:25 +00:00
2024-07-22 18:16:33 +00:00
function removeKnownDraftAttachments(
db: WritableDB,
allStickers: ReadonlyArray<string>
2024-07-22 18:16:33 +00:00
): Array<string> {
const lookup: Dictionary<boolean> = fromPairs(
map(allStickers, file => [file, true])
);
2019-08-07 00:40:25 +00:00
const chunkSize = 50;
2024-07-22 18:16:33 +00:00
const total = getConversationCount(db);
logger.info(
2019-08-07 00:40:25 +00:00
`removeKnownDraftAttachments: About to iterate through ${total} conversations`
);
let complete = false;
let count = 0;
// Though conversations.id is a string, this ensures that, when coerced, this
// value is still a string but it's smaller than every other string.
let id: number | string = 0;
2019-08-07 00:40:25 +00:00
while (!complete) {
const rows: JSONRows = db
.prepare<Query>(
`
SELECT json FROM conversations
WHERE id > $id
ORDER BY id ASC
LIMIT $chunkSize;
`
)
.all({
id,
chunkSize,
});
2019-08-07 00:40:25 +00:00
const conversations: Array<ConversationType> = rows.map(row =>
jsonToObject(row.json)
);
conversations.forEach(conversation => {
2019-08-07 00:40:25 +00:00
const externalFiles = getExternalDraftFilesForConversation(conversation);
externalFiles.forEach(file => {
2019-08-07 00:40:25 +00:00
delete lookup[file];
});
});
const lastMessage: ConversationType | undefined = last(conversations);
2019-08-07 00:40:25 +00:00
if (lastMessage) {
({ id } = lastMessage);
}
complete = conversations.length < chunkSize;
count += conversations.length;
}
logger.info(
2019-08-07 00:40:25 +00:00
`removeKnownDraftAttachments: Done processing ${count} conversations`
);
return Object.keys(lookup);
}
2021-04-29 23:02:27 +00:00
2024-07-22 18:16:33 +00:00
// Loads every persisted job for one queue, ordered oldest-first by timestamp.
// The `data` column is stored as a JSON string; a NULL column becomes
// `undefined` on the returned job.
export function getJobsInQueue(
  db: ReadableDB,
  queueType: string
): Array<StoredJob> {
  return db
    .prepare<Query>(
      `
      SELECT id, timestamp, data
      FROM jobs
      WHERE queueType = $queueType
      ORDER BY timestamp;
      `
    )
    .all({ queueType })
    .map(row => ({
      id: row.id,
      queueType,
      timestamp: row.timestamp,
      data: isNotNil(row.data) ? JSON.parse(row.data) : undefined,
    }));
}
2024-07-22 18:16:33 +00:00
export function insertJob(db: WritableDB, job: Readonly<StoredJob>): void {
2021-04-29 23:02:27 +00:00
db.prepare<Query>(
`
INSERT INTO jobs
(id, queueType, timestamp, data)
VALUES
($id, $queueType, $timestamp, $data);
`
).run({
id: job.id,
queueType: job.queueType,
timestamp: job.timestamp,
data: isNotNil(job.data) ? JSON.stringify(job.data) : null,
});
}
2024-07-22 18:16:33 +00:00
function deleteJob(db: WritableDB, id: string): void {
2021-04-29 23:02:27 +00:00
db.prepare<Query>('DELETE FROM jobs WHERE id = $id').run({ id });
}
2021-05-28 16:15:17 +00:00
2024-07-22 18:16:33 +00:00
// Returns whether this group-call ring has a recorded cancellation that is
// recent enough to matter (within MAX_GROUP_CALL_RING_AGE); older
// cancellations are ignored.
function wasGroupCallRingPreviouslyCanceled(
  db: ReadableDB,
  ringId: bigint
): boolean {
  return db
    .prepare<Query>(
      `
      SELECT EXISTS (
        SELECT 1 FROM groupCallRingCancellations
        WHERE ringId = $ringId
        AND createdAt >= $ringsOlderThanThisAreIgnored
      );
      `
    )
    .pluck()
    .get({
      ringId,
      ringsOlderThanThisAreIgnored: Date.now() - MAX_GROUP_CALL_RING_AGE,
    });
}
2024-07-22 18:16:33 +00:00
function processGroupCallRingCancellation(
db: WritableDB,
ringId: bigint
): void {
2021-08-20 16:06:15 +00:00
db.prepare<Query>(
`
INSERT INTO groupCallRingCancellations (ringId, createdAt)
VALUES ($ringId, $createdAt)
ON CONFLICT (ringId) DO NOTHING;
2021-08-20 16:06:15 +00:00
`
).run({ ringId, createdAt: Date.now() });
}
// This age, in milliseconds, should be longer than any group call ring duration. Beyond
// that, it doesn't really matter what the value is.
const MAX_GROUP_CALL_RING_AGE = 30 * durations.MINUTE;

// Prunes ring cancellations older than MAX_GROUP_CALL_RING_AGE, which can no
// longer affect any active ring.
function cleanExpiredGroupCallRingCancellations(db: WritableDB): void {
  db.prepare<Query>(
    `
    DELETE FROM groupCallRingCancellations
    WHERE createdAt < $expiredRingTime;
    `
  ).run({
    expiredRingTime: Date.now() - MAX_GROUP_CALL_RING_AGE,
  });
}
2024-07-22 18:16:33 +00:00
// Returns the largest message counter seen so far: the max over
// messages.received_at and unprocessed.timestamp. Undefined when both tables
// are empty.
function getMaxMessageCounter(db: ReadableDB): number | undefined {
  return db
    .prepare<EmptyQuery>(
      `
      SELECT MAX(counter)
      FROM
        (
          SELECT MAX(received_at) AS counter FROM messages
          UNION
          SELECT MAX(timestamp) AS counter FROM unprocessed
        )
      `
    )
    .pluck()
    .get();
}
2024-07-22 18:16:33 +00:00
function getStatisticsForLogging(db: ReadableDB): Record<string, string> {
const counts = {
messageCount: getMessageCount(db),
conversationCount: getConversationCount(db),
2021-10-26 22:59:08 +00:00
sessionCount: getCountFromTable(db, 'sessions'),
senderKeyCount: getCountFromTable(db, 'senderKeys'),
2024-07-22 18:16:33 +00:00
};
return mapValues(counts, formatCountForLogging);
}
2024-07-22 18:16:33 +00:00
function updateAllConversationColors(
db: WritableDB,
2021-05-28 16:15:17 +00:00
conversationColor?: ConversationColorType,
customColorData?: {
id: string;
value: CustomColorType;
}
2024-07-22 18:16:33 +00:00
): void {
2021-05-28 16:15:17 +00:00
db.prepare<Query>(
`
UPDATE conversations
SET json = JSON_PATCH(json, $patch);
`
).run({
patch: JSON.stringify({
conversationColor: conversationColor || null,
customColor: customColorData?.value || null,
customColorId: customColorData?.id || null,
}),
});
}
2022-07-08 20:46:25 +00:00
2024-07-22 18:16:33 +00:00
// Strips the cached profileKeyCredential field from every conversation's
// stored JSON.
function removeAllProfileKeyCredentials(db: WritableDB): void {
  db.exec(
    `
    UPDATE conversations
    SET
      json = json_remove(json, '$.profileKeyCredential')
    `
  );
}
2023-03-27 23:48:57 +00:00
2024-07-22 18:16:33 +00:00
// Saves the (already-updated) main message and inserts one edited_messages
// row per edit-history entry, all inside a single transaction.
function saveEditedMessages(
  db: WritableDB,
  mainMessage: ReadonlyDeep<MessageType>,
  ourAci: AciString,
  history: ReadonlyArray<ReadonlyDeep<EditedMessageType>>
): void {
  db.transaction(() => {
    saveMessage(db, mainMessage, {
      ourAci,
      // We're already inside this function's transaction.
      alreadyInTransaction: true,
    });

    for (const { conversationId, messageId, readStatus, sentAt } of history) {
      const [query, params] = sql`
        INSERT INTO edited_messages (
          conversationId,
          messageId,
          sentAt,
          readStatus
        ) VALUES (
          ${conversationId},
          ${messageId},
          ${sentAt},
          ${readStatus}
        );
      `;

      db.prepare(query).run(params);
    }
  })();
}
2024-07-22 18:16:33 +00:00
function saveEditedMessage(
db: WritableDB,
mainMessage: ReadonlyDeep<MessageType>,
2024-06-03 17:02:25 +00:00
ourAci: AciString,
editedMessage: ReadonlyDeep<EditedMessageType>
2024-07-22 18:16:33 +00:00
): void {
return saveEditedMessages(db, mainMessage, ourAci, [editedMessage]);
2024-06-03 17:02:25 +00:00
}
2024-07-22 18:16:33 +00:00
function _getAllEditedMessages(
db: ReadableDB
): Array<{ messageId: string; sentAt: number }> {
2023-03-27 23:48:57 +00:00
return db
.prepare<Query>(
`
SELECT * FROM edited_messages;
`
)
.all({});
}
2024-07-22 18:16:33 +00:00
// Finds unread edited-message entries in a conversation whose parent message
// was received at or before `newestUnreadAt`, marks unread edits up to the
// newest matching sentAt as read, and returns read-receipt info derived from
// the parent message JSON (with sent_at replaced by the edit's timestamp).
// The select + update run in one transaction.
function getUnreadEditedMessagesAndMarkRead(
  db: WritableDB,
  {
    conversationId,
    newestUnreadAt,
  }: {
    conversationId: string;
    newestUnreadAt: number;
  }
): GetUnreadByConversationAndMarkReadResultType {
  return db.transaction(() => {
    const [selectQuery, selectParams] = sql`
      SELECT
        messages.id,
        messages.json,
        edited_messages.sentAt,
        edited_messages.readStatus
      FROM edited_messages
      JOIN messages
        ON messages.id = edited_messages.messageId
      WHERE
        edited_messages.readStatus = ${ReadStatus.Unread} AND
        edited_messages.conversationId = ${conversationId} AND
        received_at <= ${newestUnreadAt}
      ORDER BY messages.received_at DESC, messages.sent_at DESC;
    `;

    const rows = db.prepare(selectQuery).all(selectParams);

    if (rows.length) {
      // Rows are ordered newest-first, so rows[0] carries the newest edit
      // timestamp; everything at or below it gets marked read.
      const newestSentAt = rows[0].sentAt;

      const [updateStatusQuery, updateStatusParams] = sql`
        UPDATE edited_messages
        SET
          readStatus = ${ReadStatus.Read}
        WHERE
          readStatus = ${ReadStatus.Unread} AND
          conversationId = ${conversationId} AND
          sentAt <= ${newestSentAt};
      `;

      db.prepare(updateStatusQuery).run(updateStatusParams);
    }

    return rows.map(row => {
      const json = jsonToObject<MessageType>(row.json);
      return {
        originalReadStatus: row.readStatus,
        readStatus: ReadStatus.Read,
        seenStatus: SeenStatus.Seen,
        ...pick(json, [
          'expirationStartTimestamp',
          'id',
          'sent_at',
          'source',
          'sourceServiceId',
          'type',
        ]),
        // Use the edited message timestamp
        sent_at: row.sentAt,
      };
    });
  })();
}