Simplify database migrations

Fedor Indutny, 2025-08-06 10:32:08 -07:00 (committed by GitHub)
commit e6809c95db
GPG key ID: B5690EEEBB952194
106 changed files with 4661 additions and 6814 deletions
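
Every file in the excerpt below gets the same mechanical treatment: the numbered `version` export, the `currentVersion` early-return guard, the per-migration `db.transaction(...)()` wrapper, the `user_version` pragma bump, and the trailing success log are removed, leaving a default-exported function that only performs the schema work (log messages also drop their `updateToSchemaVersionNNNN:` prefix). Condensed from the diffs below, with migration 1000 as the example; the transaction, version bump, and logging presumably move into the shared migration runner, which is not part of this excerpt:

// Before: every migration carried the same boilerplate.
export const version = 1000;

export function updateToSchemaVersion1000(
  currentVersion: number,
  db: Database,
  logger: LoggerType
): void {
  if (currentVersion >= 1000) {
    return;
  }
  db.transaction(() => {
    // ...schema changes...
    db.pragma('user_version = 1000');
  })();
  logger.info('updateToSchemaVersion1000: success!');
}

// After: only the schema changes remain.
export default function updateToSchemaVersion1000(db: Database): void {
  // ...schema changes...
}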

View file

@@ -3,58 +3,41 @@
import type { Database } from '@signalapp/sqlcipher';
import { ReadStatus } from '../../messages/MessageReadStatus';
import { SeenStatus } from '../../MessageSeenStatus';
import { strictAssert } from '../../util/assert';
import { sql, sqlConstant } from '../util';

const READ_STATUS_UNREAD = sqlConstant(ReadStatus.Unread);
const READ_STATUS_READ = sqlConstant(ReadStatus.Read);
const SEEN_STATUS_UNSEEN = sqlConstant(SeenStatus.Unseen);

export default function updateToSchemaVersion1000(db: Database): void {
  const [selectQuery] = sql`
    SELECT id
    FROM messages
    WHERE messages.type = 'call-history'
      AND messages.readStatus IS ${READ_STATUS_UNREAD}
  `;

  const rows = db.prepare(selectQuery).all();

  for (const row of rows) {
    const { id } = row;
    strictAssert(id != null, 'message id must exist');

    const [updateQuery, updateParams] = sql`
      UPDATE messages
      SET
        json = JSON_PATCH(json, ${JSON.stringify({
          readStatus: ReadStatus.Read,
          seenStatus: SeenStatus.Unseen,
        })}),
        readStatus = ${READ_STATUS_READ},
        seenStatus = ${SEEN_STATUS_UNSEEN}
      WHERE id = ${id}
    `;

    db.prepare(updateQuery).run(updateParams);
  }
}
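
None of the removed boilerplate is gone for good; it presumably moves into the shared migration runner, which is not included in this excerpt. A minimal sketch of what such a runner could look like, using the hypothetical names `SchemaUpdate` and `runSchemaUpdates` and the better-sqlite3-style `pragma`/`transaction` calls already visible in the old code:

// Hypothetical sketch only: names, signatures, and the exact pragma API are
// assumptions, not code from this commit.
type SchemaUpdate = Readonly<{
  version: number;
  update: (db: Database, logger: LoggerType) => void;
}>;

function runSchemaUpdates(
  db: Database,
  logger: LoggerType,
  updates: ReadonlyArray<SchemaUpdate> // sorted ascending by version
): void {
  // Read the schema version once, before applying anything.
  const startingVersion = db.pragma('user_version', { simple: true }) as number;

  for (const { version, update } of updates) {
    if (startingVersion >= version) {
      // Replaces the per-migration `if (currentVersion >= N) return` guard.
      continue;
    }

    db.transaction(() => {
      update(db, logger); // the simplified, default-exported migration
      db.pragma(`user_version = ${version}`); // bump once, centrally
    })();

    logger.info(`updateToSchemaVersion${version}: success!`);
  }
}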

View file

@@ -3,38 +3,21 @@
import type { Database } from '@signalapp/sqlcipher';
import { sql } from '../util';

export default function updateToSchemaVersion1010(db: Database): void {
  const [createTable] = sql`
    CREATE TABLE callLinks (
      roomId TEXT NOT NULL PRIMARY KEY,
      rootKey BLOB NOT NULL,
      adminKey BLOB,
      name TEXT NOT NULL,
      -- Enum which stores CallLinkRestrictions from ringrtc
      restrictions INTEGER NOT NULL,
      revoked INTEGER NOT NULL,
      expiration INTEGER
    ) STRICT;
  `;

  db.exec(createTable);
}
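
The `sql`, `sqlConstant`, and `sqlFragment` tagged templates from `../util` carry most of these migrations. A rough, hypothetical sketch of the idea behind them (the real helpers are not shown in this diff and will differ in detail): plain interpolated values become bound parameters, while constants and fragments are spliced directly into the query text.

// Simplified, hypothetical sketch of the '../util' helpers used above.
type QueryFragment = Readonly<{ text: string; values: ReadonlyArray<unknown> }>;

// sqlConstant: inline a trusted, known-safe constant into the SQL text.
function sqlConstant(value: string | number): QueryFragment {
  const text = typeof value === 'number' ? String(value) : `'${value}'`;
  return { text, values: [] };
}

// sqlFragment: compose a reusable piece of SQL that can be nested into sql``.
function sqlFragment(
  strings: TemplateStringsArray,
  ...values: Array<unknown>
): QueryFragment {
  const [text, params] = sql(strings, ...values);
  return { text, values: params };
}

// sql: build [queryText, params]; plain values become '?' placeholders,
// fragments and constants are spliced into the text.
function sql(
  strings: TemplateStringsArray,
  ...values: Array<unknown>
): [string, Array<unknown>] {
  let text = strings[0];
  const params: Array<unknown> = [];
  values.forEach((value, i) => {
    if (value != null && typeof value === 'object' && 'text' in value) {
      const fragment = value as QueryFragment;
      text += fragment.text;
      params.push(...fragment.values);
    } else {
      text += '?';
      params.push(value);
    }
    text += strings[i + 1];
  });
  return [text, params];
}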

View file

@@ -6,54 +6,39 @@ import { sql } from '../util';
import type { WritableDB } from '../Interface';
import { getOurUuid } from './41-uuid-keys';

export default function updateToSchemaVersion1020(
  db: WritableDB,
  logger: LoggerType
): void {
  const ourAci = getOurUuid(db);
  if (ourAci == null) {
    logger.info('not linked');
    return;
  }

  const [selectQuery, selectParams] = sql`
    SELECT id FROM conversations
    WHERE serviceId IS ${ourAci}
  `;

  const ourConversationId = db
    .prepare(selectQuery, {
      pluck: true,
    })
    .get(selectParams);
  if (ourConversationId == null) {
    logger.error('no conversation');
    return;
  }

  const [deleteQuery, deleteParams] = sql`
    DELETE FROM messages
    WHERE
      conversationId IS ${ourConversationId} AND
      type IS 'conversation-merge'
  `;

  const { changes } = db.prepare(deleteQuery).run(deleteParams);
  if (changes !== 0) {
    logger.warn(`removed ${changes} self merges`);
  }
}

View file

@@ -3,85 +3,68 @@
import type { Database } from '@signalapp/sqlcipher';
import { sql, sqlFragment } from '../util';

export default function updateToSchemaVersion1030(db: Database): void {
  // From migration 81
  const shouldAffectActivityOrPreview = sqlFragment`
    type IS NULL
    OR
    type NOT IN (
      'change-number-notification',
      'contact-removed-notification',
      'conversation-merge',
      'group-v1-migration',
      'keychange',
      'message-history-unsynced',
      'profile-change',
      'story',
      'universal-timer-notification',
      'verified-change'
    )
    AND NOT (
      type IS 'message-request-response-event'
      AND json_extract(json, '$.messageRequestResponseEvent') IN ('ACCEPT', 'BLOCK', 'UNBLOCK')
    )
  `;

  const [updateShouldAffectPreview] = sql`
    --- These will be re-added below
    DROP INDEX messages_preview;
    DROP INDEX messages_preview_without_story;
    DROP INDEX messages_activity;
    DROP INDEX message_user_initiated;

    --- These will also be re-added below
    ALTER TABLE messages DROP COLUMN shouldAffectActivity;
    ALTER TABLE messages DROP COLUMN shouldAffectPreview;

    --- (change: added message-request-response-event->ACCEPT/BLOCK/UNBLOCK)
    ALTER TABLE messages
      ADD COLUMN shouldAffectPreview INTEGER
      GENERATED ALWAYS AS (${shouldAffectActivityOrPreview});
    ALTER TABLE messages
      ADD COLUMN shouldAffectActivity INTEGER
      GENERATED ALWAYS AS (${shouldAffectActivityOrPreview});

    --- From migration 88
    CREATE INDEX messages_preview ON messages
      (conversationId, shouldAffectPreview, isGroupLeaveEventFromOther,
      received_at, sent_at);

    --- From migration 88
    CREATE INDEX messages_preview_without_story ON messages
      (conversationId, shouldAffectPreview, isGroupLeaveEventFromOther,
      received_at, sent_at) WHERE storyId IS NULL;

    --- From migration 88
    CREATE INDEX messages_activity ON messages
      (conversationId, shouldAffectActivity, isTimerChangeFromSync,
      isGroupLeaveEventFromOther, received_at, sent_at);

    --- From migration 81
    CREATE INDEX message_user_initiated ON messages (conversationId, isUserInitiatedMessage);
  `;

  db.exec(updateShouldAffectPreview);
}

View file

@@ -20,8 +20,6 @@ import {
  type JobManagerJobType,
} from '../../jobs/JobManager';

export type _AttachmentDownloadJobTypeV1030 = {
  attachment: AttachmentType;
  attempts: number;
@@ -56,192 +54,171 @@ export type _AttachmentDownloadJobTypeV1040 = Omit<
  'attachmentSignature' | 'originalSource'
> & { digest: string };

export default function updateToSchemaVersion1040(
  db: Database,
  logger: LoggerType
): void {
  // 1. Load all existing rows into memory (shouldn't be many)
  const existingJobs: Array<{
    id: string | null;
    timestamp: number | null;
    pending: number | null;
    json: string | null;
  }> = db
    .prepare(
      `
      SELECT id, timestamp, pending, json from attachment_downloads
      `
    )
    .all();

  logger.info(`loaded ${existingJobs.length} existing jobs`);

  // 2. Create new temp table, with a couple new columns and stricter typing
  db.exec(`
    CREATE TABLE tmp_attachment_downloads (
      messageId TEXT NOT NULL REFERENCES messages(id) ON DELETE CASCADE,
      attachmentType TEXT NOT NULL,
      digest TEXT NOT NULL,
      receivedAt INTEGER NOT NULL,
      sentAt INTEGER NOT NULL,
      contentType TEXT NOT NULL,
      size INTEGER NOT NULL,
      attachmentJson TEXT NOT NULL,
      active INTEGER NOT NULL,
      attempts INTEGER NOT NULL,
      retryAfter INTEGER,
      lastAttemptTimestamp INTEGER,

      PRIMARY KEY (messageId, attachmentType, digest)
    ) STRICT;
  `);

  // 3. Drop existing table
  db.exec('DROP TABLE attachment_downloads;');

  // 4. Rename temp table
  db.exec(
    'ALTER TABLE tmp_attachment_downloads RENAME TO attachment_downloads;'
  );

  // 5. Add new index on active & receivedAt. For most queries when there are lots of
  // jobs (like during backup restore), many jobs will match the the WHERE clause, so
  // the ORDER BY on receivedAt is probably the most expensive part.
  db.exec(`
    CREATE INDEX attachment_downloads_active_receivedAt
      ON attachment_downloads (
        active, receivedAt
      );
  `);

  // 6. Add new index on active & messageId. In order to prioritize visible messages,
  // we'll also query for rows with a matching messageId. For these, the messageId
  // matching is likely going to be the most expensive part.
  db.exec(`
    CREATE INDEX attachment_downloads_active_messageId
      ON attachment_downloads (
        active, messageId
      );
  `);

  // 7. Add new index just on messageId, for the ON DELETE CASCADE foreign key
  // constraint
  db.exec(`
    CREATE INDEX attachment_downloads_messageId
      ON attachment_downloads (
        messageId
      );
  `);

  // 8. Rewrite old rows to match new schema
  const rowsToTransfer: Array<
    _AttachmentDownloadJobTypeV1040 & JobManagerJobType
  > = [];

  for (const existingJob of existingJobs) {
    try {
      // Type this as partial in case there is missing data
      const existingJobData: Partial<_AttachmentDownloadJobTypeV1030> =
        jsonToObject(existingJob.json ?? '');

      const updatedJob: Partial<_AttachmentDownloadJobTypeV1040> = {
        messageId: existingJobData.messageId,
        attachmentType: existingJobData.type,
        attachment: existingJobData.attachment,
        // The existing timestamp column works reasonably well in place of
        // actually retrieving the message's receivedAt
        receivedAt: existingJobData.timestamp ?? Date.now(),
        sentAt: existingJobData.timestamp ?? Date.now(),
        digest: existingJobData.attachment?.digest,
        contentType: existingJobData.attachment?.contentType,
        size: existingJobData.attachment?.size,
        active: false, // all jobs are inactive on app start
        attempts: existingJobData.attempts ?? 0,
        retryAfter: null,
        lastAttemptTimestamp: null,
        // adding due to changes in the schema
        source: AttachmentDownloadSource.STANDARD,
        ciphertextSize: 0,
      };

      const parsed = parsePartial(attachmentDownloadJobSchemaV1040, updatedJob);

      rowsToTransfer.push(parsed);
    } catch {
      logger.warn(
        `unable to transfer job ${existingJob.id} to new table; invalid data`
      );
    }
  }

  let numTransferred = 0;
  if (rowsToTransfer.length) {
    logger.info(`transferring ${rowsToTransfer.length} rows`);
    for (const row of rowsToTransfer) {
      const [insertQuery, insertParams] = sql`
        INSERT INTO attachment_downloads
          (
            messageId,
            attachmentType,
            receivedAt,
            sentAt,
            digest,
            contentType,
            size,
            attachmentJson,
            active,
            attempts,
            retryAfter,
            lastAttemptTimestamp
          )
        VALUES
          (
            ${row.messageId},
            ${row.attachmentType},
            ${row.receivedAt},
            ${row.sentAt},
            ${row.digest},
            ${row.contentType},
            ${row.size},
            ${objectToJSON(row.attachment)},
            ${row.active ? 1 : 0},
            ${row.attempts},
            ${row.retryAfter},
            ${row.lastAttemptTimestamp}
          );
      `;
      try {
        db.prepare(insertQuery).run(insertParams);
        numTransferred += 1;
      } catch (error) {
        logger.error('error when transferring row', error);
      }
    }
  }

  logger.info(
    `transferred ${numTransferred} rows, removed ${
      existingJobs.length - numTransferred
    }`
  );
}

View file

@@ -3,48 +3,31 @@
import type { Database } from '@signalapp/sqlcipher';
import { sql } from '../util';

export default function updateToSchemaVersion1050(db: Database): void {
  const [createTables] = sql`
    DROP TABLE IF EXISTS groupSendCombinedEndorsement;
    DROP TABLE IF EXISTS groupSendMemberEndorsement;

    -- From GroupSendEndorsementsResponse->ReceivedEndorsements in libsignal
    -- this is the combined endorsement for all group members
    CREATE TABLE groupSendCombinedEndorsement (
      groupId TEXT NOT NULL PRIMARY KEY, -- Only one endorsement per group
      expiration INTEGER NOT NULL, -- Unix timestamp in seconds
      endorsement BLOB NOT NULL
    ) STRICT;

    -- From GroupSendEndorsementsResponse->ReceivedEndorsements in libsignal
    -- these are the individual endorsements for each group member
    CREATE TABLE groupSendMemberEndorsement (
      groupId TEXT NOT NULL,
      memberAci TEXT NOT NULL,
      expiration INTEGER NOT NULL, -- Unix timestamp in seconds
      endorsement BLOB NOT NULL,
      PRIMARY KEY (groupId, memberAci) -- Only one endorsement per group member
    ) STRICT;
  `;

  db.exec(createTables);
}

View file

@@ -3,54 +3,36 @@
import type { Database } from '@signalapp/sqlcipher';

export default function updateToSchemaVersion1060(db: Database): void {
  db.exec(`
    ALTER TABLE messages
      ADD COLUMN isAddressableMessage INTEGER
      GENERATED ALWAYS AS (
        type IS NULL
        OR
        type IN (
          'incoming',
          'outgoing'
        )
      );

    CREATE INDEX messages_by_date_addressable
      ON messages (
        conversationId, isAddressableMessage, received_at, sent_at
      );

    CREATE TABLE syncTasks(
      id TEXT PRIMARY KEY NOT NULL,
      attempts INTEGER NOT NULL,
      createdAt INTEGER NOT NULL,
      data TEXT NOT NULL,
      envelopeId TEXT NOT NULL,
      sentAt INTEGER NOT NULL,
      type TEXT NOT NULL
    ) STRICT;

    CREATE INDEX syncTasks_order ON syncTasks (
      createdAt, sentAt, id
    )
  `);
}

View file

@@ -3,53 +3,35 @@
import type { Database } from '@signalapp/sqlcipher';

export default function updateToSchemaVersion1070(db: Database): void {
  db.exec(`
    CREATE TABLE attachment_backup_jobs (
      mediaName TEXT NOT NULL PRIMARY KEY,
      type TEXT NOT NULL,
      data TEXT NOT NULL,
      receivedAt INTEGER NOT NULL,

      -- job manager fields
      attempts INTEGER NOT NULL,
      active INTEGER NOT NULL,
      retryAfter INTEGER,
      lastAttemptTimestamp INTEGER
    ) STRICT;

    CREATE INDEX attachment_backup_jobs_receivedAt
      ON attachment_backup_jobs (
        receivedAt
      );

    CREATE INDEX attachment_backup_jobs_type_receivedAt
      ON attachment_backup_jobs (
        type, receivedAt
      );

    CREATE TABLE backup_cdn_object_metadata (
      mediaId TEXT NOT NULL PRIMARY KEY,
      cdnNumber INTEGER NOT NULL,
      sizeOnBackupCdn INTEGER
    ) STRICT;
  `);
}

View file

@@ -3,29 +3,11 @@
import type { Database } from '@signalapp/sqlcipher';

export default function updateToSchemaVersion1080(db: Database): void {
  db.exec(`
    CREATE INDEX messages_by_date_addressable_nondisappearing
      ON messages (
        conversationId, isAddressableMessage, received_at, sent_at
      ) WHERE expireTimer IS NULL;
  `);
}

View file

@@ -3,30 +3,12 @@
import type { Database } from '@signalapp/sqlcipher';

export default function updateToSchemaVersion1090(db: Database): void {
  db.exec(`
    CREATE INDEX reactions_messageId
      ON reactions (messageId);

    CREATE INDEX storyReads_storyId
      ON storyReads (storyId);
  `);
}

View file

@@ -2,62 +2,45 @@
// SPDX-License-Identifier: AGPL-3.0-only

import type { Database } from '@signalapp/sqlcipher';
import { sql } from '../util';

export default function updateToSchemaVersion1100(db: Database): void {
  const [query] = sql`
    -- Fix: Query went from readStatus to seenStatus but index wasn't updated
    DROP INDEX IF EXISTS messages_callHistory_readStatus;
    DROP INDEX IF EXISTS messages_callHistory_seenStatus;
    CREATE INDEX messages_callHistory_seenStatus
      ON messages (type, seenStatus)
      WHERE type IS 'call-history';

    -- Update to index created in 89: add sent_at to make it covering, and where clause to make it smaller
    DROP INDEX IF EXISTS messages_call;
    CREATE INDEX messages_call ON messages
      (type, conversationId, callId, sent_at)
      WHERE type IS 'call-history';

    -- Update to index created in 89: add callId and peerId to make it covering
    DROP INDEX IF EXISTS callsHistory_order;
    CREATE INDEX callsHistory_order ON callsHistory
      (timestamp DESC, callId, peerId);

    -- Update to index created in 89: add timestamp for querying by order and callId to make it covering
    DROP INDEX IF EXISTS callsHistory_byConversation;
    DROP INDEX IF EXISTS callsHistory_byConversation_order;
    CREATE INDEX callsHistory_byConversation_order ON callsHistory (peerId, timestamp DESC, callId);

    -- Optimize markAllCallHistoryRead
    DROP INDEX IF EXISTS messages_callHistory_markReadBefore;
    CREATE INDEX messages_callHistory_markReadBefore
      ON messages (type, seenStatus, sent_at DESC)
      WHERE type IS 'call-history';

    -- Optimize markAllCallHistoryReadInConversation
    DROP INDEX IF EXISTS messages_callHistory_markReadByConversationBefore;
    CREATE INDEX messages_callHistory_markReadByConversationBefore
      ON messages (type, conversationId, seenStatus, sent_at DESC)
      WHERE type IS 'call-history';
  `;

  db.exec(query);
}

View file

@@ -3,33 +3,15 @@
import type { Database } from '@signalapp/sqlcipher';

export default function updateToSchemaVersion1110(db: Database): void {
  db.exec(`
    ALTER TABLE stickers
      ADD COLUMN version INTEGER NOT NULL DEFAULT 1;

    ALTER TABLE stickers
      ADD COLUMN localKey TEXT;

    ALTER TABLE stickers
      ADD COLUMN size INTEGER;
  `);
}

View file

@@ -3,31 +3,13 @@
import type { Database } from '@signalapp/sqlcipher';

export default function updateToSchemaVersion1120(db: Database): void {
  /** Adds indexes for all tables with foreign key relationships to messages(id) */
  db.exec(`
    CREATE INDEX edited_messages_messageId
      ON edited_messages(messageId);

    CREATE INDEX mentions_messageId
      ON mentions(messageId);
  `);
}

View file

@@ -3,29 +3,11 @@
import type { Database } from '@signalapp/sqlcipher';

export default function updateToSchemaVersion1130(db: Database): void {
  // This is to improve the performance of getAllStories
  db.exec(`
    CREATE INDEX messages_isStory
      ON messages(received_at, sent_at)
      WHERE isStory = 1;
  `);
}

View file

@@ -1,31 +1,15 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

import type { Database } from '@signalapp/sqlcipher';

export default function updateToSchemaVersion1140(db: Database): void {
  db.exec(`
    DROP INDEX IF EXISTS callLinks_deleted;

    ALTER TABLE callLinks
      ADD COLUMN deleted INTEGER NOT NULL DEFAULT 0;

    CREATE INDEX callLinks_deleted
      ON callLinks (deleted, roomId);
  `);
}

View file

@@ -1,30 +1,14 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

import type { Database } from '@signalapp/sqlcipher';

export default function updateToSchemaVersion1150(db: Database): void {
  db.exec(`
    -- All future conversations will start from '1'
    ALTER TABLE conversations
      ADD COLUMN expireTimerVersion INTEGER NOT NULL DEFAULT 1;

    -- All current conversations will start from '2'
    UPDATE conversations SET expireTimerVersion = 2;
  `);
}

View file

@@ -1,36 +1,20 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

import type { Database } from '@signalapp/sqlcipher';
import { sql, sqlConstant } from '../util';
import { CallDirection, CallStatusValue } from '../../types/CallDisposition';

const CALL_STATUS_MISSED = sqlConstant(CallStatusValue.Missed);
const CALL_DIRECTION_INCOMING = sqlConstant(CallDirection.Incoming);

export default function updateToSchemaVersion1160(db: Database): void {
  const [query] = sql`
    DROP INDEX IF EXISTS callsHistory_incoming_missed;

    CREATE INDEX callsHistory_incoming_missed
      ON callsHistory (callId, status, direction)
      WHERE status IS ${CALL_STATUS_MISSED}
      AND direction IS ${CALL_DIRECTION_INCOMING};
  `;

  db.exec(query);
}

View file

@@ -1,29 +1,14 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

import type { Database } from '@signalapp/sqlcipher';
import { sql } from '../util';

export default function updateToSchemaVersion1170(db: Database): void {
  const [query] = sql`
    DROP INDEX IF EXISTS messages_callHistory_markReadBefore;

    CREATE INDEX messages_callHistory_markReadBefore
      ON messages (type, seenStatus, received_at DESC)
      WHERE type IS 'call-history';
  `;

  db.exec(query);
}

View file

@@ -1,37 +1,22 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

import type { Database } from '@signalapp/sqlcipher';
import { AttachmentDownloadSource } from '../Interface';

export default function updateToSchemaVersion1180(db: Database): void {
  db.exec(`
    ALTER TABLE attachment_downloads
      ADD COLUMN source TEXT NOT NULL DEFAULT ${AttachmentDownloadSource.STANDARD};

    ALTER TABLE attachment_downloads
      -- this default value will be overridden by getNextAttachmentDownloadJobs
      ADD COLUMN ciphertextSize INTEGER NOT NULL DEFAULT 0;
  `);

  db.exec(`
    CREATE INDEX attachment_downloads_source_ciphertextSize
      ON attachment_downloads (
        source, ciphertextSize
      );
  `);
}

View file

@@ -1,38 +1,22 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

import type { Database } from '@signalapp/sqlcipher';

export default function updateToSchemaVersion1190(db: Database): void {
  db.exec(`
    ALTER TABLE callLinks ADD COLUMN storageID TEXT;
    ALTER TABLE callLinks ADD COLUMN storageVersion INTEGER;
    ALTER TABLE callLinks ADD COLUMN storageUnknownFields BLOB;
    ALTER TABLE callLinks ADD COLUMN storageNeedsSync INTEGER NOT NULL DEFAULT 0;
    ALTER TABLE callLinks ADD COLUMN deletedAt INTEGER;
  `);

  db.prepare(
    `
    UPDATE callLinks
    SET deletedAt = $deletedAt
    WHERE deleted = 1;
    `
  ).run({
    deletedAt: new Date().getTime(),
  });
}

View file

@@ -1,29 +1,14 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

import type { Database } from '@signalapp/sqlcipher';

export default function updateToSchemaVersion1200(db: Database): void {
  // The standard getNextAttachmentDownloadJobs query uses active & source conditions,
  // ordered by received_at
  db.exec(`
    CREATE INDEX attachment_downloads_active_source_receivedAt
      ON attachment_downloads (
        active, source, receivedAt
      );
  `);
}

View file

@@ -1,30 +1,15 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

import type { Database } from '@signalapp/sqlcipher';

export default function updateToSchemaVersion1210(db: Database): void {
  // The standard getNextAttachmentDownloadJobs query uses active & source conditions,
  // ordered by received_at
  db.exec(`
    ALTER TABLE callsHistory
      ADD COLUMN startedById TEXT DEFAULT NULL;

    ALTER TABLE callsHistory
      ADD COLUMN endedTimestamp INTEGER DEFAULT NULL;
  `);
}

View file

@@ -120,100 +120,84 @@ function migrateSession(
  throw missingCaseError(session.version);
}

export default function updateToSchemaVersion1220(
  db: Database,
  logger: LoggerType
): void {
  db.exec(`
    ALTER TABLE sessions
      RENAME TO old_sessions;

    CREATE TABLE sessions (
      id TEXT NOT NULL PRIMARY KEY,
      ourServiceId TEXT NOT NULL,
      serviceId TEXT NOT NULL,
      conversationId TEXT NOT NULL,
      deviceId INTEGER NOT NULL,
      record BLOB NOT NULL
    ) STRICT;
  `);

  const getItem = db.prepare(
    `
    SELECT json -> '$.value' FROM items WHERE id IS ?
    `,
    {
      pluck: true,
    }
  );

  const identityKeyMapJson = getItem.get<string>(['identityKeyMap']);
  const registrationIdMapJson = getItem.get<string>(['registrationIdMap']);

  // If we don't have private keys - the sessions cannot be used anyway
  if (!identityKeyMapJson || !registrationIdMapJson) {
    logger.info('no identity/registration id');
    db.exec('DROP TABLE old_sessions');
    return;
  }

  const identityKeyMap = identityKeyMapSchema.parse(
    JSON.parse(identityKeyMapJson)
  );
  const registrationIdMap = registrationIdMapSchema.parse(
    JSON.parse(registrationIdMapJson)
  );

  const getSessionsPage = db.prepare(
    'DELETE FROM old_sessions RETURNING * LIMIT 1000'
  );
  const insertSession = db.prepare(`
    INSERT INTO sessions
      (id, ourServiceId, serviceId, conversationId, deviceId, record)
    VALUES
      ($id, $ourServiceId, $serviceId, $conversationId, $deviceId, $record)
  `);

  let migrated = 0;
  let failed = 0;

  // eslint-disable-next-line no-constant-condition
  while (true) {
    const rows: Array<PreviousSessionRowType> = getSessionsPage.all();
    if (rows.length === 0) {
      break;
    }

    for (const row of rows) {
      try {
        insertSession.run(
          migrateSession(row, identityKeyMap, registrationIdMap, logger)
        );
        migrated += 1;
      } catch (error) {
        failed += 1;
        logger.error('failed to migrate session', Errors.toLogFormat(error));
      }
    }
  }

  logger.info(`migrated ${migrated} sessions, ${failed} failed`);

  db.exec('DROP TABLE old_sessions');
}

View file

@@ -1,28 +1,12 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

import type { Database } from '@signalapp/sqlcipher';

export default function updateToSchemaVersion1230(db: Database): void {
  db.exec(`
    DROP INDEX IF EXISTS callLinks_adminKey;

    CREATE INDEX callLinks_adminKey
      ON callLinks (adminKey);
  `);
}

View file

@@ -3,33 +3,16 @@
import type { Database } from '@signalapp/sqlcipher';
import { sql } from '../util';

export default function updateToSchemaVersion1240(db: Database): void {
  const [createTable] = sql`
    CREATE TABLE defunctCallLinks (
      roomId TEXT NOT NULL PRIMARY KEY,
      rootKey BLOB NOT NULL,
      adminKey BLOB
    ) STRICT;
  `;

  db.exec(createTable);
}

View file

@@ -1,28 +1,12 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

import type { Database } from '@signalapp/sqlcipher';

export default function updateToSchemaVersion1250(db: Database): void {
  db.exec(`
    ALTER TABLE defunctCallLinks ADD COLUMN storageID TEXT;
    ALTER TABLE defunctCallLinks ADD COLUMN storageVersion INTEGER;
    ALTER TABLE defunctCallLinks ADD COLUMN storageUnknownFields BLOB;
    ALTER TABLE defunctCallLinks ADD COLUMN storageNeedsSync INTEGER NOT NULL DEFAULT 0;
  `);
}

View file

@@ -1,30 +1,13 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

import type { Database } from '@signalapp/sqlcipher';
import { sql } from '../util';

export default function updateToSchemaVersion1260(db: Database): void {
  const [query] = sql`
    DROP INDEX IF EXISTS syncTasks_order;
    CREATE INDEX syncTasks_delete ON syncTasks (attempts DESC);
  `;

  db.exec(query);
}

View file

@@ -1,53 +1,36 @@
// Copyright 2025 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

import type { Database } from '@signalapp/sqlcipher';
import { sql } from '../util';

export default function updateToSchemaVersion1270(db: Database): void {
  const [query] = sql`
    ALTER TABLE messages
      ADD COLUMN timestamp INTEGER;
    ALTER TABLE messages
      ADD COLUMN received_at_ms INTEGER;
    ALTER TABLE messages
      ADD COLUMN unidentifiedDeliveryReceived INTEGER;
    ALTER TABLE messages
      ADD COLUMN serverTimestamp INTEGER;

    ALTER TABLE messages
      RENAME COLUMN source TO legacySource;
    ALTER TABLE messages
      ADD COLUMN source TEXT;

    UPDATE messages SET
      timestamp = json_extract(json, '$.timestamp'),
      received_at_ms = json_extract(json, '$.received_at_ms'),
      unidentifiedDeliveryReceived =
        json_extract(json, '$.unidentifiedDeliveryReceived'),
      serverTimestamp =
        json_extract(json, '$.serverTimestamp'),
      source = IFNULL(json_extract(json, '$.source'), '+' || legacySource);

    ALTER TABLE messages
      DROP COLUMN legacySource;
  `;

  db.exec(query);
}

View file

@ -14,166 +14,142 @@ import { sql } from '../util';
import type { WritableDB } from '../Interface';
import { getOurUuid } from './41-uuid-keys';
export const version = 1280;
export function updateToSchemaVersion1280(
currentVersion: number,
export default function updateToSchemaVersion1280(
db: WritableDB,
logger: LoggerType
): void {
if (currentVersion >= 1280) {
return;
const ourAci = getOurUuid(db);
let rows = db.prepare('SELECT * FROM unprocessed').all();
const [query] = sql`
DROP TABLE unprocessed;
CREATE TABLE unprocessed(
id TEXT NOT NULL PRIMARY KEY ASC,
type INTEGER NOT NULL,
timestamp INTEGER NOT NULL,
attempts INTEGER NOT NULL,
receivedAtCounter INTEGER NOT NULL,
urgent INTEGER NOT NULL,
story INTEGER NOT NULL,
serverGuid TEXT NOT NULL,
serverTimestamp INTEGER NOT NULL,
isEncrypted INTEGER NOT NULL,
content BLOB NOT NULL,
messageAgeSec INTEGER NOT NULL,
destinationServiceId TEXT NOT NULL,
-- Not present for 1:1 messages and not sealed messages
groupId TEXT,
-- Not present for sealed envelopes
reportingToken BLOB,
source TEXT,
sourceServiceId TEXT,
sourceDevice TEXT,
-- Present only for PNP change number
updatedPni TEXT
) STRICT;
CREATE INDEX unprocessed_timestamp ON unprocessed
(timestamp);
CREATE INDEX unprocessed_byReceivedAtCounter ON unprocessed
(receivedAtCounter);
`;
db.exec(query);
const insertStmt = db.prepare(`
INSERT INTO unprocessed
(id, type, timestamp, attempts, receivedAtCounter, urgent, story,
serverGuid, serverTimestamp, isEncrypted, content, source,
messageAgeSec, sourceServiceId, sourceDevice,
destinationServiceId, reportingToken)
VALUES
($id, $type, $timestamp, $attempts, $receivedAtCounter, $urgent, $story,
$serverGuid, $serverTimestamp, $isEncrypted, $content, $source,
$messageAgeSec, $sourceServiceId, $sourceDevice,
$destinationServiceId, $reportingToken);
`);
let oldEnvelopes = 0;
if (!ourAci) {
if (rows.length) {
logger.warn(`no aci, dropping ${rows.length} envelopes`);
rows = [];
}
}
db.transaction(() => {
const ourAci = getOurUuid(db);
for (const row of rows) {
const {
id,
envelope,
decrypted,
timestamp,
attempts,
version: envelopeVersion,
receivedAtCounter,
urgent,
story,
serverGuid,
serverTimestamp,
...rest
} = row;
let rows = db.prepare('SELECT * FROM unprocessed').all();
const [query] = sql`
DROP TABLE unprocessed;
CREATE TABLE unprocessed(
id TEXT NOT NULL PRIMARY KEY ASC,
type INTEGER NOT NULL,
timestamp INTEGER NOT NULL,
attempts INTEGER NOT NULL,
receivedAtCounter INTEGER NOT NULL,
urgent INTEGER NOT NULL,
story INTEGER NOT NULL,
serverGuid TEXT NOT NULL,
serverTimestamp INTEGER NOT NULL,
isEncrypted INTEGER NOT NULL,
content BLOB NOT NULL,
messageAgeSec INTEGER NOT NULL,
destinationServiceId TEXT NOT NULL,
-- Not present for 1:1 messages and not sealed messages
groupId TEXT,
-- Not present for sealed envelopes
reportingToken BLOB,
source TEXT,
sourceServiceId TEXT,
sourceDevice TEXT,
-- Present only for PNP change number
updatedPni TEXT
) STRICT;
CREATE INDEX unprocessed_timestamp ON unprocessed
(timestamp);
CREATE INDEX unprocessed_byReceivedAtCounter ON unprocessed
(receivedAtCounter);
`;
db.exec(query);
const insertStmt = db.prepare(`
INSERT INTO unprocessed
(id, type, timestamp, attempts, receivedAtCounter, urgent, story,
serverGuid, serverTimestamp, isEncrypted, content, source,
messageAgeSec, sourceServiceId, sourceDevice,
destinationServiceId, reportingToken)
VALUES
($id, $type, $timestamp, $attempts, $receivedAtCounter, $urgent, $story,
$serverGuid, $serverTimestamp, $isEncrypted, $content, $source,
$messageAgeSec, $sourceServiceId, $sourceDevice,
$destinationServiceId, $reportingToken);
`);
let oldEnvelopes = 0;
if (!ourAci) {
if (rows.length) {
logger.warn(
`updateToSchemaVersion1280: no aci, dropping ${rows.length} envelopes`
);
rows = [];
}
// Skip old and/or invalid rows
if (envelopeVersion !== 2 || !envelope) {
oldEnvelopes += 1;
continue;
}
for (const row of rows) {
const {
id,
envelope,
decrypted,
timestamp,
attempts,
version: envelopeVersion,
receivedAtCounter,
urgent,
story,
serverGuid,
serverTimestamp,
...rest
} = row;
// Skip old and/or invalid rows
if (envelopeVersion !== 2 || !envelope) {
oldEnvelopes += 1;
continue;
}
try {
const decoded = Proto.Envelope.decode(
Buffer.from(String(envelope), 'base64')
);
if (!decoded.content) {
throw new Error('Missing envelope content');
}
const content = decrypted
? Buffer.from(String(decrypted), 'base64')
: decoded.content;
insertStmt.run({
...rest,
id,
type: decoded.type ?? Proto.Envelope.Type.UNKNOWN,
content: content ?? null,
isEncrypted: decrypted ? 0 : 1,
timestamp: timestamp || Date.now(),
attempts: attempts || 0,
receivedAtCounter: receivedAtCounter || 0,
urgent: urgent ? 1 : 0,
story: story ? 1 : 0,
serverGuid: serverGuid || getGuid(),
serverTimestamp: serverTimestamp || 0,
destinationServiceId:
normalizeServiceId(
decoded.destinationServiceId || ourAci,
'Envelope.destinationServiceId'
) ?? null,
updatedPni: isUntaggedPniString(decoded.updatedPni)
? normalizePni(
toTaggedPni(decoded.updatedPni),
'Envelope.updatedPni'
)
: null,
// Sadly not captured previously
messageAgeSec: 0,
reportingToken: decoded.reportSpamToken?.length
? decoded.reportSpamToken
: null,
});
} catch (error) {
logger.warn(
'updateToSchemaVersion1280: failed to migrate unprocessed',
id,
error
);
}
}
if (oldEnvelopes !== 0) {
logger.warn(
`updateToSchemaVersion1280: dropped ${oldEnvelopes} envelopes`
try {
const decoded = Proto.Envelope.decode(
Buffer.from(String(envelope), 'base64')
);
if (!decoded.content) {
throw new Error('Missing envelope content');
}
const content = decrypted
? Buffer.from(String(decrypted), 'base64')
: decoded.content;
insertStmt.run({
...rest,
id,
type: decoded.type ?? Proto.Envelope.Type.UNKNOWN,
content: content ?? null,
isEncrypted: decrypted ? 0 : 1,
timestamp: timestamp || Date.now(),
attempts: attempts || 0,
receivedAtCounter: receivedAtCounter || 0,
urgent: urgent ? 1 : 0,
story: story ? 1 : 0,
serverGuid: serverGuid || getGuid(),
serverTimestamp: serverTimestamp || 0,
destinationServiceId:
normalizeServiceId(
decoded.destinationServiceId || ourAci,
'Envelope.destinationServiceId'
) ?? null,
updatedPni: isUntaggedPniString(decoded.updatedPni)
? normalizePni(toTaggedPni(decoded.updatedPni), 'Envelope.updatedPni')
: null,
// Sadly not captured previously
messageAgeSec: 0,
reportingToken: decoded.reportSpamToken?.length
? decoded.reportSpamToken
: null,
});
} catch (error) {
logger.warn('failed to migrate unprocessed', id, error);
}
}
db.pragma('user_version = 1280');
})();
logger.info('updateToSchemaVersion1280: success!');
if (oldEnvelopes !== 0) {
logger.warn(`dropped ${oldEnvelopes} envelopes`);
}
}
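
A pattern worth noting across this whole diff: every migration loses its currentVersion guard, its own db.transaction() wrapper, its db.pragma('user_version = N') call, and its success log. That bookkeeping is presumably centralized in a shared runner. A minimal sketch of such a runner, under the assumption that it exists elsewhere in this commit (names and signature are illustrative, not the actual code):

import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';

type Migration = Readonly<{ version: number; update: (db: Database) => void }>;

function runMigrations(
  db: Database,
  migrations: ReadonlyArray<Migration>,
  logger: LoggerType
): void {
  for (const { version, update } of migrations) {
    const current = db.pragma('user_version', { simple: true }) as number;
    if (current >= version) {
      continue;
    }
    // One transaction and one user_version bump per migration, handled here
    // instead of inside each updateToSchemaVersionN function.
    db.transaction(() => {
      update(db);
      db.pragma(`user_version = ${version}`);
    })();
    logger.info(`updateToSchemaVersion${version}: success!`);
  }
}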


@ -1,34 +1,17 @@
// Copyright 2025 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import type { LoggerType } from '../../types/Logging';
import { sql } from '../util';
import type { WritableDB } from '../Interface';
export const version = 1290;
export default function updateToSchemaVersion1290(db: WritableDB): void {
const [query] = sql`
ALTER TABLE unprocessed RENAME COLUMN sourceDevice TO legacySourceDevice;
ALTER TABLE unprocessed ADD COLUMN sourceDevice INTEGER;
export function updateToSchemaVersion1290(
currentVersion: number,
db: WritableDB,
logger: LoggerType
): void {
if (currentVersion >= 1290) {
return;
}
UPDATE unprocessed
SET sourceDevice = legacySourceDevice;
db.transaction(() => {
const [query] = sql`
ALTER TABLE unprocessed RENAME COLUMN sourceDevice TO legacySourceDevice;
ALTER TABLE unprocessed ADD COLUMN sourceDevice INTEGER;
UPDATE unprocessed
SET sourceDevice = legacySourceDevice;
ALTER TABLE unprocessed DROP COLUMN legacySourceDevice;
`;
db.exec(query);
db.pragma('user_version = 1290');
})();
logger.info('updateToSchemaVersion1290: success!');
ALTER TABLE unprocessed DROP COLUMN legacySourceDevice;
`;
db.exec(query);
}


@ -1,35 +1,18 @@
// Copyright 2025 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import type { LoggerType } from '../../types/Logging';
import { sql } from '../util';
import type { WritableDB } from '../Interface';
export const version = 1300;
export default function updateToSchemaVersion1300(db: WritableDB): void {
const [query] = sql`
ALTER TABLE sticker_references
ADD COLUMN stickerId INTEGER NOT NULL DEFAULT -1;
ALTER TABLE sticker_references
ADD COLUMN isUnresolved INTEGER NOT NULL DEFAULT 0;
export function updateToSchemaVersion1300(
currentVersion: number,
db: WritableDB,
logger: LoggerType
): void {
if (currentVersion >= 1300) {
return;
}
db.transaction(() => {
const [query] = sql`
ALTER TABLE sticker_references
ADD COLUMN stickerId INTEGER NOT NULL DEFAULT -1;
ALTER TABLE sticker_references
ADD COLUMN isUnresolved INTEGER NOT NULL DEFAULT 0;
CREATE INDEX unresolved_sticker_refs
ON sticker_references (packId, stickerId)
WHERE isUnresolved IS 1;
`;
db.exec(query);
db.pragma('user_version = 1300');
})();
logger.info('updateToSchemaVersion1300: success!');
CREATE INDEX unresolved_sticker_refs
ON sticker_references (packId, stickerId)
WHERE isUnresolved IS 1;
`;
db.exec(query);
}
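
The index created above is partial (WHERE isUnresolved IS 1), so queries must repeat that predicate for SQLite to use it. An illustrative lookup, assuming a helper of this shape rather than quoting real app code:

import type { Database } from '@signalapp/sqlcipher';

// Fetch unresolved references for one pack; `isUnresolved IS 1` matches the
// partial-index predicate, so unresolved_sticker_refs can satisfy the query.
function getUnresolvedStickerRefs(
  db: Database,
  packId: string
): Array<{ packId: string; stickerId: number }> {
  return db
    .prepare(
      `SELECT packId, stickerId
       FROM sticker_references
       WHERE packId = $packId AND isUnresolved IS 1`
    )
    .all({ packId }) as Array<{ packId: string; stickerId: number }>;
}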


@ -4,37 +4,24 @@ import type { LoggerType } from '../../types/Logging';
import { sql } from '../util';
import type { WritableDB } from '../Interface';
export const version = 1310;
// Value from ts/util/timestamp.ts at the time of creation of this migration
const MAX_SAFE_DATE = 8640000000000000;
export function updateToSchemaVersion1310(
currentVersion: number,
export default function updateToSchemaVersion1310(
db: WritableDB,
logger: LoggerType
): void {
if (currentVersion >= 1310) {
return;
const [query, params] = sql`
UPDATE conversations
SET json = json_replace(
json,
'$.muteExpiresAt',
9007199254740991 -- max safe integer
)
WHERE json ->> '$.muteExpiresAt' IS ${MAX_SAFE_DATE};
`;
const { changes } = db.prepare(query).run(params);
if (changes !== 0) {
logger.warn(`fixed ${changes} conversations`);
}
db.transaction(() => {
const [query, params] = sql`
UPDATE conversations
SET json = json_replace(
json,
'$.muteExpiresAt',
9007199254740991 -- max safe integer
)
WHERE json ->> '$.muteExpiresAt' IS ${MAX_SAFE_DATE};
`;
const { changes } = db.prepare(query).run(params);
if (changes !== 0) {
logger.warn(`updateToSchemaVersion1310: fixed ${changes} conversations`);
}
db.pragma('user_version = 1310');
})();
logger.info('updateToSchemaVersion1310: success!');
}
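
The two constants involved here are easy to confuse: 8640000000000000 (MAX_SAFE_DATE) is the largest millisecond value a JavaScript Date accepts, while 9007199254740991 is Number.MAX_SAFE_INTEGER, which the app now uses to mean "muted forever". A quick illustration of the relationship:

// Both are valid numbers, but only the smaller one is a representable Date.
const MAX_SAFE_DATE = 8640000000000000;
console.log(new Date(MAX_SAFE_DATE).toISOString()); // '+275760-09-13T00:00:00.000Z'
console.log(Number.MAX_SAFE_INTEGER === 9007199254740991); // true
console.log(Number.isNaN(new Date(Number.MAX_SAFE_INTEGER).getTime())); // true: out of Date range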


@ -1,38 +1,21 @@
// Copyright 2025 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import type { LoggerType } from '../../types/Logging';
import { sql } from '../util';
import type { WritableDB } from '../Interface';
export const version = 1320;
export default function updateToSchemaVersion1320(db: WritableDB): void {
const [query] = sql`
DROP INDEX unprocessed_timestamp;
export function updateToSchemaVersion1320(
currentVersion: number,
db: WritableDB,
logger: LoggerType
): void {
if (currentVersion >= 1320) {
return;
}
ALTER TABLE unprocessed
ADD COLUMN receivedAtDate INTEGER DEFAULT 0 NOT NULL;
db.transaction(() => {
const [query] = sql`
DROP INDEX unprocessed_timestamp;
UPDATE unprocessed
SET receivedAtDate = timestamp;
ALTER TABLE unprocessed
ADD COLUMN receivedAtDate INTEGER DEFAULT 0 NOT NULL;
UPDATE unprocessed
SET receivedAtDate = timestamp;
CREATE INDEX unprocessed_byReceivedAtDate ON unprocessed
(receivedAtDate);
`;
db.exec(query);
db.pragma('user_version = 1320');
})();
logger.info('updateToSchemaVersion1320: success!');
CREATE INDEX unprocessed_byReceivedAtDate ON unprocessed
(receivedAtDate);
`;
db.exec(query);
}


@ -1,29 +1,12 @@
// Copyright 2025 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
import { sql } from '../util';
export const version = 1330;
export default function updateToSchemaVersion1330(db: Database): void {
const [query] = sql`
CREATE INDEX syncTasks_type ON syncTasks (type);
`;
export function updateToSchemaVersion1330(
currentVersion: number,
db: Database,
logger: LoggerType
): void {
if (currentVersion >= 1330) {
return;
}
db.transaction(() => {
const [query] = sql`
CREATE INDEX syncTasks_type ON syncTasks (type);
`;
db.exec(query);
db.pragma('user_version = 1330');
})();
logger.info('updateToSchemaVersion1330: success!');
db.exec(query);
}


@ -1,44 +1,27 @@
// Copyright 2025 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
import { sql } from '../util';
export const version = 1340;
export default function updateToSchemaVersion1340(db: Database): void {
const [query] = sql`
CREATE TABLE recentGifs (
id TEXT NOT NULL PRIMARY KEY,
title TEXT NOT NULL,
description TEXT NOT NULL,
previewMedia_url TEXT NOT NULL,
previewMedia_width INTEGER NOT NULL,
previewMedia_height INTEGER NOT NULL,
attachmentMedia_url TEXT NOT NULL,
attachmentMedia_width INTEGER NOT NULL,
attachmentMedia_height INTEGER NOT NULL,
lastUsedAt INTEGER NOT NULL
) STRICT;
export function updateToSchemaVersion1340(
currentVersion: number,
db: Database,
logger: LoggerType
): void {
if (currentVersion >= 1340) {
return;
}
CREATE INDEX recentGifs_order ON recentGifs (
lastUsedAt DESC
);
`;
db.transaction(() => {
const [query] = sql`
CREATE TABLE recentGifs (
id TEXT NOT NULL PRIMARY KEY,
title TEXT NOT NULL,
description TEXT NOT NULL,
previewMedia_url TEXT NOT NULL,
previewMedia_width INTEGER NOT NULL,
previewMedia_height INTEGER NOT NULL,
attachmentMedia_url TEXT NOT NULL,
attachmentMedia_width INTEGER NOT NULL,
attachmentMedia_height INTEGER NOT NULL,
lastUsedAt INTEGER NOT NULL
) STRICT;
CREATE INDEX recentGifs_order ON recentGifs (
lastUsedAt DESC
);
`;
db.exec(query);
db.pragma('user_version = 1340');
})();
logger.info('updateToSchemaVersion1340: success!');
db.exec(query);
}
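
A sketch of how this table and its recentGifs_order index might be used: upsert on selection, then read most-recent-first. The helper and its column values are illustrative, not code from this commit:

import type { Database } from '@signalapp/sqlcipher';

function touchRecentGif(db: Database): void {
  db.prepare(
    `INSERT OR REPLACE INTO recentGifs
     (id, title, description,
      previewMedia_url, previewMedia_width, previewMedia_height,
      attachmentMedia_url, attachmentMedia_width, attachmentMedia_height,
      lastUsedAt)
     VALUES
     ($id, $title, $description,
      $previewUrl, $previewWidth, $previewHeight,
      $attachmentUrl, $attachmentWidth, $attachmentHeight,
      $lastUsedAt)`
  ).run({
    id: 'tenor-123',
    title: 'wave',
    description: 'waving cat',
    previewUrl: 'https://example.invalid/preview.mp4',
    previewWidth: 200,
    previewHeight: 200,
    attachmentUrl: 'https://example.invalid/full.mp4',
    attachmentWidth: 480,
    attachmentHeight: 480,
    lastUsedAt: Date.now(),
  });

  // Most recently used first; this ORDER BY is what recentGifs_order serves.
  const recent = db
    .prepare('SELECT id FROM recentGifs ORDER BY lastUsedAt DESC LIMIT 24')
    .all();
  console.log(recent);
}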


@ -1,58 +1,41 @@
// Copyright 2025 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
import { sql } from '../util';
export const version = 1350;
export default function updateToSchemaVersion1350(db: Database): void {
const [query] = sql`
CREATE TABLE notificationProfiles(
id TEXT PRIMARY KEY NOT NULL,
export function updateToSchemaVersion1350(
currentVersion: number,
db: Database,
logger: LoggerType
): void {
if (currentVersion >= 1350) {
return;
}
name TEXT NOT NULL,
emoji TEXT,
/* A numeric representation of a color, like 0xAARRGGBB */
color INTEGER NOT NULL,
db.transaction(() => {
const [query] = sql`
CREATE TABLE notificationProfiles(
id TEXT PRIMARY KEY NOT NULL,
name TEXT NOT NULL,
emoji TEXT,
/* A numeric representation of a color, like 0xAARRGGBB */
color INTEGER NOT NULL,
createdAtMs INTEGER NOT NULL,
allowAllCalls INTEGER NOT NULL,
allowAllMentions INTEGER NOT NULL,
createdAtMs INTEGER NOT NULL,
/* A JSON array of conversationId strings */
allowedMembersJson TEXT NOT NULL,
scheduleEnabled INTEGER NOT NULL,
/* 24-hour clock int, 0000-2359 (e.g., 15, 900, 1130, 2345) */
scheduleStartTime INTEGER,
scheduleEndTime INTEGER,
allowAllCalls INTEGER NOT NULL,
allowAllMentions INTEGER NOT NULL,
/* A JSON object with true/false for each of the numbers in the Protobuf enum */
scheduleDaysEnabledJson TEXT,
deletedAtTimestampMs INTEGER,
/* A JSON array of conversationId strings */
allowedMembersJson TEXT NOT NULL,
scheduleEnabled INTEGER NOT NULL,
storageID TEXT,
storageVersion INTEGER,
storageUnknownFields BLOB,
storageNeedsSync INTEGER NOT NULL DEFAULT 0
) STRICT;
`;
/* 24-hour clock int, 0000-2359 (e.g., 15, 900, 1130, 2345) */
scheduleStartTime INTEGER,
scheduleEndTime INTEGER,
db.exec(query);
/* A JSON object with true/false for each of the numbers in the Protobuf enum */
scheduleDaysEnabledJson TEXT,
deletedAtTimestampMs INTEGER,
db.pragma('user_version = 1350');
})();
storageID TEXT,
storageVersion INTEGER,
storageUnknownFields BLOB,
storageNeedsSync INTEGER NOT NULL DEFAULT 0
) STRICT;
`;
logger.info('updateToSchemaVersion1350: success!');
db.exec(query);
}
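
The scheduleStartTime/scheduleEndTime comments describe a compact "24-hour clock int" encoding (15 means 00:15, 1130 means 11:30). Hypothetical helpers, only to make that encoding concrete:

// 00:15 -> 15, 11:30 -> 1130, 23:45 -> 2345 (illustrative, not app code)
function toClockInt(hours: number, minutes: number): number {
  return hours * 100 + minutes;
}

function fromClockInt(value: number): { hours: number; minutes: number } {
  return { hours: Math.floor(value / 100), minutes: value % 100 };
}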


@ -1,119 +1,102 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import type { LoggerType } from '../../types/Logging';
import type { WritableDB } from '../Interface';
export const version = 1360;
export default function updateToSchemaVersion1360(db: WritableDB): void {
db.exec(`
DROP TABLE IF EXISTS message_attachments;
`);
export function updateToSchemaVersion1360(
currentVersion: number,
db: WritableDB,
logger: LoggerType
): void {
if (currentVersion >= 1360) {
return;
}
db.exec(`
CREATE TABLE message_attachments (
messageId TEXT NOT NULL REFERENCES messages(id) ON DELETE CASCADE,
-- For editHistoryIndex to be part of the primary key, it cannot be NULL in strict tables.
-- For that reason, we use a value of -1 to indicate that it is the root message (not in editHistory)
editHistoryIndex INTEGER NOT NULL,
attachmentType TEXT NOT NULL, -- 'long-message' | 'quote' | 'attachment' | 'preview' | 'contact' | 'sticker'
orderInMessage INTEGER NOT NULL,
conversationId TEXT NOT NULL,
sentAt INTEGER NOT NULL,
clientUuid TEXT,
size INTEGER NOT NULL,
contentType TEXT NOT NULL,
path TEXT,
plaintextHash TEXT,
localKey TEXT,
caption TEXT,
fileName TEXT,
blurHash TEXT,
height INTEGER,
width INTEGER,
digest TEXT,
key TEXT,
iv TEXT,
downloadPath TEXT,
version INTEGER,
incrementalMac TEXT,
incrementalMacChunkSize INTEGER,
transitCdnKey TEXT,
transitCdnNumber INTEGER,
transitCdnUploadTimestamp INTEGER,
backupMediaName TEXT,
backupCdnNumber INTEGER,
isReencryptableToSameDigest INTEGER,
reencryptionIv TEXT,
reencryptionKey TEXT,
reencryptionDigest TEXT,
thumbnailPath TEXT,
thumbnailSize INTEGER,
thumbnailContentType TEXT,
thumbnailLocalKey TEXT,
thumbnailVersion INTEGER,
screenshotPath TEXT,
screenshotSize INTEGER,
screenshotContentType TEXT,
screenshotLocalKey TEXT,
screenshotVersion INTEGER,
backupThumbnailPath TEXT,
backupThumbnailSize INTEGER,
backupThumbnailContentType TEXT,
backupThumbnailLocalKey TEXT,
backupThumbnailVersion INTEGER,
storyTextAttachmentJson TEXT,
localBackupPath TEXT,
flags INTEGER,
error INTEGER,
wasTooBig INTEGER,
isCorrupted INTEGER,
copiedFromQuotedAttachment INTEGER,
pending INTEGER,
backfillError INTEGER,
PRIMARY KEY (messageId, editHistoryIndex, attachmentType, orderInMessage)
) STRICT;
`);
db.transaction(() => {
db.exec(`
DROP TABLE IF EXISTS message_attachments;
`);
// The following indexes were removed in migration 1370
db.exec(`
CREATE TABLE message_attachments (
messageId TEXT NOT NULL REFERENCES messages(id) ON DELETE CASCADE,
-- For editHistoryIndex to be part of the primary key, it cannot be NULL in strict tables.
-- For that reason, we use a value of -1 to indicate that it is the root message (not in editHistory)
editHistoryIndex INTEGER NOT NULL,
attachmentType TEXT NOT NULL, -- 'long-message' | 'quote' | 'attachment' | 'preview' | 'contact' | 'sticker'
orderInMessage INTEGER NOT NULL,
conversationId TEXT NOT NULL,
sentAt INTEGER NOT NULL,
clientUuid TEXT,
size INTEGER NOT NULL,
contentType TEXT NOT NULL,
path TEXT,
plaintextHash TEXT,
localKey TEXT,
caption TEXT,
fileName TEXT,
blurHash TEXT,
height INTEGER,
width INTEGER,
digest TEXT,
key TEXT,
iv TEXT,
downloadPath TEXT,
version INTEGER,
incrementalMac TEXT,
incrementalMacChunkSize INTEGER,
transitCdnKey TEXT,
transitCdnNumber INTEGER,
transitCdnUploadTimestamp INTEGER,
backupMediaName TEXT,
backupCdnNumber INTEGER,
isReencryptableToSameDigest INTEGER,
reencryptionIv TEXT,
reencryptionKey TEXT,
reencryptionDigest TEXT,
thumbnailPath TEXT,
thumbnailSize INTEGER,
thumbnailContentType TEXT,
thumbnailLocalKey TEXT,
thumbnailVersion INTEGER,
screenshotPath TEXT,
screenshotSize INTEGER,
screenshotContentType TEXT,
screenshotLocalKey TEXT,
screenshotVersion INTEGER,
backupThumbnailPath TEXT,
backupThumbnailSize INTEGER,
backupThumbnailContentType TEXT,
backupThumbnailLocalKey TEXT,
backupThumbnailVersion INTEGER,
storyTextAttachmentJson TEXT,
localBackupPath TEXT,
flags INTEGER,
error INTEGER,
wasTooBig INTEGER,
isCorrupted INTEGER,
copiedFromQuotedAttachment INTEGER,
pending INTEGER,
backfillError INTEGER,
PRIMARY KEY (messageId, editHistoryIndex, attachmentType, orderInMessage)
) STRICT;
`);
// The following indexes were removed in migration 1370
// db.exec(
// 'CREATE INDEX message_attachments_messageId
// ON message_attachments (messageId);'
// );
// db.exec(
// 'CREATE INDEX message_attachments_plaintextHash
// ON message_attachments (plaintextHash);'
// );
// db.exec(
// 'CREATE INDEX message_attachments_path
// ON message_attachments (path);'
// );
// db.exec(
// 'CREATE INDEX message_attachments_all_thumbnailPath
// ON message_attachments (thumbnailPath);'
// );
// db.exec(
// 'CREATE INDEX message_attachments_all_screenshotPath
// ON message_attachments (screenshotPath);'
// );
// db.exec(
// 'CREATE INDEX message_attachments_all_backupThumbnailPath
// ON message_attachments (backupThumbnailPath);'
// );
db.pragma('user_version = 1360');
})();
logger.info('updateToSchemaVersion1360: success!');
// db.exec(
// 'CREATE INDEX message_attachments_messageId
// ON message_attachments (messageId);'
// );
// db.exec(
// 'CREATE INDEX message_attachments_plaintextHash
// ON message_attachments (plaintextHash);'
// );
// db.exec(
// 'CREATE INDEX message_attachments_path
// ON message_attachments (path);'
// );
// db.exec(
// 'CREATE INDEX message_attachments_all_thumbnailPath
// ON message_attachments (thumbnailPath);'
// );
// db.exec(
// 'CREATE INDEX message_attachments_all_screenshotPath
// ON message_attachments (screenshotPath);'
// );
// db.exec(
// 'CREATE INDEX message_attachments_all_backupThumbnailPath
// ON message_attachments (backupThumbnailPath);'
// );
}


@ -1,31 +1,15 @@
// Copyright 2025 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import type { LoggerType } from '../../types/Logging';
import type { WritableDB } from '../Interface';
export const version = 1370;
export function updateToSchemaVersion1370(
currentVersion: number,
db: WritableDB,
logger: LoggerType
): void {
if (currentVersion >= 1370) {
return;
}
db.transaction(() => {
db.exec(`
DROP INDEX IF EXISTS message_attachments_messageId;
DROP INDEX IF EXISTS message_attachments_plaintextHash;
DROP INDEX IF EXISTS message_attachments_path;
DROP INDEX IF EXISTS message_attachments_all_thumbnailPath;
DROP INDEX IF EXISTS message_attachments_all_screenshotPath;
DROP INDEX IF EXISTS message_attachments_all_backupThumbnailPath;
`);
db.pragma('user_version = 1370');
})();
logger.info('updateToSchemaVersion1370: success!');
export default function updateToSchemaVersion1370(db: WritableDB): void {
db.exec(`
DROP INDEX IF EXISTS message_attachments_messageId;
DROP INDEX IF EXISTS message_attachments_plaintextHash;
DROP INDEX IF EXISTS message_attachments_path;
DROP INDEX IF EXISTS message_attachments_all_thumbnailPath;
DROP INDEX IF EXISTS message_attachments_all_screenshotPath;
DROP INDEX IF EXISTS message_attachments_all_backupThumbnailPath;
`);
}


@ -1,35 +1,19 @@
// Copyright 2025 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import type { LoggerType } from '../../types/Logging';
import type { WritableDB } from '../Interface';
export const version = 1380;
export default function updateToSchemaVersion1380(db: WritableDB): void {
db.exec(`
CREATE TABLE donationReceipts(
id TEXT NOT NULL PRIMARY KEY,
currencyType TEXT NOT NULL,
paymentAmount INTEGER NOT NULL,
paymentDetailJson TEXT NOT NULL,
paymentType TEXT NOT NULL,
timestamp INTEGER NOT NULL
) STRICT;
export function updateToSchemaVersion1380(
currentVersion: number,
db: WritableDB,
logger: LoggerType
): void {
if (currentVersion >= 1380) {
return;
}
db.transaction(() => {
db.exec(`
CREATE TABLE donationReceipts(
id TEXT NOT NULL PRIMARY KEY,
currencyType TEXT NOT NULL,
paymentAmount INTEGER NOT NULL,
paymentDetailJson TEXT NOT NULL,
paymentType TEXT NOT NULL,
timestamp INTEGER NOT NULL
) STRICT;
CREATE INDEX donationReceipts_byTimestamp on donationReceipts(timestamp);
`);
db.pragma('user_version = 1380');
})();
logger.info('updateToSchemaVersion1380: success!');
CREATE INDEX donationReceipts_byTimestamp on donationReceipts(timestamp);
`);
}


@ -1,53 +1,36 @@
// Copyright 2025 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import type { LoggerType } from '../../types/Logging';
import { type WritableDB } from '../Interface';
export const version = 1390;
export default function updateToSchemaVersion1390(db: WritableDB): void {
// TODO: DESKTOP-8879 Digest column is only used for deduplication purposes; here we
// genericize its name to attachmentSignature to allow jobs to be added with
// plaintextHash and no digest
db.exec(`
ALTER TABLE attachment_downloads
RENAME COLUMN digest TO attachmentSignature;
`);
export function updateToSchemaVersion1390(
currentVersion: number,
db: WritableDB,
logger: LoggerType
): void {
if (currentVersion >= 1390) {
return;
}
  // We no longer need these columns due to the new mediaName derivation
db.exec(`
ALTER TABLE message_attachments
DROP COLUMN iv;
ALTER TABLE message_attachments
DROP COLUMN isReencryptableToSameDigest;
ALTER TABLE message_attachments
DROP COLUMN reencryptionIv;
ALTER TABLE message_attachments
DROP COLUMN reencryptionKey;
ALTER TABLE message_attachments
DROP COLUMN reencryptionDigest;
ALTER TABLE message_attachments
DROP COLUMN backupMediaName;
`);
db.transaction(() => {
// TODO: DESKTOP-8879 Digest column is only used for deduplication purposes; here we
// genericize its name to attachmentSignature to allow jobs to be added with
// plaintextHash and no digest
db.exec(`
ALTER TABLE attachment_downloads
RENAME COLUMN digest TO attachmentSignature;
`);
    // We no longer need these columns due to the new mediaName derivation
db.exec(`
ALTER TABLE message_attachments
DROP COLUMN iv;
ALTER TABLE message_attachments
DROP COLUMN isReencryptableToSameDigest;
ALTER TABLE message_attachments
DROP COLUMN reencryptionIv;
ALTER TABLE message_attachments
DROP COLUMN reencryptionKey;
ALTER TABLE message_attachments
DROP COLUMN reencryptionDigest;
ALTER TABLE message_attachments
DROP COLUMN backupMediaName;
`);
// Because mediaName has changed, backupCdnNumber is no longer accurate
db.exec(`
UPDATE message_attachments
SET backupCdnNumber = NULL;
`);
db.pragma('user_version = 1390');
})();
logger.info('updateToSchemaVersion1390: success!');
// Because mediaName has changed, backupCdnNumber is no longer accurate
db.exec(`
UPDATE message_attachments
SET backupCdnNumber = NULL;
`);
}
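
Per the TODO above, attachmentSignature is purely a deduplication key that can now come from either a ciphertext digest or a plaintextHash. One plausible shape for picking it, stated as an assumption rather than the app's actual helper:

// Prefer the sender-provided digest when present; otherwise fall back to the
// locally computed plaintext hash so jobs without a digest still deduplicate.
function getAttachmentSignature(attachment: {
  digest?: string;
  plaintextHash?: string;
}): string | undefined {
  return attachment.digest ?? attachment.plaintextHash;
}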


@ -1,27 +1,11 @@
// Copyright 2025 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import type { LoggerType } from '../../types/Logging';
import type { WritableDB } from '../Interface';
export const version = 1400;
export function updateToSchemaVersion1400(
currentVersion: number,
db: WritableDB,
logger: LoggerType
): void {
if (currentVersion >= 1400) {
return;
}
db.transaction(() => {
db.exec(`
ALTER TABLE donationReceipts DROP COLUMN paymentDetailJson;
ALTER TABLE donationReceipts DROP COLUMN paymentType;
`);
db.pragma('user_version = 1400');
})();
logger.info('updateToSchemaVersion1400: success!');
export default function updateToSchemaVersion1400(db: WritableDB): void {
db.exec(`
ALTER TABLE donationReceipts DROP COLUMN paymentDetailJson;
ALTER TABLE donationReceipts DROP COLUMN paymentType;
`);
}


@ -1,41 +1,24 @@
// Copyright 2025 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import type { LoggerType } from '../../types/Logging';
import { type WritableDB } from '../Interface';
export const version = 1410;
export default function updateToSchemaVersion1410(db: WritableDB): void {
db.exec(`
UPDATE conversations
SET json = json_remove(json,
'$.wallpaperPreset',
'$.wallpaperPhotoPointerBase64',
'$.dimWallpaperInDarkMode',
'$.autoBubbleColor'
);
export function updateToSchemaVersion1410(
currentVersion: number,
db: WritableDB,
logger: LoggerType
): void {
if (currentVersion >= 1410) {
return;
}
db.transaction(() => {
db.exec(`
UPDATE conversations
SET json = json_remove(json,
'$.wallpaperPreset',
'$.wallpaperPhotoPointerBase64',
'$.dimWallpaperInDarkMode',
'$.autoBubbleColor'
);
DELETE FROM items
WHERE id IN (
'defaultWallpaperPhotoPointer',
'defaultWallpaperPreset',
'defaultDimWallpaperInDarkMode',
'defaultAutoBubbleColor'
);
`);
db.pragma('user_version = 1410');
})();
logger.info('updateToSchemaVersion1410: success!');
DELETE FROM items
WHERE id IN (
'defaultWallpaperPhotoPointer',
'defaultWallpaperPreset',
'defaultDimWallpaperInDarkMode',
'defaultAutoBubbleColor'
);
`);
}


@ -1,70 +1,53 @@
// Copyright 2025 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import type { LoggerType } from '../../types/Logging';
import { AttachmentDownloadSource, type WritableDB } from '../Interface';
export const version = 1420;
export default function updateToSchemaVersion1420(db: WritableDB): void {
db.exec(`
ALTER TABLE attachment_downloads
ADD COLUMN originalSource TEXT NOT NULL DEFAULT ${AttachmentDownloadSource.STANDARD};
export function updateToSchemaVersion1420(
currentVersion: number,
db: WritableDB,
logger: LoggerType
): void {
if (currentVersion >= 1420) {
return;
}
UPDATE attachment_downloads
SET originalSource = source;
`);
db.transaction(() => {
db.exec(`
ALTER TABLE attachment_downloads
ADD COLUMN originalSource TEXT NOT NULL DEFAULT ${AttachmentDownloadSource.STANDARD};
UPDATE attachment_downloads
SET originalSource = source;
`);
db.exec(`
CREATE TABLE attachment_downloads_backup_stats (
id INTEGER PRIMARY KEY CHECK (id = 0),
totalBytes INTEGER NOT NULL,
completedBytes INTEGER NOT NULL
) STRICT;
db.exec(`
CREATE TABLE attachment_downloads_backup_stats (
id INTEGER PRIMARY KEY CHECK (id = 0),
totalBytes INTEGER NOT NULL,
completedBytes INTEGER NOT NULL
) STRICT;
INSERT INTO attachment_downloads_backup_stats
(id, totalBytes, completedBytes)
VALUES
(0, 0, 0);
INSERT INTO attachment_downloads_backup_stats
(id, totalBytes, completedBytes)
VALUES
(0, 0, 0);
CREATE TRIGGER attachment_downloads_backup_job_insert
AFTER INSERT ON attachment_downloads
WHEN NEW.originalSource = 'backup_import'
BEGIN
UPDATE attachment_downloads_backup_stats SET
totalBytes = totalBytes + NEW.ciphertextSize;
END;
CREATE TRIGGER attachment_downloads_backup_job_insert
AFTER INSERT ON attachment_downloads
WHEN NEW.originalSource = 'backup_import'
BEGIN
UPDATE attachment_downloads_backup_stats SET
totalBytes = totalBytes + NEW.ciphertextSize;
END;
CREATE TRIGGER attachment_downloads_backup_job_update
AFTER UPDATE OF ciphertextSize ON attachment_downloads
WHEN NEW.originalSource = 'backup_import'
BEGIN
UPDATE attachment_downloads_backup_stats SET
totalBytes = MAX(0, totalBytes - OLD.ciphertextSize + NEW.ciphertextSize)
WHERE id = 0;
END;
CREATE TRIGGER attachment_downloads_backup_job_update
AFTER UPDATE OF ciphertextSize ON attachment_downloads
WHEN NEW.originalSource = 'backup_import'
BEGIN
UPDATE attachment_downloads_backup_stats SET
totalBytes = MAX(0, totalBytes - OLD.ciphertextSize + NEW.ciphertextSize)
WHERE id = 0;
END;
CREATE TRIGGER attachment_downloads_backup_job_delete
AFTER DELETE ON attachment_downloads
WHEN OLD.originalSource = 'backup_import'
BEGIN
UPDATE attachment_downloads_backup_stats SET
completedBytes = completedBytes + OLD.ciphertextSize
WHERE id = 0;
END;
`);
db.pragma('user_version = 1420');
})();
logger.info('updateToSchemaVersion1420: success!');
CREATE TRIGGER attachment_downloads_backup_job_delete
AFTER DELETE ON attachment_downloads
WHEN OLD.originalSource = 'backup_import'
BEGIN
UPDATE attachment_downloads_backup_stats SET
completedBytes = completedBytes + OLD.ciphertextSize
WHERE id = 0;
END;
`);
}
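
The three triggers above maintain attachment_downloads_backup_stats without any application code: inserting a backup_import job adds its ciphertextSize to totalBytes, updating ciphertextSize adjusts the total, and deleting the job (i.e. finishing it) adds the size to completedBytes. A small reader sketch built on that invariant (the helper name is an assumption):

import type { WritableDB } from '../Interface';

// Overall backup-attachment download progress as a 0..1 fraction.
function getBackupDownloadProgress(db: WritableDB): number {
  const row = db
    .prepare(
      `SELECT totalBytes, completedBytes
       FROM attachment_downloads_backup_stats
       WHERE id = 0`
    )
    .get() as { totalBytes: number; completedBytes: number };
  return row.totalBytes === 0 ? 1 : row.completedBytes / row.totalBytes;
}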


@ -25,22 +25,17 @@ export function getOurUuid(db: ReadableDB): string | undefined {
}
export default function updateToSchemaVersion41(
currentVersion: number,
db: WritableDB,
logger: LoggerType
): void {
if (currentVersion >= 41) {
return;
}
const getConversationUuid = db.prepare(
`
SELECT uuid
FROM
conversations
WHERE
id = $conversationId
`,
SELECT uuid
FROM
conversations
WHERE
id = $conversationId
`,
{
pluck: true,
}
@ -377,70 +372,61 @@ export default function updateToSchemaVersion41(
logger.info(`Migrated ${migrated} identity keys`);
};
db.transaction(() => {
db.exec(
`
-- Change type of 'id' column from INTEGER to STRING
db.exec(
`
-- Change type of 'id' column from INTEGER to STRING
ALTER TABLE preKeys
RENAME TO old_preKeys;
ALTER TABLE preKeys
RENAME TO old_preKeys;
ALTER TABLE signedPreKeys
RENAME TO old_signedPreKeys;
ALTER TABLE signedPreKeys
RENAME TO old_signedPreKeys;
CREATE TABLE preKeys(
id STRING PRIMARY KEY ASC,
json TEXT
);
CREATE TABLE signedPreKeys(
id STRING PRIMARY KEY ASC,
json TEXT
);
-- sqlite handles the type conversion
INSERT INTO preKeys SELECT * FROM old_preKeys;
INSERT INTO signedPreKeys SELECT * FROM old_signedPreKeys;
DROP TABLE old_preKeys;
DROP TABLE old_signedPreKeys;
-- Alter sessions
ALTER TABLE sessions
ADD COLUMN ourUuid STRING;
ALTER TABLE sessions
ADD COLUMN uuid STRING;
`
CREATE TABLE preKeys(
id STRING PRIMARY KEY ASC,
json TEXT
);
CREATE TABLE signedPreKeys(
id STRING PRIMARY KEY ASC,
json TEXT
);
const ourUuid = getOurUuid(db);
-- sqlite handles the type conversion
INSERT INTO preKeys SELECT * FROM old_preKeys;
INSERT INTO signedPreKeys SELECT * FROM old_signedPreKeys;
if (!isValidUuid(ourUuid)) {
const deleteCount = clearSessionsAndKeys();
DROP TABLE old_preKeys;
DROP TABLE old_signedPreKeys;
if (deleteCount > 0) {
logger.error(
'updateToSchemaVersion41: no uuid is available, ' +
`erased ${deleteCount} sessions/keys`
);
}
-- Alter sessions
db.pragma('user_version = 41');
return;
ALTER TABLE sessions
ADD COLUMN ourUuid STRING;
ALTER TABLE sessions
ADD COLUMN uuid STRING;
`
);
const ourUuid = getOurUuid(db);
if (!isValidUuid(ourUuid)) {
const deleteCount = clearSessionsAndKeys();
if (deleteCount > 0) {
logger.error(`no uuid is available, erased ${deleteCount} sessions/keys`);
}
prefixKeys(ourUuid);
return;
}
updateSenderKeys(ourUuid);
prefixKeys(ourUuid);
updateSessions(ourUuid);
updateSenderKeys(ourUuid);
moveIdentityKeyToMap(ourUuid);
updateSessions(ourUuid);
updateIdentityKeys();
moveIdentityKeyToMap(ourUuid);
db.pragma('user_version = 41');
})();
logger.info('updateToSchemaVersion41: success!');
updateIdentityKeys();
}


@ -6,75 +6,62 @@ import type { WritableDB } from '../Interface';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion42(
currentVersion: number,
db: WritableDB,
logger: LoggerType
): void {
if (currentVersion >= 42) {
return;
// First, recreate messages table delete trigger with reaction support
db.exec(`
DROP TRIGGER messages_on_delete;
CREATE TRIGGER messages_on_delete AFTER DELETE ON messages BEGIN
DELETE FROM messages_fts WHERE rowid = old.rowid;
DELETE FROM sendLogPayloads WHERE id IN (
SELECT payloadId FROM sendLogMessageIds
WHERE messageId = old.id
);
DELETE FROM reactions WHERE rowid IN (
SELECT rowid FROM reactions
WHERE messageId = old.id
);
END;
`);
// Then, delete previously-orphaned reactions
  // Note: we use `pluck` here to fetch only the first column of the
  // returned row.
const messageIdList: Array<string> = db
.prepare('SELECT id FROM messages ORDER BY id ASC;', {
pluck: true,
})
.all();
const allReactions: Array<{
rowid: number;
messageId: string;
}> = db.prepare('SELECT rowid, messageId FROM reactions;').all();
const messageIds = new Set(messageIdList);
const reactionsToDelete: Array<number> = [];
allReactions.forEach(reaction => {
if (!messageIds.has(reaction.messageId)) {
reactionsToDelete.push(reaction.rowid);
}
});
function deleteReactions(rowids: ReadonlyArray<number>, persistent: boolean) {
db.prepare(
`
DELETE FROM reactions
WHERE rowid IN ( ${rowids.map(() => '?').join(', ')} );
`,
{ persistent }
).run(rowids);
}
db.transaction(() => {
// First, recreate messages table delete trigger with reaction support
db.exec(`
DROP TRIGGER messages_on_delete;
CREATE TRIGGER messages_on_delete AFTER DELETE ON messages BEGIN
DELETE FROM messages_fts WHERE rowid = old.rowid;
DELETE FROM sendLogPayloads WHERE id IN (
SELECT payloadId FROM sendLogMessageIds
WHERE messageId = old.id
);
DELETE FROM reactions WHERE rowid IN (
SELECT rowid FROM reactions
WHERE messageId = old.id
);
END;
`);
// Then, delete previously-orphaned reactions
    // Note: we use `pluck` here to fetch only the first column of the
    // returned row.
const messageIdList: Array<string> = db
.prepare('SELECT id FROM messages ORDER BY id ASC;', {
pluck: true,
})
.all();
const allReactions: Array<{
rowid: number;
messageId: string;
}> = db.prepare('SELECT rowid, messageId FROM reactions;').all();
const messageIds = new Set(messageIdList);
const reactionsToDelete: Array<number> = [];
allReactions.forEach(reaction => {
if (!messageIds.has(reaction.messageId)) {
reactionsToDelete.push(reaction.rowid);
}
});
function deleteReactions(
rowids: ReadonlyArray<number>,
persistent: boolean
) {
db.prepare(
`
DELETE FROM reactions
WHERE rowid IN ( ${rowids.map(() => '?').join(', ')} );
`,
{ persistent }
).run(rowids);
}
if (reactionsToDelete.length > 0) {
logger.info(`Deleting ${reactionsToDelete.length} orphaned reactions`);
batchMultiVarQuery(db, reactionsToDelete, deleteReactions);
}
db.pragma('user_version = 42');
})();
logger.info('updateToSchemaVersion42: success!');
if (reactionsToDelete.length > 0) {
logger.info(`Deleting ${reactionsToDelete.length} orphaned reactions`);
batchMultiVarQuery(db, reactionsToDelete, deleteReactions);
}
}
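
deleteReactions interpolates one placeholder per rowid, which is why it has to go through batchMultiVarQuery: SQLite caps the number of bound variables per statement (999 in older builds). A generic sketch of that batching pattern, with the limit and helper shape assumed rather than taken from the real batchMultiVarQuery:

// Run `execute` once per chunk, keeping each chunk under the variable limit.
function runInBatches<T>(
  values: ReadonlyArray<T>,
  execute: (chunk: ReadonlyArray<T>) => void,
  maxVariables = 999
): void {
  for (let i = 0; i < values.length; i += maxVariables) {
    execute(values.slice(i, i + maxVariables));
  }
}

// e.g. runInBatches(reactionsToDelete, chunk => deleteReactions(chunk, true));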


@ -33,14 +33,9 @@ type ConversationType = Readonly<{
}>;
export default function updateToSchemaVersion43(
currentVersion: number,
db: WritableDB,
logger: LoggerType
): void {
if (currentVersion >= 43) {
return;
}
type LegacyPendingMemberType = {
addedByUserId?: string;
conversationId: string;
@ -117,8 +112,7 @@ export default function updateToSchemaVersion43(
});
if (!uuid) {
logger.warn(
`updateToSchemaVersion43: ${logId}.${key} UUID not found ` +
`for ${member.conversationId}`
`${logId}.${key} UUID not found for ${member.conversationId}`
);
return undefined;
}
@ -158,15 +152,14 @@ export default function updateToSchemaVersion43(
if (oldValue.length !== 0) {
logger.info(
`updateToSchemaVersion43: migrated ${oldValue.length} ${key} ` +
`migrated ${oldValue.length} ${key} ` +
`entries to ${newValue.length} for ${logId}`
);
}
if (addedByCount > 0) {
logger.info(
`updateToSchemaVersion43: migrated ${addedByCount} addedByUserId ` +
`in ${key} for ${logId}`
`migrated ${addedByCount} addedByUserId in ${key} for ${logId}`
);
}
}
@ -286,7 +279,7 @@ export default function updateToSchemaVersion43(
}
if (!newValue) {
logger.warn(
`updateToSchemaVersion43: ${id}.groupV2Change.details.${key} ` +
`${id}.groupV2Change.details.${key} ` +
`UUID not found for ${oldValue}`
);
return undefined;
@ -342,7 +335,7 @@ export default function updateToSchemaVersion43(
if (!uuid) {
logger.warn(
`updateToSchemaVersion43: ${id}.invitedGV2Members UUID ` +
`${id}.invitedGV2Members UUID ` +
`not found for ${conversationId}`
);
return undefined;
@ -390,44 +383,35 @@ export default function updateToSchemaVersion43(
return true;
};
db.transaction(() => {
const allConversations = db
.prepare(
`
SELECT json
FROM conversations
ORDER BY id ASC;
`,
{ pluck: true }
)
.all<string>()
.map(json => jsonToObject<ConversationType>(json));
const allConversations = db
.prepare(
`
SELECT json
FROM conversations
ORDER BY id ASC;
`,
{ pluck: true }
)
.all<string>()
.map(json => jsonToObject<ConversationType>(json));
logger.info(
'updateToSchemaVersion43: About to iterate through ' +
`${allConversations.length} conversations`
);
logger.info(
`About to iterate through ${allConversations.length} conversations`
);
for (const convo of allConversations) {
upgradeConversation(convo);
for (const convo of allConversations) {
upgradeConversation(convo);
}
const messageCount = getCountFromTable(db, 'messages');
logger.info(`About to iterate through ${messageCount} messages`);
let updatedCount = 0;
for (const message of new TableIterator<MessageType>(db, 'messages')) {
if (upgradeMessage(message)) {
updatedCount += 1;
}
}
const messageCount = getCountFromTable(db, 'messages');
logger.info(
'updateToSchemaVersion43: About to iterate through ' +
`${messageCount} messages`
);
let updatedCount = 0;
for (const message of new TableIterator<MessageType>(db, 'messages')) {
if (upgradeMessage(message)) {
updatedCount += 1;
}
}
logger.info(`updateToSchemaVersion43: Updated ${updatedCount} messages`);
db.pragma('user_version = 43');
})();
logger.info('updateToSchemaVersion43: success!');
logger.info(`Updated ${updatedCount} messages`);
}


@ -3,41 +3,25 @@
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion44(
currentVersion: number,
db: Database,
logger: LoggerType
): void {
if (currentVersion >= 44) {
return;
}
db.transaction(() => {
db.exec(
`
CREATE TABLE badges(
id TEXT PRIMARY KEY,
category TEXT NOT NULL,
name TEXT NOT NULL,
descriptionTemplate TEXT NOT NULL
);
CREATE TABLE badgeImageFiles(
badgeId TEXT REFERENCES badges(id)
ON DELETE CASCADE
ON UPDATE CASCADE,
'order' INTEGER NOT NULL,
url TEXT NOT NULL,
localPath TEXT,
theme TEXT NOT NULL
);
`
export default function updateToSchemaVersion44(db: Database): void {
db.exec(
`
CREATE TABLE badges(
id TEXT PRIMARY KEY,
category TEXT NOT NULL,
name TEXT NOT NULL,
descriptionTemplate TEXT NOT NULL
);
db.pragma('user_version = 44');
})();
logger.info('updateToSchemaVersion44: success!');
CREATE TABLE badgeImageFiles(
badgeId TEXT REFERENCES badges(id)
ON DELETE CASCADE
ON UPDATE CASCADE,
'order' INTEGER NOT NULL,
url TEXT NOT NULL,
localPath TEXT,
theme TEXT NOT NULL
);
`
);
}


@ -3,135 +3,119 @@
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion45(db: Database): void {
db.exec(
`
--- Add column to messages table
export default function updateToSchemaVersion45(
currentVersion: number,
db: Database,
logger: LoggerType
): void {
if (currentVersion >= 45) {
return;
}
ALTER TABLE messages ADD COLUMN storyId STRING;
db.transaction(() => {
db.exec(
`
--- Add column to messages table
--- Update important message indices
ALTER TABLE messages ADD COLUMN storyId STRING;
DROP INDEX messages_conversation;
CREATE INDEX messages_conversation ON messages
(conversationId, type, storyId, received_at);
--- Update important message indices
DROP INDEX messages_unread;
CREATE INDEX messages_unread ON messages
(conversationId, readStatus, type, storyId) WHERE readStatus IS NOT NULL;
DROP INDEX messages_conversation;
CREATE INDEX messages_conversation ON messages
(conversationId, type, storyId, received_at);
--- Update attachment indices for All Media views
DROP INDEX messages_unread;
CREATE INDEX messages_unread ON messages
(conversationId, readStatus, type, storyId) WHERE readStatus IS NOT NULL;
DROP INDEX messages_hasAttachments;
CREATE INDEX messages_hasAttachments
ON messages (conversationId, hasAttachments, received_at)
WHERE type IS NOT 'story' AND storyId IS NULL;
--- Update attachment indices for All Media views
DROP INDEX messages_hasFileAttachments;
CREATE INDEX messages_hasFileAttachments
ON messages (conversationId, hasFileAttachments, received_at)
WHERE type IS NOT 'story' AND storyId IS NULL;
DROP INDEX messages_hasAttachments;
CREATE INDEX messages_hasAttachments
ON messages (conversationId, hasAttachments, received_at)
WHERE type IS NOT 'story' AND storyId IS NULL;
DROP INDEX messages_hasVisualMediaAttachments;
CREATE INDEX messages_hasVisualMediaAttachments
ON messages (conversationId, hasVisualMediaAttachments, received_at)
WHERE type IS NOT 'story' AND storyId IS NULL;
DROP INDEX messages_hasFileAttachments;
CREATE INDEX messages_hasFileAttachments
ON messages (conversationId, hasFileAttachments, received_at)
WHERE type IS NOT 'story' AND storyId IS NULL;
--- Message insert/update triggers to exclude stories and story replies
DROP INDEX messages_hasVisualMediaAttachments;
CREATE INDEX messages_hasVisualMediaAttachments
ON messages (conversationId, hasVisualMediaAttachments, received_at)
WHERE type IS NOT 'story' AND storyId IS NULL;
DROP TRIGGER messages_on_insert;
-- Note: any changes to this trigger must be reflected in
-- Server.ts: enableMessageInsertTriggersAndBackfill
CREATE TRIGGER messages_on_insert AFTER INSERT ON messages
WHEN new.isViewOnce IS NOT 1 AND new.storyId IS NULL
BEGIN
INSERT INTO messages_fts
(rowid, body)
VALUES
(new.rowid, new.body);
END;
--- Message insert/update triggers to exclude stories and story replies
DROP TRIGGER messages_on_update;
CREATE TRIGGER messages_on_update AFTER UPDATE ON messages
WHEN
(new.body IS NULL OR old.body IS NOT new.body) AND
new.isViewOnce IS NOT 1 AND new.storyId IS NULL
BEGIN
DELETE FROM messages_fts WHERE rowid = old.rowid;
INSERT INTO messages_fts
(rowid, body)
VALUES
(new.rowid, new.body);
END;
DROP TRIGGER messages_on_insert;
-- Note: any changes to this trigger must be reflected in
-- Server.ts: enableMessageInsertTriggersAndBackfill
CREATE TRIGGER messages_on_insert AFTER INSERT ON messages
WHEN new.isViewOnce IS NOT 1 AND new.storyId IS NULL
BEGIN
INSERT INTO messages_fts
(rowid, body)
VALUES
(new.rowid, new.body);
END;
--- Update delete trigger to remove storyReads
DROP TRIGGER messages_on_update;
CREATE TRIGGER messages_on_update AFTER UPDATE ON messages
WHEN
(new.body IS NULL OR old.body IS NOT new.body) AND
new.isViewOnce IS NOT 1 AND new.storyId IS NULL
BEGIN
DELETE FROM messages_fts WHERE rowid = old.rowid;
INSERT INTO messages_fts
(rowid, body)
VALUES
(new.rowid, new.body);
END;
--- Update delete trigger to remove storyReads
--- Note: for future updates to this trigger, be sure to update Server.ts/removeAll()
--- (it deletes and re-adds this trigger for performance)
DROP TRIGGER messages_on_delete;
CREATE TRIGGER messages_on_delete AFTER DELETE ON messages BEGIN
DELETE FROM messages_fts WHERE rowid = old.rowid;
DELETE FROM sendLogPayloads WHERE id IN (
SELECT payloadId FROM sendLogMessageIds
WHERE messageId = old.id
);
DELETE FROM reactions WHERE rowid IN (
SELECT rowid FROM reactions
WHERE messageId = old.id
);
DELETE FROM storyReads WHERE storyId = old.storyId;
END;
--- Story Read History
CREATE TABLE storyReads (
authorId STRING NOT NULL,
conversationId STRING NOT NULL,
storyId STRING NOT NULL,
storyReadDate NUMBER NOT NULL,
PRIMARY KEY (authorId, storyId)
--- Note: for future updates to this trigger, be sure to update Server.ts/removeAll()
--- (it deletes and re-adds this trigger for performance)
DROP TRIGGER messages_on_delete;
CREATE TRIGGER messages_on_delete AFTER DELETE ON messages BEGIN
DELETE FROM messages_fts WHERE rowid = old.rowid;
DELETE FROM sendLogPayloads WHERE id IN (
SELECT payloadId FROM sendLogMessageIds
WHERE messageId = old.id
);
CREATE INDEX storyReads_data ON storyReads (
storyReadDate, authorId, conversationId
DELETE FROM reactions WHERE rowid IN (
SELECT rowid FROM reactions
WHERE messageId = old.id
);
DELETE FROM storyReads WHERE storyId = old.storyId;
END;
--- Story Distribution Lists
--- Story Read History
CREATE TABLE storyDistributions(
id STRING PRIMARY KEY NOT NULL,
name TEXT,
CREATE TABLE storyReads (
authorId STRING NOT NULL,
conversationId STRING NOT NULL,
storyId STRING NOT NULL,
storyReadDate NUMBER NOT NULL,
avatarUrlPath TEXT,
avatarKey BLOB,
senderKeyInfoJson STRING
);
CREATE TABLE storyDistributionMembers(
listId STRING NOT NULL REFERENCES storyDistributions(id)
ON DELETE CASCADE
ON UPDATE CASCADE,
uuid STRING NOT NULL,
PRIMARY KEY (listId, uuid)
)
`
PRIMARY KEY (authorId, storyId)
);
db.pragma('user_version = 45');
})();
CREATE INDEX storyReads_data ON storyReads (
storyReadDate, authorId, conversationId
);
logger.info('updateToSchemaVersion45: success!');
--- Story Distribution Lists
CREATE TABLE storyDistributions(
id STRING PRIMARY KEY NOT NULL,
name TEXT,
avatarUrlPath TEXT,
avatarKey BLOB,
senderKeyInfoJson STRING
);
CREATE TABLE storyDistributionMembers(
listId STRING NOT NULL REFERENCES storyDistributions(id)
ON DELETE CASCADE
ON UPDATE CASCADE,
uuid STRING NOT NULL,
PRIMARY KEY (listId, uuid)
)
`
);
}


@ -3,38 +3,22 @@
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion46(db: Database): void {
db.exec(
`
--- Add column to messages table
export default function updateToSchemaVersion46(
currentVersion: number,
db: Database,
logger: LoggerType
): void {
if (currentVersion >= 46) {
return;
}
ALTER TABLE messages
ADD COLUMN
isStory INTEGER
GENERATED ALWAYS
AS (type = 'story');
db.transaction(() => {
db.exec(
`
--- Add column to messages table
--- Update important message indices
ALTER TABLE messages
ADD COLUMN
isStory INTEGER
GENERATED ALWAYS
AS (type = 'story');
--- Update important message indices
DROP INDEX messages_conversation;
CREATE INDEX messages_conversation ON messages
(conversationId, isStory, storyId, received_at, sent_at);
`
);
db.pragma('user_version = 46');
})();
logger.info('updateToSchemaVersion46: success!');
DROP INDEX messages_conversation;
CREATE INDEX messages_conversation ON messages
(conversationId, isStory, storyId, received_at, sent_at);
`
);
}


@ -1,139 +1,128 @@
// Copyright 2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import type { LoggerType } from '../../types/Logging';
import { getOurUuid } from './41-uuid-keys';
import type { LoggerType } from '../../types/Logging';
import type { WritableDB } from '../Interface';
export default function updateToSchemaVersion47(
currentVersion: number,
db: WritableDB,
logger: LoggerType
): void {
if (currentVersion >= 47) {
return;
db.exec(
`
DROP INDEX messages_conversation;
ALTER TABLE messages
DROP COLUMN isStory;
ALTER TABLE messages
ADD COLUMN isStory INTEGER
GENERATED ALWAYS AS (type IS 'story');
ALTER TABLE messages
ADD COLUMN isChangeCreatedByUs INTEGER NOT NULL DEFAULT 0;
ALTER TABLE messages
ADD COLUMN shouldAffectActivity INTEGER
GENERATED ALWAYS AS (
type IS NULL
OR
type NOT IN (
'change-number-notification',
'group-v1-migration',
'message-history-unsynced',
'profile-change',
'story',
'universal-timer-notification',
'verified-change',
'keychange'
)
);
ALTER TABLE messages
ADD COLUMN shouldAffectPreview INTEGER
GENERATED ALWAYS AS (
type IS NULL
OR
type NOT IN (
'change-number-notification',
'group-v1-migration',
'message-history-unsynced',
'profile-change',
'story',
'universal-timer-notification',
'verified-change'
)
);
ALTER TABLE messages
ADD COLUMN isUserInitiatedMessage INTEGER
GENERATED ALWAYS AS (
type IS NULL
OR
type NOT IN (
'change-number-notification',
'group-v1-migration',
'message-history-unsynced',
'profile-change',
'story',
'universal-timer-notification',
'verified-change',
'group-v2-change',
'keychange'
)
);
ALTER TABLE messages
ADD COLUMN isTimerChangeFromSync INTEGER
GENERATED ALWAYS AS (
json_extract(json, '$.expirationTimerUpdate.fromSync') IS 1
);
ALTER TABLE messages
ADD COLUMN isGroupLeaveEvent INTEGER
GENERATED ALWAYS AS (
type IS 'group-v2-change' AND
json_array_length(json_extract(json, '$.groupV2Change.details')) IS 1 AND
json_extract(json, '$.groupV2Change.details[0].type') IS 'member-remove' AND
json_extract(json, '$.groupV2Change.from') IS NOT NULL AND
json_extract(json, '$.groupV2Change.from') IS json_extract(json, '$.groupV2Change.details[0].uuid')
);
ALTER TABLE messages
ADD COLUMN isGroupLeaveEventFromOther INTEGER
GENERATED ALWAYS AS (
isGroupLeaveEvent IS 1
AND
isChangeCreatedByUs IS 0
);
CREATE INDEX messages_conversation ON messages
(conversationId, isStory, storyId, received_at, sent_at);
CREATE INDEX messages_preview ON messages
(conversationId, shouldAffectPreview, isGroupLeaveEventFromOther, expiresAt, received_at, sent_at);
CREATE INDEX messages_activity ON messages
(conversationId, shouldAffectActivity, isTimerChangeFromSync, isGroupLeaveEventFromOther, received_at, sent_at);
CREATE INDEX message_user_initiated ON messages (isUserInitiatedMessage);
`
);
const ourUuid = getOurUuid(db);
if (!ourUuid) {
logger.info('our UUID not found');
} else {
db.prepare(
`
UPDATE messages SET
isChangeCreatedByUs = json_extract(json, '$.groupV2Change.from') IS $ourUuid;
`
).run({
ourUuid,
});
}
db.transaction(() => {
db.exec(
`
DROP INDEX messages_conversation;
ALTER TABLE messages
DROP COLUMN isStory;
ALTER TABLE messages
ADD COLUMN isStory INTEGER
GENERATED ALWAYS AS (type IS 'story');
ALTER TABLE messages
ADD COLUMN isChangeCreatedByUs INTEGER NOT NULL DEFAULT 0;
ALTER TABLE messages
ADD COLUMN shouldAffectActivity INTEGER
GENERATED ALWAYS AS (
type IS NULL
OR
type NOT IN (
'change-number-notification',
'group-v1-migration',
'message-history-unsynced',
'profile-change',
'story',
'universal-timer-notification',
'verified-change',
'keychange'
)
);
ALTER TABLE messages
ADD COLUMN shouldAffectPreview INTEGER
GENERATED ALWAYS AS (
type IS NULL
OR
type NOT IN (
'change-number-notification',
'group-v1-migration',
'message-history-unsynced',
'profile-change',
'story',
'universal-timer-notification',
'verified-change'
)
);
ALTER TABLE messages
ADD COLUMN isUserInitiatedMessage INTEGER
GENERATED ALWAYS AS (
type IS NULL
OR
type NOT IN (
'change-number-notification',
'group-v1-migration',
'message-history-unsynced',
'profile-change',
'story',
'universal-timer-notification',
'verified-change',
'group-v2-change',
'keychange'
)
);
ALTER TABLE messages
ADD COLUMN isTimerChangeFromSync INTEGER
GENERATED ALWAYS AS (
json_extract(json, '$.expirationTimerUpdate.fromSync') IS 1
);
ALTER TABLE messages
ADD COLUMN isGroupLeaveEvent INTEGER
GENERATED ALWAYS AS (
type IS 'group-v2-change' AND
json_array_length(json_extract(json, '$.groupV2Change.details')) IS 1 AND
json_extract(json, '$.groupV2Change.details[0].type') IS 'member-remove' AND
json_extract(json, '$.groupV2Change.from') IS NOT NULL AND
json_extract(json, '$.groupV2Change.from') IS json_extract(json, '$.groupV2Change.details[0].uuid')
);
ALTER TABLE messages
ADD COLUMN isGroupLeaveEventFromOther INTEGER
GENERATED ALWAYS AS (
isGroupLeaveEvent IS 1
AND
isChangeCreatedByUs IS 0
);
CREATE INDEX messages_conversation ON messages
(conversationId, isStory, storyId, received_at, sent_at);
CREATE INDEX messages_preview ON messages
(conversationId, shouldAffectPreview, isGroupLeaveEventFromOther, expiresAt, received_at, sent_at);
CREATE INDEX messages_activity ON messages
(conversationId, shouldAffectActivity, isTimerChangeFromSync, isGroupLeaveEventFromOther, received_at, sent_at);
CREATE INDEX message_user_initiated ON messages (isUserInitiatedMessage);
`
);
const ourUuid = getOurUuid(db);
if (!ourUuid) {
logger.info('updateToSchemaVersion47: our UUID not found');
} else {
db.prepare(
`
UPDATE messages SET
isChangeCreatedByUs = json_extract(json, '$.groupV2Change.from') IS $ourUuid;
`
).run({
ourUuid,
});
}
db.pragma('user_version = 47');
})();
logger.info('updateToSchemaVersion47: success!');
}
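
Migration 47 leans heavily on GENERATED ALWAYS columns: SQLite computes them from other columns, they can be indexed, and they can never be written directly. A standalone illustration on a scratch table (not the real messages schema):

import type { Database } from '@signalapp/sqlcipher';

function demoGeneratedColumns(db: Database): void {
  db.exec(`
    CREATE TEMP TABLE demo (
      type TEXT,
      isStory INTEGER GENERATED ALWAYS AS (type IS 'story')
    );
    INSERT INTO demo (type) VALUES ('story'), ('incoming');
  `);
  const rows = db.prepare('SELECT type, isStory FROM demo').all();
  console.log(rows); // [{ type: 'story', isStory: 1 }, { type: 'incoming', isStory: 0 }]
}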


@ -3,28 +3,12 @@
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion48(db: Database): void {
db.exec(
`
DROP INDEX message_user_initiated;
export default function updateToSchemaVersion48(
currentVersion: number,
db: Database,
logger: LoggerType
): void {
if (currentVersion >= 48) {
return;
}
db.transaction(() => {
db.exec(
`
DROP INDEX message_user_initiated;
CREATE INDEX message_user_initiated ON messages (conversationId, isUserInitiatedMessage);
`
);
db.pragma('user_version = 48');
})();
logger.info('updateToSchemaVersion48: success!');
CREATE INDEX message_user_initiated ON messages (conversationId, isUserInitiatedMessage);
`
);
}


@ -3,31 +3,15 @@
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion49(db: Database): void {
db.exec(
`
DROP INDEX messages_preview;
export default function updateToSchemaVersion49(
currentVersion: number,
db: Database,
logger: LoggerType
): void {
if (currentVersion >= 49) {
return;
}
db.transaction(() => {
db.exec(
`
DROP INDEX messages_preview;
-- Note the omitted 'expiresAt' column in the index. If it is present
-- sqlite can't ORDER BY received_at, sent_at using this index.
CREATE INDEX messages_preview ON messages
(conversationId, shouldAffectPreview, isGroupLeaveEventFromOther, received_at, sent_at);
`
);
db.pragma('user_version = 49');
})();
logger.info('updateToSchemaVersion49: success!');
-- Note the omitted 'expiresAt' column in the index. If it is present
-- sqlite can't ORDER BY received_at, sent_at using this index.
CREATE INDEX messages_preview ON messages
(conversationId, shouldAffectPreview, isGroupLeaveEventFromOther, received_at, sent_at);
`
);
}
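
The comment about the omitted expiresAt column is the sort of thing EXPLAIN QUERY PLAN makes visible: with expiresAt in the index, SQLite had to sort with a temporary B-tree, while without it, equality on the leading columns lets the index deliver rows already ordered by received_at, sent_at. An illustrative check (the exact predicate shape is an assumption, not the app's real preview query):

import type { Database } from '@signalapp/sqlcipher';

function explainPreviewQuery(db: Database): void {
  const plan = db
    .prepare(
      `EXPLAIN QUERY PLAN
       SELECT rowid FROM messages
       WHERE conversationId = $conversationId
         AND shouldAffectPreview = 1
         AND isGroupLeaveEventFromOther = 0
       ORDER BY received_at, sent_at`
    )
    .all({ conversationId: 'some-conversation-id' });
  // Expect a line mentioning messages_preview and no 'USE TEMP B-TREE FOR ORDER BY'.
  console.log(plan);
}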


@ -3,30 +3,14 @@
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion50(db: Database): void {
db.exec(
`
DROP INDEX messages_unread;
export default function updateToSchemaVersion50(
currentVersion: number,
db: Database,
logger: LoggerType
): void {
if (currentVersion >= 50) {
return;
}
db.transaction(() => {
db.exec(
`
DROP INDEX messages_unread;
-- Note: here we move to the modern isStory/storyId fields and add received_at/sent_at.
CREATE INDEX messages_unread ON messages
(conversationId, readStatus, isStory, storyId, received_at, sent_at) WHERE readStatus IS NOT NULL;
`
);
db.pragma('user_version = 50');
})();
logger.info('updateToSchemaVersion50: success!');
-- Note: here we move to the modern isStory/storyId fields and add received_at/sent_at.
CREATE INDEX messages_unread ON messages
(conversationId, readStatus, isStory, storyId, received_at, sent_at) WHERE readStatus IS NOT NULL;
`
);
}


@ -7,102 +7,81 @@ import type { WritableDB } from '../Interface';
import { getJobsInQueue, insertJob } from '../Server';
export default function updateToSchemaVersion51(
currentVersion: number,
db: WritableDB,
logger: LoggerType
): void {
if (currentVersion >= 51) {
return;
}
const deleteJobsInQueue = db.prepare(
'DELETE FROM jobs WHERE queueType = $queueType'
);
db.transaction(() => {
const deleteJobsInQueue = db.prepare(
'DELETE FROM jobs WHERE queueType = $queueType'
);
// First, make sure that reactions job data has a type and conversationId
const reactionsJobs = getJobsInQueue(db, 'reactions');
deleteJobsInQueue.run({ queueType: 'reactions' });
// First, make sure that reactions job data has a type and conversationId
const reactionsJobs = getJobsInQueue(db, 'reactions');
deleteJobsInQueue.run({ queueType: 'reactions' });
const getMessageById = db.prepare(
'SELECT conversationId FROM messages WHERE id IS ?'
);
const getMessageById = db.prepare(
'SELECT conversationId FROM messages WHERE id IS ?'
);
reactionsJobs.forEach(job => {
const { data, id } = job;
reactionsJobs.forEach(job => {
const { data, id } = job;
if (!isRecord(data)) {
logger.warn(`reactions queue job ${id} was missing valid data`);
return;
}
if (!isRecord(data)) {
logger.warn(
`updateToSchemaVersion51: reactions queue job ${id} was missing valid data`
);
return;
}
const { messageId } = data;
if (typeof messageId !== 'string') {
logger.warn(`reactions queue job ${id} had a non-string messageId`);
return;
}
const { messageId } = data;
if (typeof messageId !== 'string') {
logger.warn(
`updateToSchemaVersion51: reactions queue job ${id} had a non-string messageId`
);
return;
}
const message = getMessageById.get([messageId]);
if (!message) {
logger.warn(`Unable to find message for reaction job ${id}`);
return;
}
const message = getMessageById.get([messageId]);
if (!message) {
logger.warn(
`updateToSchemaVersion51: Unable to find message for reaction job ${id}`
);
return;
}
const { conversationId } = message;
if (typeof conversationId !== 'string') {
logger.warn(`reactions queue job ${id} had a non-string conversationId`);
return;
}
const { conversationId } = message;
if (typeof conversationId !== 'string') {
logger.warn(
`updateToSchemaVersion51: reactions queue job ${id} had a non-string conversationId`
);
return;
}
const newJob = {
...job,
queueType: 'conversation',
data: {
...data,
type: 'Reaction',
conversationId,
},
};
const newJob = {
...job,
queueType: 'conversation',
data: {
...data,
type: 'Reaction',
conversationId,
},
};
insertJob(db, newJob);
});
insertJob(db, newJob);
});
// Then make sure all normal send job data has a type
const normalSendJobs = getJobsInQueue(db, 'normal send');
deleteJobsInQueue.run({ queueType: 'normal send' });
// Then make sure all normal send job data has a type
const normalSendJobs = getJobsInQueue(db, 'normal send');
deleteJobsInQueue.run({ queueType: 'normal send' });
normalSendJobs.forEach(job => {
const { data, id } = job;
normalSendJobs.forEach(job => {
const { data, id } = job;
if (!isRecord(data)) {
logger.warn(`normal send queue job ${id} was missing valid data`);
return;
}
if (!isRecord(data)) {
logger.warn(
`updateToSchemaVersion51: normal send queue job ${id} was missing valid data`
);
return;
}
const newJob = {
...job,
queueType: 'conversation',
data: {
...data,
type: 'NormalMessage',
},
};
const newJob = {
...job,
queueType: 'conversation',
data: {
...data,
type: 'NormalMessage',
},
};
insertJob(db, newJob);
});
db.pragma('user_version = 51');
})();
logger.info('updateToSchemaVersion51: success!');
insertJob(db, newJob);
});
}
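
The net effect of this rewrite is that the standalone 'reactions' and 'normal send' queues are folded into a single 'conversation' queue whose job data carries a type discriminator. With illustrative values only, the transformation looks like this:

// A 'reactions' job gains type: 'Reaction' plus the owning conversationId;
// a 'normal send' job simply gains type: 'NormalMessage'.
const before = {
  id: 'job-1',
  queueType: 'reactions',
  data: { messageId: 'message-1' },
};
const after = {
  ...before,
  queueType: 'conversation',
  data: { ...before.data, type: 'Reaction', conversationId: 'conversation-1' },
};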


@ -3,36 +3,20 @@
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion52(db: Database): void {
  db.exec(
    `
    -- Create indices that don't have storyId in them so that
    -- '_storyIdPredicate' could be optimized.

    -- See migration 47
    CREATE INDEX messages_conversation_no_story_id ON messages
      (conversationId, isStory, received_at, sent_at);

    -- See migration 50
    CREATE INDEX messages_unread_no_story_id ON messages
      (conversationId, readStatus, isStory, received_at, sent_at)
      WHERE readStatus IS NOT NULL;
    `
  );
}

View file

@ -7,14 +7,9 @@ import type { LoggerType } from '../../types/Logging';
import { jsonToObject } from '../util';
export default function updateToSchemaVersion53(
  db: Database,
  logger: LoggerType
): void {
  type LegacyConversationType = {
    id: string;
    groupId: string;
@ -52,7 +47,7 @@ export default function updateToSchemaVersion53(
};
    logger.info(
      `Updating ${logId} with ` +
        `${legacy.bannedMembersV2.length} banned members`
    );
@ -64,33 +59,27 @@ export default function updateToSchemaVersion53(
return true;
};
  const allConversations = db
    .prepare(
      `
      SELECT json
      FROM conversations
      WHERE type = 'group'
      ORDER BY id ASC;
      `,
      { pluck: true }
    )
    .all<string>()
    .map(json => jsonToObject<ConversationType>(json));

  logger.info(
    `About to iterate through ${allConversations.length} conversations`
  );

  let updated = 0;
  for (const convo of allConversations) {
    updated += upgradeConversation(convo) ? 1 : 0;
  }

  logger.info(`Updated ${updated} conversations`);
}

View file

@ -3,25 +3,10 @@
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion54(db: Database): void {
  db.exec(
    `
    ALTER TABLE unprocessed ADD COLUMN receivedAtCounter INTEGER;
    `
  );
}

View file

@ -8,61 +8,45 @@ import { isRecord } from '../../util/isRecord';
import { isIterable } from '../../util/iterables';
export default function updateToSchemaVersion55(
  db: WritableDB,
  logger: LoggerType
): void {
  const deleteJobsInQueue = db.prepare(
    'DELETE FROM jobs WHERE queueType = $queueType'
  );

  // First, make sure that report spam job data has e164 and serverGuids
  const reportSpamJobs = getJobsInQueue(db, 'report spam');
  deleteJobsInQueue.run({ queueType: 'report spam' });

  reportSpamJobs.forEach(job => {
    const { data, id } = job;

    if (!isRecord(data)) {
      logger.warn(`report spam queue job ${id} was missing valid data`);
      return;
    }

    const { e164, serverGuids } = data;
    if (typeof e164 !== 'string') {
      logger.warn(`report spam queue job ${id} had a non-string e164`);
      return;
    }

    if (!isIterable(serverGuids)) {
      logger.warn(`report spam queue job ${id} had a non-iterable serverGuids`);
      return;
    }

    const newJob = {
      ...job,
      queueType: 'report spam',
      data: {
        uuid: e164, // this looks odd, but they are both strings and interchangeable in the server API
        serverGuids,
      },
    };

    insertJob(db, newJob);
  });
}

View file

@ -5,69 +5,37 @@ import type { Database } from '@signalapp/sqlcipher';
import { ReadStatus } from '../../messages/MessageReadStatus';
import { SeenStatus } from '../../MessageSeenStatus';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion56(db: Database): void {
  db.exec(
    `
    --- Add column to messages table
    ALTER TABLE messages ADD COLUMN seenStatus NUMBER default 0;

    --- Add index to make searching on this field easy
    CREATE INDEX messages_unseen_no_story ON messages
      (conversationId, seenStatus, isStory, received_at, sent_at)
      WHERE
        seenStatus IS NOT NULL;

    CREATE INDEX messages_unseen_with_story ON messages
      (conversationId, seenStatus, isStory, storyId, received_at, sent_at)
      WHERE
        seenStatus IS NOT NULL;

    --- Update seenStatus to UnseenStatus.Unseen for certain messages
    --- (NULL included because 'timer-notification' in 1:1 convos had type = NULL)
    UPDATE messages
      SET
        seenStatus = ${SeenStatus.Unseen}
      WHERE
        readStatus = ${ReadStatus.Unread} AND
        (
          type IS NULL
          OR
          type IN (
            'call-history',
            'change-number-notification',
            'chat-session-refreshed',
            'delivery-issue',
            'group',
            'incoming',
            'keychange',
            'timer-notification',
            'verified-change'
          )
        );

    --- Set readStatus to ReadStatus.Read for all other message types
    UPDATE messages
      SET
        readStatus = ${ReadStatus.Read}
      WHERE
        readStatus = ${ReadStatus.Unread} AND
        type IS NOT NULL AND
        type NOT IN (
          'call-history',
          'change-number-notification',
          'chat-session-refreshed',
          'delivery-issue',
          'group',
          'incoming',
          'keychange',
          'timer-notification',
          'verified-change'
        );
    `
  );
}
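ReadStatus and SeenStatus are numeric enums, which is why their values are interpolated straight into the SQL above. A small sketch of the kind of read query the new seenStatus column and messages_unseen_no_story index are built for, assuming the prepare/pluck API used elsewhere in these files; countUnseenMessages is a hypothetical helper, not code from this commit:

import type { Database } from '@signalapp/sqlcipher';
import { SeenStatus } from '../../MessageSeenStatus';

function countUnseenMessages(db: Database, conversationId: string) {
  // Matches the (conversationId, seenStatus, isStory, ...) shape of
  // messages_unseen_no_story.
  return db
    .prepare(
      `
      SELECT COUNT(*) FROM messages
      WHERE
        conversationId = ? AND
        seenStatus IS ${SeenStatus.Unseen} AND
        isStory IS 0;
      `,
      { pluck: true }
    )
    .get([conversationId]);
}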

View file

@ -3,27 +3,11 @@
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion57(db: Database): void {
  db.exec(
    `
    DELETE FROM messages
    WHERE type IS 'message-history-unsynced';
    `
  );
}

View file

@ -5,73 +5,43 @@ import type { Database } from '@signalapp/sqlcipher';
import { ReadStatus } from '../../messages/MessageReadStatus';
import { SeenStatus } from '../../MessageSeenStatus';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion58(db: Database): void {
  db.exec(
    `
    --- Promote unread status in JSON to SQL column

    -- NOTE: This was disabled because the 'unread' json field was deprecated
    -- in b0750e5f4e1f79f0f177b17cbe06d688431f948d, but the old value was kept
    -- in the messages created before the release of that commit.
    --
    -- UPDATE messages
    --   SET
    --     readStatus = ${ReadStatus.Unread},
    --     seenStatus = ${SeenStatus.Unseen}
    --   WHERE
    --     json_extract(json, '$.unread') IS true OR
    --     json_extract(json, '$.unread') IS 1;

    --- Clean up all old messages that still have a null read status
    --- Note: we don't need to update seenStatus, because that was defaulted to zero
    UPDATE messages
      SET
        readStatus = ${ReadStatus.Read}
      WHERE
        readStatus IS NULL;

    --- Re-run unseen/unread queries from migration 56
    UPDATE messages
      SET
        seenStatus = ${SeenStatus.Unseen}
      WHERE
        readStatus = ${ReadStatus.Unread} AND
        (
          type IS NULL
          OR
          type IN (
            'call-history',
            'change-number-notification',
            'chat-session-refreshed',
            'delivery-issue',
            'group',
            'incoming',
            'keychange',
            'timer-notification',
            'verified-change'
          )
        );

    UPDATE messages
      SET
        readStatus = ${ReadStatus.Read}
      WHERE
        readStatus = ${ReadStatus.Unread} AND
        type IS NOT NULL AND
        type NOT IN (
          'call-history',
          'change-number-notification',
          'chat-session-refreshed',
          'delivery-issue',
          'group',
          'incoming',
          'keychange',
          'timer-notification',
          'verified-change'
        );

    --- (new) Ensure these message types are not unread, just unseen
    UPDATE messages
      SET
        readStatus = ${ReadStatus.Read}
      WHERE
        readStatus = ${ReadStatus.Unread} AND
        (
          type IN (
            'change-number-notification',
            'keychange'
          )
        );

    --- (new) Ensure that these message types are neither unseen nor unread
    UPDATE messages
      SET
        readStatus = ${ReadStatus.Read},
        seenStatus = ${SeenStatus.Seen}
      WHERE
        type IN (
          'group-v1-migration',
          'message-history-unsynced',
          'outgoing',
          'profile-change',
          'universal-timer-notification'
        );

    --- Make sure JSON reflects SQL columns
    UPDATE messages
      SET
        json = json_patch(
          json,
          json_object(
            'readStatus', readStatus,
            'seenStatus', seenStatus
          )
        )
      WHERE
        readStatus IS NOT NULL OR
        seenStatus IS NOT 0;
    `
  );
}

View file

@ -3,26 +3,11 @@
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion59(db: Database): void {
  db.exec(
    `
    CREATE INDEX unprocessed_byReceivedAtCounter ON unprocessed
      (receivedAtCounter)
    `
  );
}

View file

@ -3,38 +3,22 @@
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
// TODO: DESKTOP-3694
export default function updateToSchemaVersion60(db: Database): void {
  db.exec(
    `
    DROP INDEX expiring_message_by_conversation_and_received_at;

    CREATE INDEX expiring_message_by_conversation_and_received_at
      ON messages
      (
        conversationId,
        storyId,
        expirationStartTimestamp,
        expireTimer,
        received_at
      )
      WHERE isStory IS 0 AND type IS 'incoming';
    `
  );
}

View file

@ -3,42 +3,26 @@
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion61(db: Database): void {
  db.exec(
    `
    ALTER TABLE storyDistributions DROP COLUMN avatarKey;
    ALTER TABLE storyDistributions DROP COLUMN avatarUrlPath;

    ALTER TABLE storyDistributions ADD COLUMN deletedAtTimestamp INTEGER;
    ALTER TABLE storyDistributions ADD COLUMN allowsReplies INTEGER;
    ALTER TABLE storyDistributions ADD COLUMN isBlockList INTEGER;

    ALTER TABLE storyDistributions ADD COLUMN storageID STRING;
    ALTER TABLE storyDistributions ADD COLUMN storageVersion INTEGER;
    ALTER TABLE storyDistributions ADD COLUMN storageUnknownFields BLOB;
    ALTER TABLE storyDistributions ADD COLUMN storageNeedsSync INTEGER;

    ALTER TABLE messages ADD COLUMN storyDistributionListId STRING;

    CREATE INDEX messages_by_distribution_list
      ON messages(storyDistributionListId, received_at)
      WHERE storyDistributionListId IS NOT NULL;
    `
  );
}

View file

@ -3,26 +3,10 @@
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion62(db: Database): void {
  db.exec(
    `
    ALTER TABLE sendLogPayloads ADD COLUMN urgent INTEGER;
    `
  );
}

View file

@ -3,26 +3,10 @@
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion63(db: Database): void {
  db.exec(
    `
    ALTER TABLE unprocessed ADD COLUMN urgent INTEGER;
    `
  );
}

View file

@ -3,36 +3,20 @@
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion64(db: Database): void {
  db.exec(
    `
    ALTER TABLE preKeys
      ADD COLUMN ourUuid STRING
      GENERATED ALWAYS AS (json_extract(json, '$.ourUuid'));

    CREATE INDEX preKeys_ourUuid ON preKeys (ourUuid);

    ALTER TABLE signedPreKeys
      ADD COLUMN ourUuid STRING
      GENERATED ALWAYS AS (json_extract(json, '$.ourUuid'));

    CREATE INDEX signedPreKeys_ourUuid ON signedPreKeys (ourUuid);
    `
  );
}
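The generated ourUuid columns make it possible to scope key cleanup to a single account identity without parsing JSON in application code. A hypothetical example, assuming the prepare API used elsewhere in these files; deletePreKeysForIdentity is not code from this commit:

import type { Database } from '@signalapp/sqlcipher';

function deletePreKeysForIdentity(db: Database, ourUuid: string) {
  // Served by the preKeys_ourUuid and signedPreKeys_ourUuid indexes above.
  db.prepare('DELETE FROM preKeys WHERE ourUuid IS ?').run([ourUuid]);
  db.prepare('DELETE FROM signedPreKeys WHERE ourUuid IS ?').run([ourUuid]);
}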

View file

@ -3,60 +3,44 @@
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion65(db: Database): void {
  db.exec(
    `
    ALTER TABLE sticker_packs ADD COLUMN position INTEGER DEFAULT 0 NOT NULL;
    ALTER TABLE sticker_packs ADD COLUMN storageID STRING;
    ALTER TABLE sticker_packs ADD COLUMN storageVersion INTEGER;
    ALTER TABLE sticker_packs ADD COLUMN storageUnknownFields BLOB;
    ALTER TABLE sticker_packs
      ADD COLUMN storageNeedsSync
      INTEGER DEFAULT 0 NOT NULL;

    CREATE TABLE uninstalled_sticker_packs (
      id STRING NOT NULL PRIMARY KEY,
      uninstalledAt NUMBER NOT NULL,
      storageID STRING,
      storageVersion NUMBER,
      storageUnknownFields BLOB,
      storageNeedsSync INTEGER NOT NULL
    );

    -- Set initial position
    UPDATE sticker_packs
    SET
      position = (row_number - 1),
      storageNeedsSync = 1
    FROM (
      SELECT id, row_number() OVER (ORDER BY lastUsed DESC) as row_number
      FROM sticker_packs
    ) as ordered_pairs
    WHERE sticker_packs.id IS ordered_pairs.id;

    -- See: getAllStickerPacks
    CREATE INDEX sticker_packs_by_position_and_id ON sticker_packs (
      position ASC,
      id ASC
    );
    `
  );
}
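The backfill above assigns positions from lastUsed via a window function and flags every pack for storage sync. A quick way to sanity-check the result, assuming the prepare/pluck API used elsewhere in these files; listPackIdsInOrder is a hypothetical helper:

import type { Database } from '@signalapp/sqlcipher';

function listPackIdsInOrder(db: Database) {
  // Same ordering the sticker_packs_by_position_and_id index serves.
  return db
    .prepare('SELECT id FROM sticker_packs ORDER BY position ASC, id ASC;', {
      pluck: true,
    })
    .all<string>();
}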

View file

@ -3,27 +3,11 @@
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion66(db: Database): void {
  db.exec(
    `
    ALTER TABLE sendLogPayloads
      ADD COLUMN hasPniSignatureMessage INTEGER DEFAULT 0 NOT NULL;
    `
  );
}

View file

@ -3,26 +3,10 @@
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion67(db: Database): void {
  db.exec(
    `
    ALTER TABLE unprocessed ADD COLUMN story INTEGER;
    `
  );
}

View file

@ -3,29 +3,13 @@
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion68(db: Database): void {
  db.exec(
    `
    ALTER TABLE messages
      DROP COLUMN deprecatedSourceDevice;

    ALTER TABLE unprocessed
      DROP COLUMN deprecatedSourceDevice;
    `
  );
}

View file

@ -3,31 +3,15 @@
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion69(db: Database): void {
  db.exec(
    `
    DROP TABLE IF EXISTS groupCallRings;

    CREATE TABLE groupCallRingCancellations(
      ringId INTEGER PRIMARY KEY,
      createdAt INTEGER NOT NULL
    );
    `
  );
}

View file

@ -3,27 +3,11 @@
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion70(db: Database): void {
  // Used in `getAllStories`.
  db.exec(
    `
    CREATE INDEX messages_by_storyId ON messages (storyId);
    `
  );
}

View file

@ -3,121 +3,105 @@
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion71(db: Database): void {
  db.exec(
    `
    --- These will be re-added below
    DROP INDEX messages_preview;
    DROP INDEX messages_activity;
    DROP INDEX message_user_initiated;

    --- These will also be re-added below
    ALTER TABLE messages DROP COLUMN shouldAffectActivity;
    ALTER TABLE messages DROP COLUMN shouldAffectPreview;
    ALTER TABLE messages DROP COLUMN isUserInitiatedMessage;

    --- Note: These generated columns were originally introduced in migration 47, and
    --- are mostly the same

    --- Based on the current list (model-types.ts), the types which DO affect activity:
    ---   NULL (old, malformed data)
    ---   call-history
    ---   chat-session-refreshed (deprecated)
    ---   delivery-issue
    ---   group (deprecated)
    ---   group-v2-change
    ---   incoming
    ---   outgoing
    ---   timer-notification

    --- (change: added conversation-merge, keychange, and phone-number-discovery)
    ALTER TABLE messages
      ADD COLUMN shouldAffectActivity INTEGER
      GENERATED ALWAYS AS (
        type IS NULL
        OR
        type NOT IN (
          'change-number-notification',
          'conversation-merge',
          'group-v1-migration',
          'keychange',
          'message-history-unsynced',
          'phone-number-discovery',
          'profile-change',
          'story',
          'universal-timer-notification',
          'verified-change'
        )
      );

    --- (change: added conversation-merge and phone-number-discovery
    ---  (now matches the above list)
    ALTER TABLE messages
      ADD COLUMN shouldAffectPreview INTEGER
      GENERATED ALWAYS AS (
        type IS NULL
        OR
        type NOT IN (
          'change-number-notification',
          'conversation-merge',
          'group-v1-migration',
          'keychange',
          'message-history-unsynced',
          'phone-number-discovery',
          'profile-change',
          'story',
          'universal-timer-notification',
          'verified-change'
        )
      );

    --- Note: This list only differs from the above on these types:
    ---   group-v2-change

    --- (change: added conversation-merge and phone-number-discovery
    ALTER TABLE messages
      ADD COLUMN isUserInitiatedMessage INTEGER
      GENERATED ALWAYS AS (
        type IS NULL
        OR
        type NOT IN (
          'change-number-notification',
          'conversation-merge',
          'group-v1-migration',
          'group-v2-change',
          'keychange',
          'message-history-unsynced',
          'phone-number-discovery',
          'profile-change',
          'story',
          'universal-timer-notification',
          'verified-change'
        )
      );

    CREATE INDEX messages_preview ON messages
      (conversationId, shouldAffectPreview, isGroupLeaveEventFromOther, expiresAt, received_at, sent_at);

    CREATE INDEX messages_activity ON messages
      (conversationId, shouldAffectActivity, isTimerChangeFromSync, isGroupLeaveEventFromOther, received_at, sent_at);

    CREATE INDEX message_user_initiated ON messages (isUserInitiatedMessage);
    `
  );
}
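These generated columns bake the per-type rules into the schema so list queries can filter on a single flag. A sketch of a conversation-preview lookup shaped to the messages_preview index above, assuming the prepare/pluck API used elsewhere in these files; getPreviewJson is a hypothetical helper, not code from this commit:

import type { Database } from '@signalapp/sqlcipher';

function getPreviewJson(db: Database, conversationId: string) {
  return db
    .prepare(
      `
      SELECT json FROM messages
      WHERE
        conversationId = ? AND
        shouldAffectPreview IS 1 AND
        isGroupLeaveEventFromOther IS 0
      ORDER BY received_at DESC, sent_at DESC
      LIMIT 1;
      `,
      { pluck: true }
    )
    .get([conversationId]);
}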

View file

@ -2,37 +2,22 @@
// SPDX-License-Identifier: AGPL-3.0-only
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion72(db: Database): void {
  db.exec(
    `
    ALTER TABLE messages
      ADD COLUMN callId TEXT
      GENERATED ALWAYS AS (
        json_extract(json, '$.callHistoryDetails.callId')
      );

    ALTER TABLE messages
      ADD COLUMN callMode TEXT
      GENERATED ALWAYS AS (
        json_extract(json, '$.callHistoryDetails.callMode')
      );

    CREATE INDEX messages_call ON messages
      (conversationId, type, callMode, callId);
    `
  );
}
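With callId and callMode extracted into generated columns, call-history rows can be found without scanning JSON. A hypothetical lookup using the messages_call index, assuming the prepare/pluck API used elsewhere in these files; the 'Direct' callMode literal is only an example value:

import type { Database } from '@signalapp/sqlcipher';

function getDirectCallHistory(
  db: Database,
  conversationId: string,
  callId: string
) {
  return db
    .prepare(
      `
      SELECT json FROM messages
      WHERE
        conversationId = ? AND
        type IS 'call-history' AND
        callMode IS 'Direct' AND
        callId IS ?;
      `,
      { pluck: true }
    )
    .get([conversationId, callId]);
}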

View file

@ -3,110 +3,94 @@
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion73(db: Database): void {
  db.exec(
    `
    --- Delete deprecated notifications
    DELETE FROM messages WHERE type IS 'phone-number-discovery';

    --- These will be re-added below
    DROP INDEX messages_preview;
    DROP INDEX messages_activity;
    DROP INDEX message_user_initiated;

    --- These will also be re-added below
    ALTER TABLE messages DROP COLUMN shouldAffectActivity;
    ALTER TABLE messages DROP COLUMN shouldAffectPreview;
    ALTER TABLE messages DROP COLUMN isUserInitiatedMessage;

    --- Note: These generated columns were originally introduced in migration 71, and
    --- are mostly the same

    --- (change: removed phone-number-discovery)
    ALTER TABLE messages
      ADD COLUMN shouldAffectActivity INTEGER
      GENERATED ALWAYS AS (
        type IS NULL
        OR
        type NOT IN (
          'change-number-notification',
          'conversation-merge',
          'group-v1-migration',
          'keychange',
          'message-history-unsynced',
          'profile-change',
          'story',
          'universal-timer-notification',
          'verified-change'
        )
      );

    --- (change: removed phone-number-discovery
    ---  (now matches the above list)
    ALTER TABLE messages
      ADD COLUMN shouldAffectPreview INTEGER
      GENERATED ALWAYS AS (
        type IS NULL
        OR
        type NOT IN (
          'change-number-notification',
          'conversation-merge',
          'group-v1-migration',
          'keychange',
          'message-history-unsynced',
          'profile-change',
          'story',
          'universal-timer-notification',
          'verified-change'
        )
      );

    --- Note: This list only differs from the above on these types:
    ---   group-v2-change

    --- (change: removed phone-number-discovery
    ALTER TABLE messages
      ADD COLUMN isUserInitiatedMessage INTEGER
      GENERATED ALWAYS AS (
        type IS NULL
        OR
        type NOT IN (
          'change-number-notification',
          'conversation-merge',
          'group-v1-migration',
          'group-v2-change',
          'keychange',
          'message-history-unsynced',
          'profile-change',
          'story',
          'universal-timer-notification',
          'verified-change'
        )
      );

    CREATE INDEX messages_preview ON messages
      (conversationId, shouldAffectPreview, isGroupLeaveEventFromOther, expiresAt, received_at, sent_at);

    CREATE INDEX messages_activity ON messages
      (conversationId, shouldAffectActivity, isTimerChangeFromSync, isGroupLeaveEventFromOther, received_at, sent_at);

    CREATE INDEX message_user_initiated ON messages (isUserInitiatedMessage);
    `
  );
}

View file

@ -3,37 +3,21 @@
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion74(db: Database): void {
  db.exec(
    `
    -- Previously: (isUserInitiatedMessage)
    DROP INDEX message_user_initiated;

    CREATE INDEX message_user_initiated ON messages (conversationId, isUserInitiatedMessage);

    -- Previously: (unread, conversationId)
    DROP INDEX reactions_unread;

    CREATE INDEX reactions_unread ON reactions (
      conversationId,
      unread
    );
    `
  );
}

View file

@ -1,25 +1,8 @@
// Copyright 2023 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion75(): void {
  // This was previously a FTS5 migration, but we had to reorder the
  // migrations for backports.
  // See: migrations 76 and 77.
}

View file

@ -3,57 +3,41 @@
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion76(db: Database): void {
  db.exec(
    `
    -- Re-created below
    DROP INDEX IF EXISTS message_expires_at;
    DROP INDEX IF EXISTS messages_preview;

    -- Create non-null expiresAt column
    ALTER TABLE messages
      DROP COLUMN expiresAt;

    ALTER TABLE messages
      ADD COLUMN
      expiresAt INT
      GENERATED ALWAYS
      AS (ifnull(
        expirationStartTimestamp + (expireTimer * 1000),
        ${Number.MAX_SAFE_INTEGER}
      ));

    -- Re-create indexes
    -- Note the "s" at the end of "messages"
    CREATE INDEX messages_expires_at ON messages (
      expiresAt
    );

    -- Note that expiresAt is intentionally dropped from the index since
    -- expiresAt > $now is likely to be true so we just try selecting it
    -- *after* ordering by received_at/sent_at.
    CREATE INDEX messages_preview ON messages
      (conversationId, shouldAffectPreview, isGroupLeaveEventFromOther,
       received_at, sent_at);

    CREATE INDEX messages_preview_without_story ON messages
      (conversationId, shouldAffectPreview, isGroupLeaveEventFromOther,
       received_at, sent_at) WHERE storyId IS NULL;
    `
  );
}
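Because expiresAt is now non-null (defaulting to Number.MAX_SAFE_INTEGER when no timer is set), expiration sweeps can filter on one indexed column. A minimal sketch, assuming the prepare/pluck API used elsewhere in these files; getExpiredMessageIds is a hypothetical helper:

import type { Database } from '@signalapp/sqlcipher';

function getExpiredMessageIds(db: Database, now: number) {
  // Served by the messages_expires_at index created above.
  return db
    .prepare(
      'SELECT id FROM messages WHERE expiresAt <= ? ORDER BY expiresAt ASC;',
      { pluck: true }
    )
    .all<string>([now]);
}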

View file

@ -3,42 +3,26 @@
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion77(db: Database): void {
  db.exec(
    `
    -- Create FTS table with custom tokenizer from
    -- @signalapp/sqlcipher.
    DROP TABLE messages_fts;

    CREATE VIRTUAL TABLE messages_fts USING fts5(
      body,
      tokenize = 'signal_tokenizer'
    );

    -- Reindex messages
    -- Based on messages_on_insert trigger from migrations/45-stories.ts
    INSERT INTO messages_fts (rowid, body)
    SELECT rowid, body
    FROM messages
    WHERE isViewOnce IS NOT 1 AND storyId IS NULL;
    `
  );
}
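After the rebuild, full-text search goes through the FTS table and joins back to messages by rowid. A hedged sketch, assuming standard FTS5 MATCH semantics and the prepare/pluck API used elsewhere in these files; searchMessageBodies is a hypothetical helper:

import type { Database } from '@signalapp/sqlcipher';

function searchMessageBodies(db: Database, query: string) {
  return db
    .prepare(
      `
      SELECT messages.json
      FROM messages_fts
      INNER JOIN messages ON messages.rowid = messages_fts.rowid
      WHERE messages_fts MATCH ?
      LIMIT 50;
      `,
      { pluck: true }
    )
    .all<string>([query]);
}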

View file

@ -7,125 +7,112 @@ import type { WritableDB } from '../Interface';
import { getJobsInQueue, insertJob } from '../Server';
export default function updateToSchemaVersion78(
  db: WritableDB,
  logger: LoggerType
): void {
  const deleteJobsInQueue = db.prepare(
    'DELETE FROM jobs WHERE queueType = $queueType'
  );

  const queues = [
    {
      queueType: 'delivery receipts',
      jobDataKey: 'deliveryReceipts',
      jobDataIsArray: true,
      newReceiptsType: 'deliveryReceipt',
    },
    {
      queueType: 'read receipts',
      jobDataKey: 'readReceipts',
      jobDataIsArray: true,
      newReceiptsType: 'readReceipt',
    },
    {
      queueType: 'viewed receipts',
      jobDataKey: 'viewedReceipt',
      jobDataIsArray: false,
      newReceiptsType: 'viewedReceipt',
    },
  ];

  const getMessageById = db.prepare(
    'SELECT conversationId FROM messages WHERE id IS ?'
  );

  for (const queue of queues) {
    const prevJobs = getJobsInQueue(db, queue.queueType);
    deleteJobsInQueue.run({ queueType: queue.queueType });

    prevJobs.forEach(job => {
      const { data, id } = job;

      if (!isRecord(data)) {
        logger.warn(
          `${queue.queueType} queue job ${id} was missing valid data`
        );
        return;
      }

      const { messageId } = data;
      if (typeof messageId !== 'string') {
        logger.warn(
          `${queue.queueType} queue job ${id} had a non-string messageId`
        );
        return;
      }

      const message = getMessageById.get([messageId]);
      if (!message) {
        logger.warn(`Unable to find message for ${queue.queueType} job ${id}`);
        return;
      }

      const { conversationId } = message;
      if (typeof conversationId !== 'string') {
        logger.warn(
          `${queue.queueType} queue job ${id} had a non-string conversationId`
        );
        return;
      }

      const oldReceipts = queue.jobDataIsArray
        ? data[queue.jobDataKey]
        : [data[queue.jobDataKey]];

      if (!Array.isArray(oldReceipts)) {
        logger.warn(
          `${queue.queueType} queue job ${id} had a non-array ${queue.jobDataKey}`
        );
        return;
      }

      const newReceipts = [];

      for (const receipt of oldReceipts) {
        if (!isRecord(receipt)) {
          logger.warn(
            `${queue.queueType} queue job ${id} had a non-record receipt`
          );
          continue;
        }

        newReceipts.push({
          ...receipt,
          conversationId,
        });
      }

      const newJob = {
        ...job,
        queueType: 'conversation',
        data: {
          type: 'Receipts',
          conversationId,
          receiptsType: queue.newReceiptsType,
          receipts: newReceipts,
        },
      };

      insertJob(db, newJob);
    });
  }
}
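For reference, each surviving receipt job comes out of this migration as a single 'conversation' queue job. An illustrative shape only: the id and other literal values below are invented, and any extra fields on the original job are preserved by the spread above:

const exampleConvertedJob = {
  id: 'job-id-placeholder',
  queueType: 'conversation',
  data: {
    type: 'Receipts',
    conversationId: 'conversation-id-placeholder',
    receiptsType: 'viewedReceipt',
    receipts: [
      {
        messageId: 'message-id-placeholder',
        conversationId: 'conversation-id-placeholder',
      },
    ],
  },
};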

View file

@ -3,30 +3,14 @@
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion79(db: Database): void {
  db.exec(`
    DROP INDEX messages_hasVisualMediaAttachments;

    CREATE INDEX messages_hasVisualMediaAttachments
      ON messages (
        conversationId, isStory, storyId,
        hasVisualMediaAttachments, received_at, sent_at
      )
      WHERE hasVisualMediaAttachments IS 1;
  `);
}

View file

@ -3,32 +3,16 @@
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion80(db: Database): void {
  db.exec(`
    CREATE TABLE edited_messages(
      fromId STRING,
      messageId STRING REFERENCES messages(id)
        ON DELETE CASCADE,
      sentAt INTEGER,
      readStatus INTEGER
    );

    CREATE INDEX edited_messages_sent_at ON edited_messages (sentAt);
  `);
}

View file

@ -3,116 +3,100 @@
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion81(db: Database): void {
  db.exec(
    `
    --- These will be re-added below
    DROP INDEX messages_preview;
    DROP INDEX messages_preview_without_story;
    DROP INDEX messages_activity;
    DROP INDEX message_user_initiated;

    --- These will also be re-added below
    ALTER TABLE messages DROP COLUMN shouldAffectActivity;
    ALTER TABLE messages DROP COLUMN shouldAffectPreview;
    ALTER TABLE messages DROP COLUMN isUserInitiatedMessage;

    --- Note: These generated columns were previously modified in
    --- migration 73, and are mostly the same

    --- (change: added contact-removed-notification)
    ALTER TABLE messages
      ADD COLUMN shouldAffectActivity INTEGER
      GENERATED ALWAYS AS (
        type IS NULL
        OR
        type NOT IN (
          'change-number-notification',
          'contact-removed-notification',
          'conversation-merge',
          'group-v1-migration',
          'keychange',
          'message-history-unsynced',
          'profile-change',
          'story',
          'universal-timer-notification',
          'verified-change'
        )
      );

    --- (change: added contact-removed-notification)
    ALTER TABLE messages
      ADD COLUMN shouldAffectPreview INTEGER
      GENERATED ALWAYS AS (
        type IS NULL
        OR
        type NOT IN (
          'change-number-notification',
          'contact-removed-notification',
          'conversation-merge',
          'group-v1-migration',
          'keychange',
          'message-history-unsynced',
          'profile-change',
          'story',
          'universal-timer-notification',
          'verified-change'
        )
      );

    --- (change: added contact-removed-notification)
    ALTER TABLE messages
      ADD COLUMN isUserInitiatedMessage INTEGER
      GENERATED ALWAYS AS (
        type IS NULL
        OR
        type NOT IN (
          'change-number-notification',
          'contact-removed-notification',
          'conversation-merge',
          'group-v1-migration',
          'group-v2-change',
          'keychange',
          'message-history-unsynced',
          'profile-change',
          'story',
          'universal-timer-notification',
          'verified-change'
        )
      );

    --- From migration 76
    CREATE INDEX messages_preview ON messages
      (conversationId, shouldAffectPreview, isGroupLeaveEventFromOther,
       received_at, sent_at);

    --- From migration 76
    CREATE INDEX messages_preview_without_story ON messages
      (conversationId, shouldAffectPreview, isGroupLeaveEventFromOther,
       received_at, sent_at) WHERE storyId IS NULL;

    --- From migration 73
    CREATE INDEX messages_activity ON messages
      (conversationId, shouldAffectActivity, isTimerChangeFromSync, isGroupLeaveEventFromOther, received_at, sent_at);

    --- From migration 74
    CREATE INDEX message_user_initiated ON messages (conversationId, isUserInitiatedMessage);
    `
  );
}

View file

@ -3,27 +3,11 @@
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion82(db: Database): void {
  db.exec(`
    ALTER TABLE edited_messages DROP COLUMN fromId;
    ALTER TABLE edited_messages ADD COLUMN conversationId STRING;

    CREATE INDEX edited_messages_unread ON edited_messages (readStatus, conversationId);
  `);
}

View file

@ -2,37 +2,22 @@
// SPDX-License-Identifier: AGPL-3.0-only
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion83(db: Database): void {
  db.exec(
    `
    ALTER TABLE messages
      ADD COLUMN mentionsMe INTEGER NOT NULL DEFAULT 0;

    -- one which includes story data...
    CREATE INDEX messages_unread_mentions ON messages
      (conversationId, readStatus, mentionsMe, isStory, storyId, received_at, sent_at)
      WHERE readStatus IS NOT NULL;

    -- ...and one which doesn't, so storyPredicate works as expected
    CREATE INDEX messages_unread_mentions_no_story_id ON messages
      (conversationId, readStatus, mentionsMe, isStory, received_at, sent_at)
      WHERE isStory IS 0 AND readStatus IS NOT NULL;
    `
  );
}
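The mentionsMe column plus the two partial indexes are aimed at unread-mention badge queries. A sketch of one such query, assuming the prepare/pluck API used elsewhere in these files; countUnreadMentions is a hypothetical helper, not code from this commit:

import type { Database } from '@signalapp/sqlcipher';
import { ReadStatus } from '../../messages/MessageReadStatus';

function countUnreadMentions(db: Database, conversationId: string) {
  // Shaped for the messages_unread_mentions_no_story_id index.
  return db
    .prepare(
      `
      SELECT COUNT(*) FROM messages
      WHERE
        conversationId = ? AND
        readStatus IS ${ReadStatus.Unread} AND
        mentionsMe IS 1 AND
        isStory IS 0;
      `,
      { pluck: true }
    )
    .get([conversationId]);
}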

View file

@ -2,59 +2,44 @@
// SPDX-License-Identifier: AGPL-3.0-only
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion84(db: Database): void {
  const selectMentionsFromMessages = `
    SELECT messages.id, bodyRanges.value ->> 'mentionUuid' as mentionUuid, bodyRanges.value ->> 'start' as start, bodyRanges.value ->> 'length' as length
    FROM messages, json_each(messages.json ->> 'bodyRanges') as bodyRanges
    WHERE bodyRanges.value ->> 'mentionUuid' IS NOT NULL
  `;

  db.exec(`
    DROP TABLE IF EXISTS mentions;

    CREATE TABLE mentions (
      messageId REFERENCES messages(id) ON DELETE CASCADE,
      mentionUuid STRING,
      start INTEGER,
      length INTEGER
    );

    CREATE INDEX mentions_uuid ON mentions (mentionUuid);

    INSERT INTO mentions (messageId, mentionUuid, start, length)
    ${selectMentionsFromMessages};

    -- Note: any changes to this trigger must be reflected in
    -- Server.ts: enableMessageInsertTriggersAndBackfill
    CREATE TRIGGER messages_on_insert_insert_mentions AFTER INSERT ON messages
    BEGIN
      INSERT INTO mentions (messageId, mentionUuid, start, length)
      ${selectMentionsFromMessages}
      AND messages.id = new.id;
    END;

    CREATE TRIGGER messages_on_update_update_mentions AFTER UPDATE ON messages
    BEGIN
      DELETE FROM mentions WHERE messageId = new.id;
      INSERT INTO mentions (messageId, mentionUuid, start, length)
      ${selectMentionsFromMessages}
      AND messages.id = new.id;
    END;
  `);
}
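
The mentions table and the two triggers above keep one row per body-range mention in sync with messages. A small hedged sketch of the kind of lookup this enables, reusing the sql helper and Database API from these migrations; getMessageIdsMentioning is an illustrative name, not an existing function.

import type { Database } from '@signalapp/sqlcipher';
import { sql } from '../util';

// Sketch: ids of messages in one conversation that mention a given uuid,
// served by the mentions_uuid index created in migration 84.
function getMessageIdsMentioning(
  db: Database,
  conversationId: string,
  mentionUuid: string
): Array<string> {
  const [query, params] = sql`
    SELECT DISTINCT messages.id
    FROM mentions
    INNER JOIN messages ON messages.id = mentions.messageId
    WHERE mentions.mentionUuid = ${mentionUuid}
      AND messages.conversationId = ${conversationId};
  `;
  const rows: Array<{ id: string }> = db.prepare(query).all(params);
  return rows.map(({ id }) => id);
}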

View file

@ -3,40 +3,24 @@
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion85(db: Database): void {
db.exec(
`CREATE TABLE kyberPreKeys(
id STRING PRIMARY KEY NOT NULL,
json TEXT NOT NULL,
ourUuid STRING
GENERATED ALWAYS AS (json_extract(json, '$.ourUuid'))
);`
);
export default function updateToSchemaVersion85(
currentVersion: number,
db: Database,
logger: LoggerType
): void {
if (currentVersion >= 85) {
return;
}
// To manage our ACI or PNI keys quickly
db.exec('CREATE INDEX kyberPreKeys_ourUuid ON kyberPreKeys (ourUuid);');
db.transaction(() => {
db.exec(
`CREATE TABLE kyberPreKeys(
id STRING PRIMARY KEY NOT NULL,
json TEXT NOT NULL,
ourUuid STRING
GENERATED ALWAYS AS (json_extract(json, '$.ourUuid'))
);`
);
// To manage our ACI or PNI keys quickly
db.exec('CREATE INDEX kyberPreKeys_ourUuid ON kyberPreKeys (ourUuid);');
// Add time to all existing preKeys to allow us to expire them
const now = Date.now();
db.exec(
`UPDATE preKeys SET
json = json_set(json, '$.createdAt', ${now});
`
);
db.pragma('user_version = 85');
})();
logger.info('updateToSchemaVersion85: success!');
// Add time to all existing preKeys to allow us to expire them
const now = Date.now();
db.exec(
`UPDATE preKeys SET
json = json_set(json, '$.createdAt', ${now});
`
);
}
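
Migration 85 backfills createdAt onto every existing preKey precisely so that stale keys can be expired later. A hedged sketch of what such an expiry could look like; the thirty-day window and the function name are assumptions for illustration, and the actual cleanup in this commit is done by cleanKeys in the later migrations below.

import type { Database } from '@signalapp/sqlcipher';
import { sql } from '../util';

const THIRTY_DAYS = 30 * 24 * 60 * 60 * 1000;

// Sketch: delete preKeys whose JSON createdAt (stamped by migration 85) is
// older than the cutoff; returns how many rows were removed.
function deleteExpiredPreKeys(db: Database, now = Date.now()): number {
  const cutoff = now - THIRTY_DAYS;
  const [query, params] = sql`
    DELETE FROM preKeys
    WHERE json_extract(json, '$.createdAt') < ${cutoff};
  `;
  return db.prepare(query).run(params).changes;
}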

View file

@ -3,30 +3,14 @@
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export default function updateToSchemaVersion86(
currentVersion: number,
db: Database,
logger: LoggerType
): void {
if (currentVersion >= 86) {
return;
}
db.transaction(() => {
// The key reason for this new schema is that all of our previous schemas start with
// conversationId. This query is meant to find all replies to a given story, no
// matter the conversation.
db.exec(
`CREATE INDEX messages_story_replies
ON messages (storyId, received_at, sent_at)
WHERE isStory IS 0;
`
);
db.pragma('user_version = 86');
})();
logger.info('updateToSchemaVersion86: success!');
export default function updateToSchemaVersion86(db: Database): void {
// The key reason for this new schema is that all of our previous schemas start with
// conversationId. This query is meant to find all replies to a given story, no
// matter the conversation.
db.exec(
`CREATE INDEX messages_story_replies
ON messages (storyId, received_at, sent_at)
WHERE isStory IS 0;
`
);
}
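
As the comment notes, the earlier message indexes all lead with conversationId, so finding every reply to a story across conversations needed its own index. A hedged sketch of the query shape messages_story_replies serves; the function name is illustrative.

import type { Database } from '@signalapp/sqlcipher';
import { sql } from '../util';

// Sketch: all replies to one story, in timeline order, regardless of which
// conversation they live in - the access pattern the new index is built for.
function getStoryReplies(
  db: Database,
  storyId: string
): Array<{ json: string }> {
  const [query, params] = sql`
    SELECT json
    FROM messages
    WHERE isStory IS 0
      AND storyId IS ${storyId}
    ORDER BY received_at ASC, sent_at ASC;
  `;
  const rows: Array<{ json: string }> = db.prepare(query).all(params);
  return rows;
}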

View file

@ -3,8 +3,8 @@
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
import { cleanKeys } from './920-clean-more-keys';
import type { LoggerType } from '../../types/Logging';
import { sqlFragment } from '../util';
// Note: for many users, this is not what ran for them as migration 87. You can see that
@ -13,42 +13,38 @@ import { sqlFragment } from '../util';
// The goal of this migration is to ensure that key cleanup happens before migration 88.
export default function updateToSchemaVersion87(
currentVersion: number,
db: Database,
logger: LoggerType
logger: LoggerType,
startingVersion: number
): void {
// We're checking for the version of the next migration here, not this version. We want
// this to run if the user hasn't yet successfully run migration 88.
if (currentVersion >= 88) {
if (startingVersion >= 88) {
return;
}
db.transaction(() => {
cleanKeys(
db,
logger,
'updateToSchemaVersion87(cleanup)/kyberPreKeys',
sqlFragment`kyberPreKeys`,
sqlFragment`createdAt`,
sqlFragment`ourUuid`
);
cleanKeys(
db,
logger,
'updateToSchemaVersion87(cleanup)/preKeys',
sqlFragment`preKeys`,
sqlFragment`createdAt`,
sqlFragment`ourUuid`
);
cleanKeys(
db,
logger,
'updateToSchemaVersion87(cleanup)/signedPreKeys',
sqlFragment`signedPreKeys`,
sqlFragment`created_at`,
sqlFragment`ourUuid`
);
})();
logger.info('updateToSchemaVersion87(cleanup): success!');
cleanKeys(
db,
logger,
'(cleanup)/kyberPreKeys',
sqlFragment`kyberPreKeys`,
sqlFragment`createdAt`,
sqlFragment`ourUuid`
);
cleanKeys(
db,
logger,
'(cleanup)/preKeys',
sqlFragment`preKeys`,
sqlFragment`createdAt`,
sqlFragment`ourUuid`
);
cleanKeys(
db,
logger,
'(cleanup)/signedPreKeys',
sqlFragment`signedPreKeys`,
sqlFragment`created_at`,
sqlFragment`ourUuid`
);
}
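
Unlike the rest of the simplified migrations, this one still takes a version argument, now named startingVersion: the key cleanup must run only for databases that began the whole migration pass below 88, no matter how far the pass has advanced since. A hedged sketch of how an assumed runner might special-case it; the import path is a guess at the file name and is not taken from this diff.

import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
// Hypothetical path - the real file name for migration 87 is not shown here.
import updateToSchemaVersion87 from './87-clean-keys';

// Sketch: the runner passes the version the database had before the whole
// run started, so the cleanup stays conditional on crossing the 88 boundary.
function runMigration87(
  db: Database,
  logger: LoggerType,
  startingVersion: number
): void {
  db.transaction(() => {
    updateToSchemaVersion87(db, logger, startingVersion);
    db.pragma('user_version = 87');
  })();
}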

View file

@ -26,14 +26,9 @@ import { isNotNil } from '../../util/isNotNil';
//
export default function updateToSchemaVersion88(
currentVersion: number,
db: Database,
logger: LoggerType
): void {
if (currentVersion >= 88) {
return;
}
// See updateToSchemaVersion84
const selectMentionsFromMessages = `
SELECT messages.id, bodyRanges.value ->> 'mentionAci' as mentionAci,
@ -43,205 +38,199 @@ export default function updateToSchemaVersion88(
WHERE bodyRanges.value ->> 'mentionAci' IS NOT NULL
`;
db.transaction(() => {
// Rename all columns and re-create all indexes first.
db.exec(`
--
-- conversations
--
// Rename all columns and re-create all indexes first.
db.exec(`
--
-- conversations
--
DROP INDEX conversations_uuid;
DROP INDEX conversations_uuid;
ALTER TABLE conversations
RENAME COLUMN uuid TO serviceId;
ALTER TABLE conversations
RENAME COLUMN uuid TO serviceId;
-- See: updateToSchemaVersion20
CREATE INDEX conversations_serviceId ON conversations(serviceId);
-- See: updateToSchemaVersion20
CREATE INDEX conversations_serviceId ON conversations(serviceId);
--
-- sessions
--
--
-- sessions
--
ALTER TABLE sessions
RENAME COLUMN ourUuid TO ourServiceId;
ALTER TABLE sessions
RENAME COLUMN uuid TO serviceId;
ALTER TABLE sessions
RENAME COLUMN ourUuid TO ourServiceId;
ALTER TABLE sessions
RENAME COLUMN uuid TO serviceId;
--
-- messages
--
--
-- messages
--
DROP INDEX messages_sourceUuid;
DROP INDEX messages_preview;
DROP INDEX messages_preview_without_story;
DROP INDEX messages_activity;
DROP INDEX messages_sourceUuid;
DROP INDEX messages_preview;
DROP INDEX messages_preview_without_story;
DROP INDEX messages_activity;
ALTER TABLE messages
DROP COLUMN isGroupLeaveEventFromOther;
ALTER TABLE messages
DROP COLUMN isGroupLeaveEvent;
ALTER TABLE messages
DROP COLUMN isGroupLeaveEventFromOther;
ALTER TABLE messages
DROP COLUMN isGroupLeaveEvent;
ALTER TABLE messages
RENAME COLUMN sourceUuid TO sourceServiceId;
ALTER TABLE messages
RENAME COLUMN sourceUuid TO sourceServiceId;
-- See: updateToSchemaVersion47
ALTER TABLE messages
ADD COLUMN isGroupLeaveEvent INTEGER
GENERATED ALWAYS AS (
type IS 'group-v2-change' AND
json_array_length(json_extract(json, '$.groupV2Change.details')) IS 1 AND
json_extract(json, '$.groupV2Change.details[0].type') IS 'member-remove' AND
json_extract(json, '$.groupV2Change.from') IS NOT NULL AND
json_extract(json, '$.groupV2Change.from') IS json_extract(json, '$.groupV2Change.details[0].aci')
);
ALTER TABLE messages
ADD COLUMN isGroupLeaveEventFromOther INTEGER
GENERATED ALWAYS AS (
isGroupLeaveEvent IS 1
AND
isChangeCreatedByUs IS 0
);
-- See: updateToSchemaVersion25
CREATE INDEX messages_sourceServiceId on messages(sourceServiceId);
-- See: updateToSchemaVersion81
CREATE INDEX messages_preview ON messages
(conversationId, shouldAffectPreview, isGroupLeaveEventFromOther,
received_at, sent_at);
CREATE INDEX messages_preview_without_story ON messages
(conversationId, shouldAffectPreview, isGroupLeaveEventFromOther,
received_at, sent_at) WHERE storyId IS NULL;
CREATE INDEX messages_activity ON messages
(conversationId, shouldAffectActivity, isTimerChangeFromSync,
isGroupLeaveEventFromOther, received_at, sent_at);
--
-- reactions
--
DROP INDEX reaction_identifier;
ALTER TABLE reactions
RENAME COLUMN targetAuthorUuid TO targetAuthorAci;
-- See: updateToSchemaVersion29
CREATE INDEX reaction_identifier ON reactions (
emoji,
targetAuthorAci,
targetTimestamp
-- See: updateToSchemaVersion47
ALTER TABLE messages
ADD COLUMN isGroupLeaveEvent INTEGER
GENERATED ALWAYS AS (
type IS 'group-v2-change' AND
json_array_length(json_extract(json, '$.groupV2Change.details')) IS 1 AND
json_extract(json, '$.groupV2Change.details[0].type') IS 'member-remove' AND
json_extract(json, '$.groupV2Change.from') IS NOT NULL AND
json_extract(json, '$.groupV2Change.from') IS json_extract(json, '$.groupV2Change.details[0].aci')
);
--
-- unprocessed
--
ALTER TABLE messages
ADD COLUMN isGroupLeaveEventFromOther INTEGER
GENERATED ALWAYS AS (
isGroupLeaveEvent IS 1
AND
isChangeCreatedByUs IS 0
);
ALTER TABLE unprocessed
RENAME COLUMN sourceUuid TO sourceServiceId;
-- See: updateToSchemaVersion25
CREATE INDEX messages_sourceServiceId on messages(sourceServiceId);
--
-- sendLogRecipients
--
-- See: updateToSchemaVersion81
CREATE INDEX messages_preview ON messages
(conversationId, shouldAffectPreview, isGroupLeaveEventFromOther,
received_at, sent_at);
CREATE INDEX messages_preview_without_story ON messages
(conversationId, shouldAffectPreview, isGroupLeaveEventFromOther,
received_at, sent_at) WHERE storyId IS NULL;
CREATE INDEX messages_activity ON messages
(conversationId, shouldAffectActivity, isTimerChangeFromSync,
isGroupLeaveEventFromOther, received_at, sent_at);
DROP INDEX sendLogRecipientsByRecipient;
--
-- reactions
--
ALTER TABLE sendLogRecipients
RENAME COLUMN recipientUuid TO recipientServiceId;
DROP INDEX reaction_identifier;
-- See: updateToSchemaVersion37
CREATE INDEX sendLogRecipientsByRecipient
ON sendLogRecipients (recipientServiceId, deviceId);
ALTER TABLE reactions
RENAME COLUMN targetAuthorUuid TO targetAuthorAci;
--
-- storyDistributionMembers
--
-- See: updateToSchemaVersion29
CREATE INDEX reaction_identifier ON reactions (
emoji,
targetAuthorAci,
targetTimestamp
);
ALTER TABLE storyDistributionMembers
RENAME COLUMN uuid TO serviceId;
--
-- unprocessed
--
--
-- mentions
--
ALTER TABLE unprocessed
RENAME COLUMN sourceUuid TO sourceServiceId;
DROP TRIGGER messages_on_update;
DROP TRIGGER messages_on_insert_insert_mentions;
DROP TRIGGER messages_on_update_update_mentions;
DROP INDEX mentions_uuid;
--
-- sendLogRecipients
--
ALTER TABLE mentions
RENAME COLUMN mentionUuid TO mentionAci;
DROP INDEX sendLogRecipientsByRecipient;
-- See: updateToSchemaVersion84
CREATE INDEX mentions_aci ON mentions (mentionAci);
ALTER TABLE sendLogRecipients
RENAME COLUMN recipientUuid TO recipientServiceId;
--
-- preKeys
--
-- See: updateToSchemaVersion37
CREATE INDEX sendLogRecipientsByRecipient
ON sendLogRecipients (recipientServiceId, deviceId);
DROP INDEX preKeys_ourUuid;
DROP INDEX signedPreKeys_ourUuid;
DROP INDEX kyberPreKeys_ourUuid;
--
-- storyDistributionMembers
--
ALTER TABLE preKeys
RENAME COLUMN ourUuid TO ourServiceId;
ALTER TABLE signedPreKeys
RENAME COLUMN ourUuid TO ourServiceId;
ALTER TABLE kyberPreKeys
RENAME COLUMN ourUuid TO ourServiceId;
ALTER TABLE storyDistributionMembers
RENAME COLUMN uuid TO serviceId;
-- See: updateToSchemaVersion64
CREATE INDEX preKeys_ourServiceId ON preKeys (ourServiceId);
CREATE INDEX signedPreKeys_ourServiceId ON signedPreKeys (ourServiceId);
CREATE INDEX kyberPreKeys_ourServiceId ON kyberPreKeys (ourServiceId);
`);
--
-- mentions
--
// Migrate JSON fields
const { identifierToServiceId } = migrateConversations(db, logger);
const ourServiceIds = migrateItems(db, logger);
migrateSessions(db, ourServiceIds, logger);
migrateMessages(db, logger);
migratePreKeys(db, 'preKeys', ourServiceIds, logger);
migratePreKeys(db, 'signedPreKeys', ourServiceIds, logger);
migratePreKeys(db, 'kyberPreKeys', ourServiceIds, logger);
migrateJobs(db, identifierToServiceId, logger);
DROP TRIGGER messages_on_update;
DROP TRIGGER messages_on_insert_insert_mentions;
DROP TRIGGER messages_on_update_update_mentions;
DROP INDEX mentions_uuid;
// Re-create triggers after updating messages
db.exec(`
-- See: updateToSchemaVersion45
CREATE TRIGGER messages_on_update AFTER UPDATE ON messages
WHEN
(new.body IS NULL OR old.body IS NOT new.body) AND
new.isViewOnce IS NOT 1 AND new.storyId IS NULL
BEGIN
DELETE FROM messages_fts WHERE rowid = old.rowid;
INSERT INTO messages_fts
(rowid, body)
VALUES
(new.rowid, new.body);
END;
ALTER TABLE mentions
RENAME COLUMN mentionUuid TO mentionAci;
-- See: updateToSchemaVersion84
CREATE TRIGGER messages_on_insert_insert_mentions AFTER INSERT ON messages
BEGIN
INSERT INTO mentions (messageId, mentionAci, start, length)
${selectMentionsFromMessages}
AND messages.id = new.id;
END;
-- See: updateToSchemaVersion84
CREATE INDEX mentions_aci ON mentions (mentionAci);
CREATE TRIGGER messages_on_update_update_mentions AFTER UPDATE ON messages
BEGIN
DELETE FROM mentions WHERE messageId = new.id;
INSERT INTO mentions (messageId, mentionAci, start, length)
${selectMentionsFromMessages}
AND messages.id = new.id;
END;
`);
--
-- preKeys
--
db.pragma('user_version = 88');
})();
DROP INDEX preKeys_ourUuid;
DROP INDEX signedPreKeys_ourUuid;
DROP INDEX kyberPreKeys_ourUuid;
logger.info('updateToSchemaVersion88: success!');
ALTER TABLE preKeys
RENAME COLUMN ourUuid TO ourServiceId;
ALTER TABLE signedPreKeys
RENAME COLUMN ourUuid TO ourServiceId;
ALTER TABLE kyberPreKeys
RENAME COLUMN ourUuid TO ourServiceId;
-- See: updateToSchemaVersion64
CREATE INDEX preKeys_ourServiceId ON preKeys (ourServiceId);
CREATE INDEX signedPreKeys_ourServiceId ON signedPreKeys (ourServiceId);
CREATE INDEX kyberPreKeys_ourServiceId ON kyberPreKeys (ourServiceId);
`);
// Migrate JSON fields
const { identifierToServiceId } = migrateConversations(db, logger);
const ourServiceIds = migrateItems(db, logger);
migrateSessions(db, ourServiceIds, logger);
migrateMessages(db, logger);
migratePreKeys(db, 'preKeys', ourServiceIds, logger);
migratePreKeys(db, 'signedPreKeys', ourServiceIds, logger);
migratePreKeys(db, 'kyberPreKeys', ourServiceIds, logger);
migrateJobs(db, identifierToServiceId, logger);
// Re-create triggers after updating messages
db.exec(`
-- See: updateToSchemaVersion45
CREATE TRIGGER messages_on_update AFTER UPDATE ON messages
WHEN
(new.body IS NULL OR old.body IS NOT new.body) AND
new.isViewOnce IS NOT 1 AND new.storyId IS NULL
BEGIN
DELETE FROM messages_fts WHERE rowid = old.rowid;
INSERT INTO messages_fts
(rowid, body)
VALUES
(new.rowid, new.body);
END;
-- See: updateToSchemaVersion84
CREATE TRIGGER messages_on_insert_insert_mentions AFTER INSERT ON messages
BEGIN
INSERT INTO mentions (messageId, mentionAci, start, length)
${selectMentionsFromMessages}
AND messages.id = new.id;
END;
CREATE TRIGGER messages_on_update_update_mentions AFTER UPDATE ON messages
BEGIN
DELETE FROM mentions WHERE messageId = new.id;
INSERT INTO mentions (messageId, mentionAci, start, length)
${selectMentionsFromMessages}
AND messages.id = new.id;
END;
`);
}
//
@ -338,9 +327,7 @@ function migrateConversations(
'UPDATE conversations SET json = $json WHERE id IS $id'
);
logger.info(
`updateToSchemaVersion88: updating ${convos.length} conversations`
);
logger.info(`updating ${convos.length} conversations`);
// Build lookup map for senderKeyInfo
const identifierToServiceId = new Map<string, ServiceIdString>();
@ -439,9 +426,7 @@ function migrateConversations(
.map(({ identifier, ...rest }) => {
const deviceServiceId = identifierToServiceId.get(identifier);
if (!deviceServiceId) {
logger.warn(
`updateToSchemaVersion88: failed to resolve identifier ${identifier}`
);
logger.warn(`failed to resolve identifier ${identifier}`);
return undefined;
}
@ -454,10 +439,7 @@ function migrateConversations(
updateStmt.run({ id, json: JSON.stringify(modern) });
} catch (error) {
logger.warn(
`updateToSchemaVersion88: failed to parse convo ${id} json`,
error
);
logger.warn(`failed to parse convo ${id} json`, error);
continue;
}
}
@ -511,12 +493,9 @@ function migrateItems(db: Database, logger: LoggerType): OurServiceIds {
[legacyAci] = JSON.parse(uuidIdJson ?? '').value.split('.', 2);
} catch (error) {
if (uuidIdJson) {
logger.warn(
'updateToSchemaVersion88: failed to parse uuid_id item',
error
);
logger.warn('failed to parse uuid_id item', error);
} else {
logger.info('updateToSchemaVersion88: Our UUID not found');
logger.info('Our UUID not found');
}
}
@ -525,9 +504,9 @@ function migrateItems(db: Database, logger: LoggerType): OurServiceIds {
legacyPni = JSON.parse(pniJson ?? '').value;
} catch (error) {
if (pniJson) {
logger.warn('updateToSchemaVersion88: failed to parse pni item', error);
logger.warn('failed to parse pni item', error);
} else {
logger.info('updateToSchemaVersion88: Our PNI not found');
logger.info('Our PNI not found');
}
}
@ -573,7 +552,7 @@ function migrateItems(db: Database, logger: LoggerType): OurServiceIds {
updateStmt.run({ id, json: JSON.stringify(data) });
} catch (error) {
logger.warn(`updateToSchemaVersion88: failed to parse ${id} item`, error);
logger.warn(`failed to parse ${id} item`, error);
}
}
return { aci, pni, legacyAci, legacyPni };
@ -611,21 +590,18 @@ function migrateSessions(
`
);
logger.info(`updateToSchemaVersion88: updating ${sessions.length} sessions`);
logger.info(`updating ${sessions.length} sessions`);
for (const { id, serviceId, ourServiceId, json } of sessions) {
const match = id.match(/^(.*):(.*)\.(.*)$/);
if (!match) {
logger.warn(`updateToSchemaVersion88: invalid session id ${id}`);
logger.warn(`invalid session id ${id}`);
continue;
}
let legacyData: JSONWithUnknownFields<Record<string, unknown>>;
try {
legacyData = JSON.parse(json);
} catch (error) {
logger.warn(
`updateToSchemaVersion88: failed to parse session ${id}`,
error
);
logger.warn(`failed to parse session ${id}`, error);
continue;
}
@ -642,7 +618,7 @@ function migrateSessions(
);
if (!newServiceId || !newOurServiceId) {
logger.warn(
'updateToSchemaVersion88: failed to normalize session service ids',
'failed to normalize session service ids',
serviceId,
ourServiceId
);
@ -753,7 +729,7 @@ function migrateMessages(db: Database, logger: LoggerType): void {
WHERE rowid = $rowid
`);
logger.info('updateToSchemaVersion88: updating messages');
logger.info('updating messages');
let totalMessages = 0;
// eslint-disable-next-line no-constant-condition
@ -832,15 +808,12 @@ function migrateMessages(db: Database, logger: LoggerType): void {
json: JSON.stringify(updatedMessage),
});
} catch (error) {
logger.warn(
`updateToSchemaVersion88: failed to parse message ${id} json`,
error
);
logger.warn(`failed to parse message ${id} json`, error);
}
}
}
logger.info(`updateToSchemaVersion88: updated ${totalMessages} messages`);
logger.info(`updated ${totalMessages} messages`);
}
// migratePreKeys works similarly to migrateSessions and does:
@ -867,11 +840,11 @@ function migratePreKeys(
WHERE id = $id
`);
logger.info(`updateToSchemaVersion88: updating ${preKeys.length} ${table}`);
logger.info(`updating ${preKeys.length} ${table}`);
for (const { id, json } of preKeys) {
const match = id.match(/^(.*):(.*)$/);
if (!match) {
logger.warn(`updateToSchemaVersion88: invalid ${table} id ${id}`);
logger.warn(`invalid ${table} id ${id}`);
continue;
}
@ -879,10 +852,7 @@ function migratePreKeys(
try {
legacyData = JSON.parse(json);
} catch (error) {
logger.warn(
`updateToSchemaVersion88: failed to parse ${table} ${id}`,
error
);
logger.warn(`failed to parse ${table} ${id}`, error);
continue;
}
@ -1118,7 +1088,7 @@ function migrateJobs(
const serviceId = identifierToServiceId.get(identifier);
if (!serviceId) {
logger.warn(
`updateToSchemaVersion88: failed to resolve identifier ${identifier} ` +
`failed to resolve identifier ${identifier} ` +
`for job ${id}/${queueType}`
);
continue;
@ -1137,14 +1107,11 @@ function migrateJobs(
updateStmt.run({ id, data: JSON.stringify(updatedData) });
}
} catch (error) {
logger.warn(
`updateToSchemaVersion88: failed to migrate job ${id}/${queueType} json`,
error
);
logger.warn(`failed to migrate job ${id}/${queueType} json`, error);
}
}
logger.info(`updateToSchemaVersion88: updated ${updatedCount} jobs`);
logger.info(`updated ${updatedCount} jobs`);
}
//
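
The migrate* helpers above all follow one pattern: select every row, parse its json column, move data from a legacy uuid-keyed field to its serviceId-keyed replacement, and write the row back, skipping anything that fails to parse. A hedged sketch of that pattern in isolation, with illustrative names and an illustrative field mapping; this is not code from the commit.

import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';

// Sketch of the shared read-parse-rewrite pattern: rename one JSON field on
// every row of a table, tolerating rows whose JSON does not parse.
function renameJsonField(
  db: Database,
  logger: LoggerType,
  table: string,
  from: string,
  to: string
): void {
  const rows: Array<{ id: string; json: string }> = db
    .prepare(`SELECT id, json FROM ${table}`)
    .all();
  const updateStmt = db.prepare(
    `UPDATE ${table} SET json = $json WHERE id IS $id`
  );

  for (const { id, json } of rows) {
    let data: Record<string, unknown>;
    try {
      data = JSON.parse(json);
    } catch (error) {
      logger.warn(`failed to parse ${table} ${id} json`, error);
      continue;
    }
    if (!(from in data)) {
      continue;
    }
    const { [from]: value, ...rest } = data;
    updateStmt.run({ id, json: JSON.stringify({ ...rest, [to]: value }) });
  }
}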

View file

@ -94,7 +94,7 @@ function getPeerIdFromConversation(
if (conversation.type === 'private') {
if (conversation.serviceId == null) {
logger.warn(
`updateToSchemaVersion89: Private conversation (${conversation.id}) was missing serviceId (discoveredUnregisteredAt: ${conversation.discoveredUnregisteredAt})`
`Private conversation (${conversation.id}) was missing serviceId (discoveredUnregisteredAt: ${conversation.discoveredUnregisteredAt})`
);
return conversation.id;
}
@ -191,205 +191,194 @@ function convertLegacyCallDetails(
}
export default function updateToSchemaVersion89(
currentVersion: number,
db: WritableDB,
logger: LoggerType
): void {
if (currentVersion >= 89) {
return;
const ourUuid = getOurUuid(db);
const [createTable] = sql`
-- This table may have already existed from migration 87
CREATE TABLE IF NOT EXISTS callsHistory (
callId TEXT PRIMARY KEY,
peerId TEXT NOT NULL, -- conversation id (legacy) | uuid | groupId | roomId
ringerId TEXT DEFAULT NULL, -- ringer uuid
mode TEXT NOT NULL, -- enum "Direct" | "Group"
type TEXT NOT NULL, -- enum "Audio" | "Video" | "Group"
direction TEXT NOT NULL, -- enum "Incoming" | "Outgoing"
-- Direct: enum "Pending" | "Missed" | "Accepted" | "Deleted"
-- Group: enum "GenericGroupCall" | "OutgoingRing" | "Ringing" | "Joined" | "Missed" | "Declined" | "Accepted" | "Deleted"
status TEXT NOT NULL,
timestamp INTEGER NOT NULL,
UNIQUE (callId, peerId) ON CONFLICT FAIL
);
-- Update peerId to be uuid or groupId
UPDATE callsHistory
SET peerId = (
SELECT
CASE
WHEN conversations.type = 'private' THEN conversations.serviceId
WHEN conversations.type = 'group' THEN conversations.groupId
END
FROM conversations
WHERE callsHistory.peerId IS conversations.id
AND callsHistory.peerId IS NOT conversations.serviceId
)
WHERE EXISTS (
SELECT 1
FROM conversations
WHERE callsHistory.peerId IS conversations.id
AND callsHistory.peerId IS NOT conversations.serviceId
);
CREATE INDEX IF NOT EXISTS callsHistory_order on callsHistory (timestamp DESC);
CREATE INDEX IF NOT EXISTS callsHistory_byConversation ON callsHistory (peerId);
-- For 'getCallHistoryGroupData':
-- This index should target the subqueries for 'possible_parent' and 'possible_children'
CREATE INDEX IF NOT EXISTS callsHistory_callAndGroupInfo_optimize on callsHistory (
direction,
peerId,
timestamp DESC,
status
);
`;
db.exec(createTable);
const [selectQuery] = sql`
SELECT
messages.json AS messageJson,
conversations.id AS conversationId,
conversations.json AS conversationJson
FROM messages
LEFT JOIN conversations ON conversations.id = messages.conversationId
WHERE messages.type = 'call-history'
-- Some of these messages were already migrated
AND messages.json->'callHistoryDetails' IS NOT NULL
-- Sort from oldest to newest, so that newer messages can overwrite older
ORDER BY messages.received_at ASC, messages.sent_at ASC;
`;
// Must match query above
type CallHistoryRow = {
messageJson: string;
conversationId: string;
conversationJson: string;
};
const rows: Array<CallHistoryRow> = db.prepare(selectQuery).all();
for (const row of rows) {
const { messageJson, conversationId, conversationJson } = row;
const message = jsonToObject<MessageWithCallHistoryDetails>(messageJson);
const conversation = jsonToObject<ConversationType>(conversationJson);
if (!isObject(conversation)) {
logger.warn(
`Private conversation (${conversationId}) ` +
'has non-object json column'
);
continue;
}
const details = message.callHistoryDetails;
const peerId = getPeerIdFromConversation(conversation, logger);
const callHistory = convertLegacyCallDetails(
ourUuid,
peerId,
message,
details,
logger
);
const [insertQuery, insertParams] = sql`
-- Using 'OR REPLACE' because in some earlier versions of call history
-- we had a bug where we would insert duplicate call history entries
-- for the same callId and peerId.
-- We're assuming here that the latest call history entry is the most
-- accurate.
INSERT OR REPLACE INTO callsHistory (
callId,
peerId,
ringerId,
mode,
type,
direction,
status,
timestamp
) VALUES (
${callHistory.callId},
${callHistory.peerId},
${callHistory.ringerId},
${callHistory.mode},
${callHistory.type},
${callHistory.direction},
${callHistory.status},
${callHistory.timestamp}
)
`;
db.prepare(insertQuery).run(insertParams);
const messageId = message.id;
strictAssert(messageId != null, 'message.id must exist');
const [updateQuery, updateParams] = sql`
UPDATE messages
SET json = JSON_PATCH(json, ${JSON.stringify({
callHistoryDetails: null, // delete
callId: callHistory.callId,
})})
WHERE id = ${messageId}
`;
db.prepare(updateQuery).run(updateParams);
}
db.transaction(() => {
const ourUuid = getOurUuid(db);
const [dropIndex] = sql`
DROP INDEX IF EXISTS messages_call;
`;
db.exec(dropIndex);
const [createTable] = sql`
-- This table may have already existed from migration 87
CREATE TABLE IF NOT EXISTS callsHistory (
callId TEXT PRIMARY KEY,
peerId TEXT NOT NULL, -- conversation id (legacy) | uuid | groupId | roomId
ringerId TEXT DEFAULT NULL, -- ringer uuid
mode TEXT NOT NULL, -- enum "Direct" | "Group"
type TEXT NOT NULL, -- enum "Audio" | "Video" | "Group"
direction TEXT NOT NULL, -- enum "Incoming" | "Outgoing"
-- Direct: enum "Pending" | "Missed" | "Accepted" | "Deleted"
-- Group: enum "GenericGroupCall" | "OutgoingRing" | "Ringing" | "Joined" | "Missed" | "Declined" | "Accepted" | "Deleted"
status TEXT NOT NULL,
timestamp INTEGER NOT NULL,
UNIQUE (callId, peerId) ON CONFLICT FAIL
);
-- Update peerId to be uuid or groupId
UPDATE callsHistory
SET peerId = (
SELECT
CASE
WHEN conversations.type = 'private' THEN conversations.serviceId
WHEN conversations.type = 'group' THEN conversations.groupId
END
FROM conversations
WHERE callsHistory.peerId IS conversations.id
AND callsHistory.peerId IS NOT conversations.serviceId
)
WHERE EXISTS (
SELECT 1
FROM conversations
WHERE callsHistory.peerId IS conversations.id
AND callsHistory.peerId IS NOT conversations.serviceId
);
CREATE INDEX IF NOT EXISTS callsHistory_order on callsHistory (timestamp DESC);
CREATE INDEX IF NOT EXISTS callsHistory_byConversation ON callsHistory (peerId);
-- For 'getCallHistoryGroupData':
-- This index should target the subqueries for 'possible_parent' and 'possible_children'
CREATE INDEX IF NOT EXISTS callsHistory_callAndGroupInfo_optimize on callsHistory (
direction,
peerId,
timestamp DESC,
status
);
`;
db.exec(createTable);
const [selectQuery] = sql`
SELECT
messages.json AS messageJson,
conversations.id AS conversationId,
conversations.json AS conversationJson
FROM messages
LEFT JOIN conversations ON conversations.id = messages.conversationId
WHERE messages.type = 'call-history'
-- Some of these messages were already migrated
AND messages.json->'callHistoryDetails' IS NOT NULL
-- Sort from oldest to newest, so that newer messages can overwrite older
ORDER BY messages.received_at ASC, messages.sent_at ASC;
`;
// Must match query above
type CallHistoryRow = {
messageJson: string;
conversationId: string;
conversationJson: string;
};
const rows: Array<CallHistoryRow> = db.prepare(selectQuery).all();
for (const row of rows) {
const { messageJson, conversationId, conversationJson } = row;
const message = jsonToObject<MessageWithCallHistoryDetails>(messageJson);
const conversation = jsonToObject<ConversationType>(conversationJson);
if (!isObject(conversation)) {
logger.warn(
`updateToSchemaVersion89: Private conversation (${conversationId}) ` +
'has non-object json column'
);
continue;
}
const details = message.callHistoryDetails;
const peerId = getPeerIdFromConversation(conversation, logger);
const callHistory = convertLegacyCallDetails(
ourUuid,
peerId,
message,
details,
logger
);
const [insertQuery, insertParams] = sql`
-- Using 'OR REPLACE' because in some earlier versions of call history
-- we had a bug where we would insert duplicate call history entries
-- for the same callId and peerId.
-- We're assuming here that the latest call history entry is the most
-- accurate.
INSERT OR REPLACE INTO callsHistory (
callId,
peerId,
ringerId,
mode,
type,
direction,
status,
timestamp
) VALUES (
${callHistory.callId},
${callHistory.peerId},
${callHistory.ringerId},
${callHistory.mode},
${callHistory.type},
${callHistory.direction},
${callHistory.status},
${callHistory.timestamp}
)
`;
db.prepare(insertQuery).run(insertParams);
const messageId = message.id;
strictAssert(messageId != null, 'message.id must exist');
const [updateQuery, updateParams] = sql`
UPDATE messages
SET json = JSON_PATCH(json, ${JSON.stringify({
callHistoryDetails: null, // delete
callId: callHistory.callId,
})})
WHERE id = ${messageId}
`;
db.prepare(updateQuery).run(updateParams);
}
const [dropIndex] = sql`
DROP INDEX IF EXISTS messages_call;
`;
db.exec(dropIndex);
try {
const [dropColumnQuery] = sql`
ALTER TABLE messages
DROP COLUMN callMode;
`;
db.exec(dropColumnQuery);
} catch (error) {
if (!error.message.includes('no such column: "callMode"')) {
throw error;
}
}
try {
const [dropColumnQuery] = sql`
ALTER TABLE messages
DROP COLUMN callId;
`;
db.exec(dropColumnQuery);
} catch (error) {
if (!error.message.includes('no such column: "callId"')) {
throw error;
}
}
const [optimizeMessages] = sql`
try {
const [dropColumnQuery] = sql`
ALTER TABLE messages
ADD COLUMN callId TEXT
GENERATED ALWAYS AS (
json_extract(json, '$.callId')
);
-- Optimize getCallHistoryMessageByCallId
CREATE INDEX messages_call ON messages
(conversationId, type, callId);
CREATE INDEX messages_callHistory_readStatus ON messages
(type, readStatus)
WHERE type IS 'call-history';
DROP COLUMN callMode;
`;
db.exec(optimizeMessages);
db.exec(dropColumnQuery);
} catch (error) {
if (!error.message.includes('no such column: "callMode"')) {
throw error;
}
}
db.pragma('user_version = 89');
})();
try {
const [dropColumnQuery] = sql`
ALTER TABLE messages
DROP COLUMN callId;
`;
db.exec(dropColumnQuery);
} catch (error) {
if (!error.message.includes('no such column: "callId"')) {
throw error;
}
}
logger.info('updateToSchemaVersion89: success!');
const [optimizeMessages] = sql`
ALTER TABLE messages
ADD COLUMN callId TEXT
GENERATED ALWAYS AS (
json_extract(json, '$.callId')
);
-- Optimize getCallHistoryMessageByCallId
CREATE INDEX messages_call ON messages
(conversationId, type, callId);
CREATE INDEX messages_callHistory_readStatus ON messages
(type, readStatus)
WHERE type IS 'call-history';
`;
db.exec(optimizeMessages);
}
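
With call history moved out of per-message callHistoryDetails and into the callsHistory table, reads become plain indexed queries. A hedged sketch of one such read, matching the callsHistory_byConversation index created above; the row type and the function name are illustrative.

import type { Database } from '@signalapp/sqlcipher';
import { sql } from '../util';

type CallHistoryRow = Readonly<{
  callId: string;
  peerId: string;
  ringerId: string | null;
  mode: string;
  type: string;
  direction: string;
  status: string;
  timestamp: number;
}>;

// Sketch: newest-first call history for one peer (a serviceId or groupId
// after the peerId rewrite above).
function getRecentCallsForPeer(
  db: Database,
  peerId: string,
  limit = 50
): Array<CallHistoryRow> {
  const [query, params] = sql`
    SELECT callId, peerId, ringerId, mode, type, direction, status, timestamp
    FROM callsHistory
    WHERE peerId IS ${peerId}
    ORDER BY timestamp DESC
    LIMIT ${limit};
  `;
  const rows: Array<CallHistoryRow> = db.prepare(query).all(params);
  return rows;
}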

View file

@ -7,39 +7,28 @@ import type { LoggerType } from '../../types/Logging';
import { sql } from '../util';
export default function updateToSchemaVersion90(
currentVersion: number,
db: Database,
logger: LoggerType
): void {
if (currentVersion >= 90) {
return;
}
let numChanges = 0;
db.transaction(() => {
const [updateQuery, updateParams] = sql`
UPDATE messages
SET json = json_remove(json, '$.storyReplyContext.attachment.screenshotData')
WHERE isStory = 0
const [updateQuery, updateParams] = sql`
UPDATE messages
SET json = json_remove(json, '$.storyReplyContext.attachment.screenshotData')
WHERE isStory = 0
/* we want to find all messages with a non-null storyId, but using string
comparison (instead of a non-null check) here causes Sqlite to use the
storyId index */
AND storyId > '0'
/* we want to find all messages with a non-null storyId, but using string
comparison (instead of a non-null check) here causes Sqlite to use the
storyId index */
AND storyId > '0'
AND json->'$.storyReplyContext.attachment.screenshotData' IS NOT NULL;
`;
AND json->'$.storyReplyContext.attachment.screenshotData' IS NOT NULL;
`;
const info = db.prepare(updateQuery).run(updateParams);
numChanges = info.changes;
db.pragma('user_version = 90');
})();
const info = db.prepare(updateQuery).run(updateParams);
numChanges = info.changes;
logger.info(
`updateToSchemaVersion90: removed screenshotData from ${numChanges} ` +
`removed screenshotData from ${numChanges} ` +
`message${numChanges > 1 ? 's' : ''}`
);
logger.info('updateToSchemaVersion90: success!');
}
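
The comment above leans on storyId > '0' instead of a NOT NULL check so that SQLite chooses the storyId index. A hedged way to sanity-check that locally with EXPLAIN QUERY PLAN, assuming the driver exposes the plan rows (with SQLite's detail column) through prepare().all(); treat the whole snippet as an assumption rather than existing tooling.

import type { Database } from '@signalapp/sqlcipher';

// Sketch: inspect the query plan and look for an index scan. The exact
// wording of `detail` comes from SQLite; the check is intentionally loose.
function storyIdComparisonUsesIndex(db: Database): boolean {
  const plan: Array<{ detail: string }> = db
    .prepare(
      `EXPLAIN QUERY PLAN
       SELECT id FROM messages WHERE isStory = 0 AND storyId > '0'`
    )
    .all();
  return plan.some(({ detail }) => detail.includes('USING INDEX'));
}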

View file

@ -10,202 +10,180 @@ import { normalizePni } from '../../types/ServiceId';
import * as Errors from '../../types/errors';
export default function updateToSchemaVersion91(
currentVersion: number,
db: Database,
logger: LoggerType
): void {
if (currentVersion >= 91) {
// Fix the ourServiceId column so it's generated from the right JSON field
db.exec(`
--- First, prekeys
DROP INDEX preKeys_ourServiceId;
ALTER TABLE preKeys
DROP COLUMN ourServiceId;
ALTER TABLE preKeys
ADD COLUMN ourServiceId NUMBER
GENERATED ALWAYS AS (json_extract(json, '$.ourServiceId'));
CREATE INDEX preKeys_ourServiceId ON preKeys (ourServiceId);
-- Second, kyber prekeys
DROP INDEX kyberPreKeys_ourServiceId;
ALTER TABLE kyberPreKeys
DROP COLUMN ourServiceId;
ALTER TABLE kyberPreKeys
ADD COLUMN ourServiceId NUMBER
GENERATED ALWAYS AS (json_extract(json, '$.ourServiceId'));
CREATE INDEX kyberPreKeys_ourServiceId ON kyberPreKeys (ourServiceId);
-- Finally, signed prekeys
DROP INDEX signedPreKeys_ourServiceId;
ALTER TABLE signedPreKeys
DROP COLUMN ourServiceId;
ALTER TABLE signedPreKeys
ADD COLUMN ourServiceId NUMBER
GENERATED ALWAYS AS (json_extract(json, '$.ourServiceId'));
CREATE INDEX signedPreKeys_ourServiceId ON signedPreKeys (ourServiceId);
`);
// Do overall count - if it's less than 1000, move on
const totalKeys =
db
.prepare('SELECT count(*) FROM preKeys;', {
pluck: true,
})
.get<number>() ?? 0;
logger.info(`Found ${totalKeys} keys`);
if (totalKeys < 1000) {
return;
}
db.transaction(() => {
// Fix the ourServiceId column so it's generated from the right JSON field
// Grab our PNI
db.exec(`
--- First, prekeys
DROP INDEX preKeys_ourServiceId;
ALTER TABLE preKeys
DROP COLUMN ourServiceId;
ALTER TABLE preKeys
ADD COLUMN ourServiceId NUMBER
GENERATED ALWAYS AS (json_extract(json, '$.ourServiceId'));
CREATE INDEX preKeys_ourServiceId ON preKeys (ourServiceId);
-- Second, kyber prekeys
DROP INDEX kyberPreKeys_ourServiceId;
ALTER TABLE kyberPreKeys
DROP COLUMN ourServiceId;
ALTER TABLE kyberPreKeys
ADD COLUMN ourServiceId NUMBER
GENERATED ALWAYS AS (json_extract(json, '$.ourServiceId'));
CREATE INDEX kyberPreKeys_ourServiceId ON kyberPreKeys (ourServiceId);
-- Finally, signed prekeys
DROP INDEX signedPreKeys_ourServiceId;
ALTER TABLE signedPreKeys
DROP COLUMN ourServiceId;
ALTER TABLE signedPreKeys
ADD COLUMN ourServiceId NUMBER
GENERATED ALWAYS AS (json_extract(json, '$.ourServiceId'));
CREATE INDEX signedPreKeys_ourServiceId ON signedPreKeys (ourServiceId);
`);
// Do overall count - if it's less than 1000, move on
const totalKeys =
db
.prepare('SELECT count(*) FROM preKeys;', {
pluck: true,
})
.get<number>() ?? 0;
logger.info(`updateToSchemaVersion91: Found ${totalKeys} keys`);
if (totalKeys < 1000) {
db.pragma('user_version = 91');
return;
let pni: PniString;
const pniJson = db
.prepare("SELECT json FROM items WHERE id IS 'pni'", {
pluck: true,
})
.get<string>();
try {
const pniData = JSON.parse(pniJson ?? '');
pni = normalizePni(pniData.value, 'updateToSchemaVersion91');
} catch (error) {
if (pniJson) {
logger.warn('PNI found but did not parse', Errors.toLogFormat(error));
} else {
logger.info('Our PNI not found');
}
return;
}
// Grab our PNI
// Grab PNI-specific count
let pni: PniString;
const pniJson = db
.prepare("SELECT json FROM items WHERE id IS 'pni'", {
pluck: true,
})
.get<string>();
try {
const pniData = JSON.parse(pniJson ?? '');
pni = normalizePni(pniData.value, 'updateToSchemaVersion91');
} catch (error) {
db.pragma('user_version = 91');
if (pniJson) {
logger.warn(
'updateToSchemaVersion91: PNI found but did not parse',
Errors.toLogFormat(error)
);
} else {
logger.info('updateToSchemaVersion91: Our PNI not found');
}
return;
}
const [beforeQuery, beforeParams] =
sql`SELECT count(*) from preKeys WHERE ourServiceId = ${pni}`;
const beforeKeys = db
.prepare(beforeQuery, {
pluck: true,
})
.get(beforeParams);
logger.info(`Found ${beforeKeys} preKeys for PNI`);
// Grab PNI-specific count
// Create index to help us with all these queries
const [beforeQuery, beforeParams] =
sql`SELECT count(*) from preKeys WHERE ourServiceId = ${pni}`;
const beforeKeys = db
.prepare(beforeQuery, {
pluck: true,
})
.get(beforeParams);
logger.info(`updateToSchemaVersion91: Found ${beforeKeys} preKeys for PNI`);
db.exec(`
ALTER TABLE preKeys
ADD COLUMN createdAt NUMBER
GENERATED ALWAYS AS (json_extract(json, '$.createdAt'));
CREATE INDEX preKeys_date
ON preKeys (ourServiceId, createdAt);
`);
logger.info('Temporary index created');
// Create index to help us with all these queries
// Fetch 500th-oldest timestamp for PNI
db.exec(`
ALTER TABLE preKeys
ADD COLUMN createdAt NUMBER
GENERATED ALWAYS AS (json_extract(json, '$.createdAt'));
CREATE INDEX preKeys_date
ON preKeys (ourServiceId, createdAt);
`);
logger.info('updateToSchemaVersion91: Temporary index created');
const [oldQuery, oldParams] = sql`
SELECT createdAt
FROM preKeys
WHERE
createdAt IS NOT NULL AND
ourServiceId = ${pni}
ORDER BY createdAt ASC
LIMIT 1
OFFSET 499
`;
const oldBoundary = db
.prepare(oldQuery, {
pluck: true,
})
.get(oldParams);
logger.info(`Found 500th-oldest timestamp: ${oldBoundary}`);
// Fetch 500th-oldest timestamp for PNI
// Fetch 500th-newest timestamp for PNI
const [oldQuery, oldParams] = sql`
SELECT createdAt
FROM preKeys
WHERE
const [newQuery, newParams] = sql`
SELECT createdAt
FROM preKeys
WHERE
createdAt IS NOT NULL AND
ourServiceId = ${pni}
ORDER BY createdAt DESC
LIMIT 1
OFFSET 499
`;
const newBoundary = db
.prepare(newQuery, {
pluck: true,
})
.get(newParams);
logger.info(`Found 500th-newest timestamp: ${newBoundary}`);
// Delete everything in between for PNI
let result: RunResult;
const [deleteQuery, deleteParams] = sql`
DELETE FROM preKeys
WHERE rowid IN (
SELECT rowid FROM preKeys
WHERE
createdAt IS NOT NULL AND
createdAt > ${oldBoundary ?? null} AND
createdAt < ${newBoundary ?? null} AND
ourServiceId = ${pni}
ORDER BY createdAt ASC
LIMIT 1
OFFSET 499
`;
const oldBoundary = db
.prepare(oldQuery, {
pluck: true,
})
.get(oldParams);
logger.info(
`updateToSchemaVersion91: Found 500th-oldest timestamp: ${oldBoundary}`
LIMIT 10000
);
`;
const preparedQuery = db.prepare(deleteQuery);
do {
result = preparedQuery.run(deleteParams);
logger.info(`Deleted ${result.changes} items`);
} while (result.changes > 0);
logger.info('Delete is complete!');
// Fetch 500th-newest timestamp for PNI
// Get updated count for PNI
const [newQuery, newParams] = sql`
SELECT createdAt
FROM preKeys
WHERE
createdAt IS NOT NULL AND
ourServiceId = ${pni}
ORDER BY createdAt DESC
LIMIT 1
OFFSET 499
`;
const newBoundary = db
.prepare(newQuery, {
pluck: true,
})
.get(newParams);
logger.info(
`updateToSchemaVersion91: Found 500th-newest timestamp: ${newBoundary}`
);
const [afterQuery, afterParams] = sql`
SELECT count(*)
FROM preKeys
WHERE ourServiceId = ${pni};
`;
const afterCount = db
.prepare(afterQuery, {
pluck: true,
})
.get(afterParams);
logger.info(`Found ${afterCount} preKeys for PNI after delete`);
// Delete everything in between for PNI
let result: RunResult;
const [deleteQuery, deleteParams] = sql`
DELETE FROM preKeys
WHERE rowid IN (
SELECT rowid FROM preKeys
WHERE
createdAt IS NOT NULL AND
createdAt > ${oldBoundary ?? null} AND
createdAt < ${newBoundary ?? null} AND
ourServiceId = ${pni}
LIMIT 10000
);
`;
const preparedQuery = db.prepare(deleteQuery);
do {
result = preparedQuery.run(deleteParams);
logger.info(`updateToSchemaVersion91: Deleted ${result.changes} items`);
} while (result.changes > 0);
logger.info('updateToSchemaVersion91: Delete is complete!');
// Get updated count for PNI
const [afterQuery, afterParams] = sql`
SELECT count(*)
FROM preKeys
WHERE ourServiceId = ${pni};
`;
const afterCount = db
.prepare(afterQuery, {
pluck: true,
})
.get(afterParams);
logger.info(
`updateToSchemaVersion91: Found ${afterCount} preKeys for PNI after delete`
);
db.exec(`
DROP INDEX preKeys_date;
ALTER TABLE preKeys DROP COLUMN createdAt;
`);
db.pragma('user_version = 91');
})();
logger.info('updateToSchemaVersion91: success!');
db.exec(`
DROP INDEX preKeys_date;
ALTER TABLE preKeys DROP COLUMN createdAt;
`);
}
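
The delete above runs in bounded passes: LIMIT 10000 inside the subquery, repeated until a pass removes nothing, so the cleanup never executes as one enormous statement. A hedged sketch of that loop as a standalone helper, using only the prepare/run API already shown in these files; the name is illustrative.

import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';

// Sketch: re-run a LIMITed DELETE until it stops matching rows. The query is
// expected to carry its own LIMIT; returns the total number of rows removed.
function deleteInChunks(
  db: Database,
  query: string,
  params: ReadonlyArray<unknown>,
  logger: LoggerType
): number {
  const prepared = db.prepare(query);
  let total = 0;
  let changes: number;
  do {
    ({ changes } = prepared.run(params));
    total += changes;
    logger.info(`deleteInChunks: deleted ${changes} rows`);
  } while (changes > 0);
  return total;
}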

View file

@ -11,47 +11,30 @@ import { sql, sqlFragment } from '../util';
import { normalizePni } from '../../types/ServiceId';
import * as Errors from '../../types/errors';
export const version = 920;
export function updateToSchemaVersion920(
currentVersion: number,
export default function updateToSchemaVersion920(
db: Database,
logger: LoggerType
): void {
if (currentVersion >= 920) {
return;
}
db.transaction(() => {
cleanKeys(
db,
logger,
'updateToSchemaVersion920/kyberPreKeys',
sqlFragment`kyberPreKeys`,
sqlFragment`createdAt`,
sqlFragment`ourServiceId`
);
cleanKeys(
db,
logger,
'updateToSchemaVersion920/signedPreKeys',
sqlFragment`signedPreKeys`,
sqlFragment`created_at`,
sqlFragment`ourServiceId`
);
logger.info('updateToSchemaVersion920: Done with deletions');
db.pragma('user_version = 920');
})();
logger.info(
'updateToSchemaVersion920: user_version set to 920. Starting vacuum...'
): 'vacuum' {
cleanKeys(
db,
logger,
'kyberPreKeys',
sqlFragment`kyberPreKeys`,
sqlFragment`createdAt`,
sqlFragment`ourServiceId`
);
cleanKeys(
db,
logger,
'signedPreKeys',
sqlFragment`signedPreKeys`,
sqlFragment`created_at`,
sqlFragment`ourServiceId`
);
db.exec('VACUUM;');
logger.info('updateToSchemaVersion920: Vacuum complete.');
logger.info('updateToSchemaVersion920: success!');
logger.info('Done with deletions, starting vacuum...');
return 'vacuum';
}
export function cleanKeys(
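
updateToSchemaVersion920 now returns the literal 'vacuum' instead of calling VACUUM itself: SQLite refuses to VACUUM inside a transaction, which is why the old code only ran it after committing, and the new signature hands that step to the caller. A hedged sketch of how an assumed runner might honor the return value, assuming the transaction wrapper forwards the callback's return as better-sqlite3-style APIs do; the runner itself is not part of this diff.

import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
import updateToSchemaVersion920 from './920-clean-more-keys';

// Sketch: commit the migration and the version bump first, then run VACUUM
// outside the transaction if the migration asked for it.
function runMigration920(db: Database, logger: LoggerType): void {
  const postStep = db.transaction(() => {
    const result = updateToSchemaVersion920(db, logger);
    db.pragma('user_version = 920');
    return result;
  })();

  if (postStep === 'vacuum') {
    logger.info('updateToSchemaVersion920: user_version set to 920. Starting vacuum...');
    db.exec('VACUUM;');
    logger.info('updateToSchemaVersion920: Vacuum complete.');
  }
}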

View file

@ -1,26 +1,6 @@
// Copyright 2023 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export const version = 930;
export function updateToSchemaVersion930(
currentVersion: number,
db: Database,
logger: LoggerType
): void {
if (currentVersion >= 930) {
return;
}
db.transaction(() => {
// This was a migration that enabled 'secure-delete' in FTS
db.pragma('user_version = 930');
})();
logger.info('updateToSchemaVersion930: success!');
export default function updateToSchemaVersion930(): void {
// This was a migration that enabled 'secure-delete' in FTS
}

View file

@ -1,25 +1,6 @@
// Copyright 2023 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export const version = 940;
export function updateToSchemaVersion940(
currentVersion: number,
db: Database,
logger: LoggerType
): void {
if (currentVersion >= 940) {
return;
}
db.transaction(() => {
// This was a migration that disabled secure-delete and rebuilt the index
db.pragma('user_version = 940');
})();
logger.info('updateToSchemaVersion940: success!');
export default function updateToSchemaVersion940(): void {
// This was a migration that disabled secure-delete and rebuilt the index
}

View file

@ -1,25 +1,6 @@
// Copyright 2023 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export const version = 950;
export function updateToSchemaVersion950(
currentVersion: number,
db: Database,
logger: LoggerType
): void {
if (currentVersion >= 950) {
return;
}
db.transaction(() => {
// This was a migration that enabled secure-delete
db.pragma('user_version = 950');
})();
logger.info('updateToSchemaVersion950: success!');
export default function updateToSchemaVersion950(): void {
// This was a migration that enabled secure-delete
}

View file

@ -13,44 +13,31 @@ import { normalizePni } from '../../types/ServiceId';
import { normalizeAci } from '../../util/normalizeAci';
import type { JSONWithUnknownFields } from '../../types/Util';
export const version = 960;
export function updateToSchemaVersion960(
currentVersion: number,
export default function updateToSchemaVersion960(
db: Database,
logger: LoggerType
): void {
if (currentVersion >= 960) {
const ourServiceIds = migratePni(db, logger);
if (!ourServiceIds) {
logger.info('not running, pni is normalized');
return;
}
db.transaction(() => {
const ourServiceIds = migratePni(db, logger);
if (!ourServiceIds) {
logger.info('updateToSchemaVersion960: not running, pni is normalized');
return;
}
// Migrate JSON fields
db.prepare(
`
UPDATE conversations
SET json = json_set(json, '$.pni', $pni)
WHERE serviceId IS $aci
`
).run({
aci: ourServiceIds.aci,
pni: ourServiceIds.pni,
});
// Migrate JSON fields
db.prepare(
`
UPDATE conversations
SET json = json_set(json, '$.pni', $pni)
WHERE serviceId IS $aci
`
).run({
aci: ourServiceIds.aci,
pni: ourServiceIds.pni,
});
migratePreKeys(db, 'preKeys', ourServiceIds, logger);
migratePreKeys(db, 'signedPreKeys', ourServiceIds, logger);
migratePreKeys(db, 'kyberPreKeys', ourServiceIds, logger);
db.pragma('user_version = 960');
})();
logger.info('updateToSchemaVersion960: success!');
migratePreKeys(db, 'preKeys', ourServiceIds, logger);
migratePreKeys(db, 'signedPreKeys', ourServiceIds, logger);
migratePreKeys(db, 'kyberPreKeys', ourServiceIds, logger);
}
//
@ -101,12 +88,9 @@ function migratePni(
[aci] = JSON.parse(uuidIdJson ?? '').value.split('.', 2);
} catch (error) {
if (uuidIdJson) {
logger.warn(
'updateToSchemaVersion960: failed to parse uuid_id item',
error
);
logger.warn('failed to parse uuid_id item', error);
} else {
logger.info('updateToSchemaVersion960: Our ACI not found');
logger.info('Our ACI not found');
}
}
if (!aci) {
@ -118,9 +102,9 @@ function migratePni(
legacyPni = JSON.parse(pniJson ?? '').value;
} catch (error) {
if (pniJson) {
logger.warn('updateToSchemaVersion960: failed to parse pni item', error);
logger.warn('failed to parse pni item', error);
} else {
logger.info('updateToSchemaVersion960: Our PNI not found');
logger.info('Our PNI not found');
}
}
if (!legacyPni) {
@ -164,10 +148,7 @@ function migratePni(
updateStmt.run({ id, json: JSON.stringify(data) });
} catch (error) {
logger.warn(
`updateToSchemaVersion960: failed to parse ${id} item`,
error
);
logger.warn(`failed to parse ${id} item`, error);
}
}
return {
@ -200,11 +181,11 @@ function migratePreKeys(
WHERE id = $id
`);
logger.info(`updateToSchemaVersion960: updating ${preKeys.length} ${table}`);
logger.info(`updating ${preKeys.length} ${table}`);
for (const { id, json } of preKeys) {
const match = id.match(/^(.*):(.*)$/);
if (!match) {
logger.warn(`updateToSchemaVersion960: invalid ${table} id ${id}`);
logger.warn(`invalid ${table} id ${id}`);
continue;
}
@ -212,20 +193,13 @@ function migratePreKeys(
try {
legacyData = JSON.parse(json);
} catch (error) {
logger.warn(
`updateToSchemaVersion960: failed to parse ${table} ${id}`,
error
);
logger.warn(`failed to parse ${table} ${id}`, error);
continue;
}
const [, ourServiceId, keyId] = match;
if (ourServiceId !== legacyPni) {
logger.warn(
'updateToSchemaVersion960: unexpected ourServiceId',
ourServiceId,
legacyPni
);
logger.warn('unexpected ourServiceId', ourServiceId, legacyPni);
continue;
}

View file

@ -2,29 +2,9 @@
// SPDX-License-Identifier: AGPL-3.0-only
import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';
export const version = 970;
export function updateToSchemaVersion970(
currentVersion: number,
db: Database,
logger: LoggerType
): void {
if (currentVersion >= 970) {
return;
}
const start = Date.now();
db.transaction(() => {
db.exec(`
INSERT INTO messages_fts(messages_fts) VALUES ('optimize');
`);
db.pragma('user_version = 970');
})();
const duration = Date.now() - start;
logger.info(
`updateToSchemaVersion970: success! fts optimize took ${duration}ms`
);
export default function updateToSchemaVersion970(db: Database): void {
db.exec(`
INSERT INTO messages_fts(messages_fts) VALUES ('optimize');
`);
}

Some files were not shown because too many files have changed in this diff.