Simplify database migrations

parent 46d5b06bfc
commit e6809c95db

106 changed files with 4661 additions and 6814 deletions
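Every hunk below applies the same simplification to one schema-migration file. As a condensed sketch of the pattern (illustrative only: version number 9999 is a placeholder, imports are omitted, and the per-file guard, transaction, user_version bump, and success log are presumably now handled centrally by the shared migration runner, which is not shown in these hunks):

// Before: each migration exported a version constant and guarded/wrapped itself.
export const version = 9999;
export function updateToSchemaVersion9999(
  currentVersion: number,
  db: Database,
  logger: LoggerType
): void {
  if (currentVersion >= 9999) {
    return;
  }
  db.transaction(() => {
    // ...schema changes...
    db.pragma('user_version = 9999');
  })();
  logger.info('updateToSchemaVersion9999: success!');
}

// After: the migration only performs its schema changes.
export default function updateToSchemaVersion9999(db: Database): void {
  // ...schema changes...
}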
@@ -3,58 +3,41 @@
 import type { Database } from '@signalapp/sqlcipher';
-import type { LoggerType } from '../../types/Logging';
 import { ReadStatus } from '../../messages/MessageReadStatus';
 import { SeenStatus } from '../../MessageSeenStatus';
 import { strictAssert } from '../../util/assert';
 import { sql, sqlConstant } from '../util';

-export const version = 1000;
-
 const READ_STATUS_UNREAD = sqlConstant(ReadStatus.Unread);
 const READ_STATUS_READ = sqlConstant(ReadStatus.Read);
 const SEEN_STATUS_UNSEEN = sqlConstant(SeenStatus.Unseen);

-export function updateToSchemaVersion1000(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1000) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1000(db: Database): void {
   const [selectQuery] = sql`
     SELECT id
     FROM messages
     WHERE messages.type = 'call-history'
     AND messages.readStatus IS ${READ_STATUS_UNREAD}
   `;

   const rows = db.prepare(selectQuery).all();

   for (const row of rows) {
     const { id } = row;
     strictAssert(id != null, 'message id must exist');

     const [updateQuery, updateParams] = sql`
       UPDATE messages
       SET
         json = JSON_PATCH(json, ${JSON.stringify({
           readStatus: ReadStatus.Read,
           seenStatus: SeenStatus.Unseen,
         })}),
         readStatus = ${READ_STATUS_READ},
         seenStatus = ${SEEN_STATUS_UNSEEN}
       WHERE id = ${id}
     `;

     db.prepare(updateQuery).run(updateParams);
   }
-
-    db.pragma('user_version = 1000');
-  })();
-
-  logger.info('updateToSchemaVersion1000: success!');
 }
@@ -3,38 +3,21 @@
 import type { Database } from '@signalapp/sqlcipher';
-import type { LoggerType } from '../../types/Logging';
 import { sql } from '../util';

-export const version = 1010;
-
-export function updateToSchemaVersion1010(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1010) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1010(db: Database): void {
   const [createTable] = sql`
     CREATE TABLE callLinks (
       roomId TEXT NOT NULL PRIMARY KEY,
       rootKey BLOB NOT NULL,
       adminKey BLOB,
       name TEXT NOT NULL,
       -- Enum which stores CallLinkRestrictions from ringrtc
       restrictions INTEGER NOT NULL,
       revoked INTEGER NOT NULL,
       expiration INTEGER
     ) STRICT;
   `;

   db.exec(createTable);
-
-    db.pragma('user_version = 1010');
-  })();
-
-  logger.info('updateToSchemaVersion1010: success!');
 }
@@ -6,54 +6,39 @@ import { sql } from '../util';
 import type { WritableDB } from '../Interface';
 import { getOurUuid } from './41-uuid-keys';

-export const version = 1020;
-
-export function updateToSchemaVersion1020(
-  currentVersion: number,
+export default function updateToSchemaVersion1020(
   db: WritableDB,
   logger: LoggerType
 ): void {
-  if (currentVersion >= 1020) {
-    return;
-  }
-
-  db.transaction(() => {
-    const ourAci = getOurUuid(db);
-
-    if (ourAci == null) {
-      logger.info('updateToSchemaVersion1020: not linked');
-      db.pragma('user_version = 1020');
-      return;
-    }
+  const ourAci = getOurUuid(db);
+
+  if (ourAci == null) {
+    logger.info('not linked');
+    return;
+  }

   const [selectQuery, selectParams] = sql`
     SELECT id FROM conversations
     WHERE serviceId IS ${ourAci}
   `;
   const ourConversationId = db
     .prepare(selectQuery, {
       pluck: true,
     })
     .get(selectParams);
   if (ourConversationId == null) {
-    logger.error('updateToSchemaVersion1020: no conversation');
-    db.pragma('user_version = 1020');
+    logger.error('no conversation');
     return;
   }

   const [deleteQuery, deleteParams] = sql`
     DELETE FROM messages
     WHERE
       conversationId IS ${ourConversationId} AND
       type IS 'conversation-merge'
   `;
   const { changes } = db.prepare(deleteQuery).run(deleteParams);
   if (changes !== 0) {
-    logger.warn(`updateToSchemaVersion1020: removed ${changes} self merges`);
+    logger.warn(`removed ${changes} self merges`);
   }
-
-    db.pragma('user_version = 1020');
-  })();
-
-  logger.info('updateToSchemaVersion1020: success!');
 }
@@ -3,85 +3,68 @@
 import type { Database } from '@signalapp/sqlcipher';
-import type { LoggerType } from '../../types/Logging';
 import { sql, sqlFragment } from '../util';

-export const version = 1030;
-
-export function updateToSchemaVersion1030(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1030) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1030(db: Database): void {
   // From migration 81
   const shouldAffectActivityOrPreview = sqlFragment`
     type IS NULL
     OR
     type NOT IN (
       'change-number-notification',
       'contact-removed-notification',
       'conversation-merge',
       'group-v1-migration',
       'keychange',
       'message-history-unsynced',
       'profile-change',
       'story',
       'universal-timer-notification',
       'verified-change'
     )
     AND NOT (
       type IS 'message-request-response-event'
       AND json_extract(json, '$.messageRequestResponseEvent') IN ('ACCEPT', 'BLOCK', 'UNBLOCK')
     )
   `;

   const [updateShouldAffectPreview] = sql`
     --- These will be re-added below
     DROP INDEX messages_preview;
     DROP INDEX messages_preview_without_story;
     DROP INDEX messages_activity;
     DROP INDEX message_user_initiated;

     --- These will also be re-added below
     ALTER TABLE messages DROP COLUMN shouldAffectActivity;
     ALTER TABLE messages DROP COLUMN shouldAffectPreview;

     --- (change: added message-request-response-event->ACCEPT/BLOCK/UNBLOCK)
     ALTER TABLE messages
       ADD COLUMN shouldAffectPreview INTEGER
       GENERATED ALWAYS AS (${shouldAffectActivityOrPreview});
     ALTER TABLE messages
       ADD COLUMN shouldAffectActivity INTEGER
       GENERATED ALWAYS AS (${shouldAffectActivityOrPreview});

     --- From migration 88
     CREATE INDEX messages_preview ON messages
       (conversationId, shouldAffectPreview, isGroupLeaveEventFromOther,
       received_at, sent_at);

     --- From migration 88
     CREATE INDEX messages_preview_without_story ON messages
       (conversationId, shouldAffectPreview, isGroupLeaveEventFromOther,
       received_at, sent_at) WHERE storyId IS NULL;

     --- From migration 88
     CREATE INDEX messages_activity ON messages
       (conversationId, shouldAffectActivity, isTimerChangeFromSync,
       isGroupLeaveEventFromOther, received_at, sent_at);

     --- From migration 81
     CREATE INDEX message_user_initiated ON messages (conversationId, isUserInitiatedMessage);
   `;

   db.exec(updateShouldAffectPreview);
-
-    db.pragma('user_version = 1030');
-  })();
-
-  logger.info('updateToSchemaVersion1030: success!');
 }
@@ -20,8 +20,6 @@ import {
   type JobManagerJobType,
 } from '../../jobs/JobManager';

-export const version = 1040;
-
 export type _AttachmentDownloadJobTypeV1030 = {
   attachment: AttachmentType;
   attempts: number;

@@ -56,192 +54,171 @@ export type _AttachmentDownloadJobTypeV1040 = Omit<
   'attachmentSignature' | 'originalSource'
 > & { digest: string };

-export function updateToSchemaVersion1040(
-  currentVersion: number,
+export default function updateToSchemaVersion1040(
   db: Database,
   logger: LoggerType
 ): void {
-  if (currentVersion >= 1040) {
-    return;
-  }
-
-  db.transaction(() => {
   // 1. Load all existing rows into memory (shouldn't be many)
   const existingJobs: Array<{
     id: string | null;
     timestamp: number | null;
     pending: number | null;
     json: string | null;
   }> = db
     .prepare(
       `
       SELECT id, timestamp, pending, json from attachment_downloads
     `
     )
     .all();
-    logger.info(
-      `updateToSchemaVersion1040: loaded ${existingJobs.length} existing jobs`
-    );
+  logger.info(`loaded ${existingJobs.length} existing jobs`);

   // 2. Create new temp table, with a couple new columns and stricter typing
   db.exec(`
     CREATE TABLE tmp_attachment_downloads (
       messageId TEXT NOT NULL REFERENCES messages(id) ON DELETE CASCADE,
       attachmentType TEXT NOT NULL,
       digest TEXT NOT NULL,
       receivedAt INTEGER NOT NULL,
       sentAt INTEGER NOT NULL,
       contentType TEXT NOT NULL,
       size INTEGER NOT NULL,
       attachmentJson TEXT NOT NULL,
       active INTEGER NOT NULL,
       attempts INTEGER NOT NULL,
       retryAfter INTEGER,
       lastAttemptTimestamp INTEGER,

       PRIMARY KEY (messageId, attachmentType, digest)
     ) STRICT;
   `);

   // 3. Drop existing table
   db.exec('DROP TABLE attachment_downloads;');

   // 4. Rename temp table
   db.exec(
     'ALTER TABLE tmp_attachment_downloads RENAME TO attachment_downloads;'
   );

   // 5. Add new index on active & receivedAt. For most queries when there are lots of
   // jobs (like during backup restore), many jobs will match the the WHERE clause, so
   // the ORDER BY on receivedAt is probably the most expensive part.
   db.exec(`
     CREATE INDEX attachment_downloads_active_receivedAt
       ON attachment_downloads (
         active, receivedAt
       );
   `);

   // 6. Add new index on active & messageId. In order to prioritize visible messages,
   // we'll also query for rows with a matching messageId. For these, the messageId
   // matching is likely going to be the most expensive part.
   db.exec(`
     CREATE INDEX attachment_downloads_active_messageId
       ON attachment_downloads (
         active, messageId
       );
   `);

   // 7. Add new index just on messageId, for the ON DELETE CASCADE foreign key
   // constraint
   db.exec(`
     CREATE INDEX attachment_downloads_messageId
       ON attachment_downloads (
         messageId
       );
   `);

   // 8. Rewrite old rows to match new schema
   const rowsToTransfer: Array<
     _AttachmentDownloadJobTypeV1040 & JobManagerJobType
   > = [];

   for (const existingJob of existingJobs) {
     try {
       // Type this as partial in case there is missing data
       const existingJobData: Partial<_AttachmentDownloadJobTypeV1030> =
         jsonToObject(existingJob.json ?? '');

       const updatedJob: Partial<_AttachmentDownloadJobTypeV1040> = {
         messageId: existingJobData.messageId,
         attachmentType: existingJobData.type,
         attachment: existingJobData.attachment,
         // The existing timestamp column works reasonably well in place of
         // actually retrieving the message's receivedAt
         receivedAt: existingJobData.timestamp ?? Date.now(),
         sentAt: existingJobData.timestamp ?? Date.now(),
         digest: existingJobData.attachment?.digest,
         contentType: existingJobData.attachment?.contentType,
         size: existingJobData.attachment?.size,
         active: false, // all jobs are inactive on app start
         attempts: existingJobData.attempts ?? 0,
         retryAfter: null,
         lastAttemptTimestamp: null,
         // adding due to changes in the schema
         source: AttachmentDownloadSource.STANDARD,
         ciphertextSize: 0,
       };

-      const parsed = parsePartial(
-        attachmentDownloadJobSchemaV1040,
-        updatedJob
-      );
+      const parsed = parsePartial(attachmentDownloadJobSchemaV1040, updatedJob);

       rowsToTransfer.push(parsed);
     } catch {
       logger.warn(
-        `updateToSchemaVersion1040: unable to transfer job ${existingJob.id} to new table; invalid data`
+        `unable to transfer job ${existingJob.id} to new table; invalid data`
       );
     }
   }

   let numTransferred = 0;
   if (rowsToTransfer.length) {
-    logger.info(
-      `updateToSchemaVersion1040: transferring ${rowsToTransfer.length} rows`
-    );
+    logger.info(`transferring ${rowsToTransfer.length} rows`);
     for (const row of rowsToTransfer) {
       const [insertQuery, insertParams] = sql`
         INSERT INTO attachment_downloads
           (
             messageId,
             attachmentType,
             receivedAt,
             sentAt,
             digest,
             contentType,
             size,
             attachmentJson,
             active,
             attempts,
             retryAfter,
             lastAttemptTimestamp
           )
         VALUES
           (
             ${row.messageId},
             ${row.attachmentType},
             ${row.receivedAt},
             ${row.sentAt},
             ${row.digest},
             ${row.contentType},
             ${row.size},
             ${objectToJSON(row.attachment)},
             ${row.active ? 1 : 0},
             ${row.attempts},
             ${row.retryAfter},
             ${row.lastAttemptTimestamp}
           );
       `;
       try {
         db.prepare(insertQuery).run(insertParams);
         numTransferred += 1;
       } catch (error) {
-        logger.error(
-          'updateToSchemaVersion1040: error when transferring row',
-          error
-        );
+        logger.error('error when transferring row', error);
       }
     }
   }

   logger.info(
-    `updateToSchemaVersion1040: transferred ${numTransferred} rows, removed ${
+    `transferred ${numTransferred} rows, removed ${
       existingJobs.length - numTransferred
     }`
   );
-
-    db.pragma('user_version = 1040');
-  })();
-
-  logger.info('updateToSchemaVersion1040: success!');
 }
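The attachment_downloads hunk above rebuilds the table through a temporary copy (steps 1-8 in its comments) because SQLite's ALTER TABLE cannot tighten column types or change the primary key of an existing table. A minimal sketch of the same create-copy-drop-rename idea, using a hypothetical `jobs` table that is not part of this commit; the migration above copies rows from JavaScript instead of using INSERT ... SELECT so that each row can be validated and reshaped first:

// Hypothetical example only: rebuild `jobs` so that `attempts` becomes NOT NULL.
db.exec(`
  CREATE TABLE tmp_jobs (
    id TEXT NOT NULL PRIMARY KEY,
    attempts INTEGER NOT NULL
  ) STRICT;

  INSERT INTO tmp_jobs (id, attempts)
    SELECT id, IFNULL(attempts, 0) FROM jobs;

  DROP TABLE jobs;
  ALTER TABLE tmp_jobs RENAME TO jobs;
`);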
@@ -3,48 +3,31 @@
 import type { Database } from '@signalapp/sqlcipher';
-import type { LoggerType } from '../../types/Logging';
 import { sql } from '../util';

-export const version = 1050;
-
-export function updateToSchemaVersion1050(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1050) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1050(db: Database): void {
   const [createTables] = sql`
     DROP TABLE IF EXISTS groupSendCombinedEndorsement;
     DROP TABLE IF EXISTS groupSendMemberEndorsement;

     -- From GroupSendEndorsementsResponse->ReceivedEndorsements in libsignal
     -- this is the combined endorsement for all group members
     CREATE TABLE groupSendCombinedEndorsement (
       groupId TEXT NOT NULL PRIMARY KEY, -- Only one endorsement per group
       expiration INTEGER NOT NULL, -- Unix timestamp in seconds
       endorsement BLOB NOT NULL
     ) STRICT;

     -- From GroupSendEndorsementsResponse->ReceivedEndorsements in libsignal
     -- these are the individual endorsements for each group member
     CREATE TABLE groupSendMemberEndorsement (
       groupId TEXT NOT NULL,
       memberAci TEXT NOT NULL,
       expiration INTEGER NOT NULL, -- Unix timestamp in seconds
       endorsement BLOB NOT NULL,
       PRIMARY KEY (groupId, memberAci) -- Only one endorsement per group member
     ) STRICT;
   `;

   db.exec(createTables);
-
-    db.pragma('user_version = 1050');
-  })();
-
-  logger.info('updateToSchemaVersion1050: success!');
 }
@@ -3,54 +3,36 @@
 import type { Database } from '@signalapp/sqlcipher';
-import type { LoggerType } from '../../types/Logging';
-
-export const version = 1060;
-
-export function updateToSchemaVersion1060(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1060) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1060(db: Database): void {
   db.exec(`
     ALTER TABLE messages
       ADD COLUMN isAddressableMessage INTEGER
       GENERATED ALWAYS AS (
         type IS NULL
         OR
         type IN (
           'incoming',
           'outgoing'
         )
       );

     CREATE INDEX messages_by_date_addressable
       ON messages (
         conversationId, isAddressableMessage, received_at, sent_at
       );

     CREATE TABLE syncTasks(
       id TEXT PRIMARY KEY NOT NULL,
       attempts INTEGER NOT NULL,
       createdAt INTEGER NOT NULL,
       data TEXT NOT NULL,
       envelopeId TEXT NOT NULL,
       sentAt INTEGER NOT NULL,
       type TEXT NOT NULL
     ) STRICT;

     CREATE INDEX syncTasks_order ON syncTasks (
       createdAt, sentAt, id
     )
   `);
-
-    db.pragma('user_version = 1060');
-  })();
-
-  logger.info('updateToSchemaVersion1060: success!');
 }
@@ -3,53 +3,35 @@
 import type { Database } from '@signalapp/sqlcipher';
-import type { LoggerType } from '../../types/Logging';
-
-export const version = 1070;
-
-export function updateToSchemaVersion1070(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1070) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1070(db: Database): void {
   db.exec(`
     CREATE TABLE attachment_backup_jobs (
       mediaName TEXT NOT NULL PRIMARY KEY,
       type TEXT NOT NULL,
       data TEXT NOT NULL,
       receivedAt INTEGER NOT NULL,

       -- job manager fields
       attempts INTEGER NOT NULL,
       active INTEGER NOT NULL,
       retryAfter INTEGER,
       lastAttemptTimestamp INTEGER
     ) STRICT;

     CREATE INDEX attachment_backup_jobs_receivedAt
       ON attachment_backup_jobs (
         receivedAt
       );

     CREATE INDEX attachment_backup_jobs_type_receivedAt
       ON attachment_backup_jobs (
         type, receivedAt
       );

     CREATE TABLE backup_cdn_object_metadata (
       mediaId TEXT NOT NULL PRIMARY KEY,
       cdnNumber INTEGER NOT NULL,
       sizeOnBackupCdn INTEGER
     ) STRICT;
   `);
-
-    db.pragma('user_version = 1070');
-  })();
-
-  logger.info('updateToSchemaVersion1070: success!');
 }
@@ -3,29 +3,11 @@
 import type { Database } from '@signalapp/sqlcipher';
-import type { LoggerType } from '../../types/Logging';
-
-export const version = 1080;
-
-export function updateToSchemaVersion1080(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1080) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1080(db: Database): void {
   db.exec(`
     CREATE INDEX messages_by_date_addressable_nondisappearing
       ON messages (
         conversationId, isAddressableMessage, received_at, sent_at
       ) WHERE expireTimer IS NULL;
   `);
-
-    db.pragma('user_version = 1080');
-  })();
-
-  logger.info('updateToSchemaVersion1080: success!');
 }
@@ -3,30 +3,12 @@
 import type { Database } from '@signalapp/sqlcipher';
-import type { LoggerType } from '../../types/Logging';
-
-export const version = 1090;
-
-export function updateToSchemaVersion1090(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1090) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1090(db: Database): void {
   db.exec(`
     CREATE INDEX reactions_messageId
       ON reactions (messageId);

     CREATE INDEX storyReads_storyId
       ON storyReads (storyId);
   `);
-
-    db.pragma('user_version = 1090');
-  })();
-
-  logger.info('updateToSchemaVersion1090: success!');
 }
@@ -2,62 +2,45 @@
 // SPDX-License-Identifier: AGPL-3.0-only

 import type { Database } from '@signalapp/sqlcipher';
-import type { LoggerType } from '../../types/Logging';
 import { sql } from '../util';

-export const version = 1100;
-
-export function updateToSchemaVersion1100(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1100) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1100(db: Database): void {
   const [query] = sql`
     -- Fix: Query went from readStatus to seenStatus but index wasn't updated
     DROP INDEX IF EXISTS messages_callHistory_readStatus;
     DROP INDEX IF EXISTS messages_callHistory_seenStatus;
     CREATE INDEX messages_callHistory_seenStatus
       ON messages (type, seenStatus)
       WHERE type IS 'call-history';

     -- Update to index created in 89: add sent_at to make it covering, and where clause to make it smaller
     DROP INDEX IF EXISTS messages_call;
     CREATE INDEX messages_call ON messages
       (type, conversationId, callId, sent_at)
       WHERE type IS 'call-history';

     -- Update to index created in 89: add callId and peerId to make it covering
     DROP INDEX IF EXISTS callsHistory_order;
     CREATE INDEX callsHistory_order ON callsHistory
       (timestamp DESC, callId, peerId);

     -- Update to index created in 89: add timestamp for querying by order and callId to make it covering
     DROP INDEX IF EXISTS callsHistory_byConversation;
     DROP INDEX IF EXISTS callsHistory_byConversation_order;
     CREATE INDEX callsHistory_byConversation_order ON callsHistory (peerId, timestamp DESC, callId);

     -- Optimize markAllCallHistoryRead
     DROP INDEX IF EXISTS messages_callHistory_markReadBefore;
     CREATE INDEX messages_callHistory_markReadBefore
       ON messages (type, seenStatus, sent_at DESC)
       WHERE type IS 'call-history';

     -- Optimize markAllCallHistoryReadInConversation
     DROP INDEX IF EXISTS messages_callHistory_markReadByConversationBefore;
     CREATE INDEX messages_callHistory_markReadByConversationBefore
       ON messages (type, conversationId, seenStatus, sent_at DESC)
       WHERE type IS 'call-history';
   `;

   db.exec(query);
-
-    db.pragma('user_version = 1100');
-  })();
-
-  logger.info('updateToSchemaVersion1100: success!');
 }
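The SQL comments in the hunk above lean on two SQLite indexing features: the `WHERE type IS 'call-history'` clause makes each index partial (only call-history rows are indexed, keeping it small), and listing the selected columns makes it covering, so a lookup of roughly the shape below could be answered from the index alone. The query is illustrative only; `conversationId` is a placeholder and the real call-history queries live elsewhere in the codebase:

// Hypothetical query shape served by the rebuilt messages_call index.
const [query, params] = sql`
  SELECT callId, sent_at
  FROM messages
  WHERE type IS 'call-history' AND conversationId = ${conversationId}
`;
const rows = db.prepare(query).all(params);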
@@ -3,33 +3,15 @@
 import type { Database } from '@signalapp/sqlcipher';
-import type { LoggerType } from '../../types/Logging';
-
-export const version = 1110;
-
-export function updateToSchemaVersion1110(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1110) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1110(db: Database): void {
   db.exec(`
     ALTER TABLE stickers
       ADD COLUMN version INTEGER NOT NULL DEFAULT 1;

     ALTER TABLE stickers
       ADD COLUMN localKey TEXT;

     ALTER TABLE stickers
       ADD COLUMN size INTEGER;
   `);
-
-    db.pragma('user_version = 1110');
-  })();
-
-  logger.info('updateToSchemaVersion1110: success!');
 }
@@ -3,31 +3,13 @@
 import type { Database } from '@signalapp/sqlcipher';
-import type { LoggerType } from '../../types/Logging';
-
-export const version = 1120;
-
-export function updateToSchemaVersion1120(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1120) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1120(db: Database): void {
   /** Adds indexes for all tables with foreign key relationships to messages(id) */
   db.exec(`
     CREATE INDEX edited_messages_messageId
       ON edited_messages(messageId);

     CREATE INDEX mentions_messageId
       ON mentions(messageId);
   `);
-
-    db.pragma('user_version = 1120');
-  })();
-
-  logger.info('updateToSchemaVersion1120: success!');
 }
@@ -3,29 +3,11 @@
 import type { Database } from '@signalapp/sqlcipher';
-import type { LoggerType } from '../../types/Logging';
-
-export const version = 1130;
-
-export function updateToSchemaVersion1130(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1130) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1130(db: Database): void {
   // This is to improve the performance of getAllStories
   db.exec(`
     CREATE INDEX messages_isStory
       ON messages(received_at, sent_at)
       WHERE isStory = 1;
   `);
-
-    db.pragma('user_version = 1130');
-  })();
-
-  logger.info('updateToSchemaVersion1130: success!');
 }
@@ -1,31 +1,15 @@
 // Copyright 2024 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
 import type { Database } from '@signalapp/sqlcipher';
-import type { LoggerType } from '../../types/Logging';
-
-export const version = 1140;
-
-export function updateToSchemaVersion1140(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1140) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1140(db: Database): void {
   db.exec(`
     DROP INDEX IF EXISTS callLinks_deleted;

     ALTER TABLE callLinks
       ADD COLUMN deleted INTEGER NOT NULL DEFAULT 0;

     CREATE INDEX callLinks_deleted
       ON callLinks (deleted, roomId);
   `);
-
-    db.pragma('user_version = 1140');
-  })();
-  logger.info('updateToSchemaVersion1140: success!');
 }
@@ -1,30 +1,14 @@
 // Copyright 2024 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
 import type { Database } from '@signalapp/sqlcipher';
-import type { LoggerType } from '../../types/Logging';
-
-export const version = 1150;
-
-export function updateToSchemaVersion1150(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1150) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1150(db: Database): void {
   db.exec(`
     -- All future conversations will start from '1'
     ALTER TABLE conversations
       ADD COLUMN expireTimerVersion INTEGER NOT NULL DEFAULT 1;

     -- All current conversations will start from '2'
     UPDATE conversations SET expireTimerVersion = 2;
   `);
-
-    db.pragma('user_version = 1150');
-  })();
-  logger.info('updateToSchemaVersion1150: success!');
 }
@@ -1,36 +1,20 @@
 // Copyright 2024 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
 import type { Database } from '@signalapp/sqlcipher';
-import type { LoggerType } from '../../types/Logging';
 import { sql, sqlConstant } from '../util';
 import { CallDirection, CallStatusValue } from '../../types/CallDisposition';

-export const version = 1160;
-
 const CALL_STATUS_MISSED = sqlConstant(CallStatusValue.Missed);
 const CALL_DIRECTION_INCOMING = sqlConstant(CallDirection.Incoming);

-export function updateToSchemaVersion1160(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1160) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1160(db: Database): void {
   const [query] = sql`
     DROP INDEX IF EXISTS callsHistory_incoming_missed;

     CREATE INDEX callsHistory_incoming_missed
       ON callsHistory (callId, status, direction)
       WHERE status IS ${CALL_STATUS_MISSED}
       AND direction IS ${CALL_DIRECTION_INCOMING};
   `;
   db.exec(query);
-
-    db.pragma('user_version = 1160');
-  })();
-  logger.info('updateToSchemaVersion1160: success!');
 }
@@ -1,29 +1,14 @@
 // Copyright 2024 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
 import type { Database } from '@signalapp/sqlcipher';
-import type { LoggerType } from '../../types/Logging';
 import { sql } from '../util';

-export const version = 1170;
-
-export function updateToSchemaVersion1170(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1170) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1170(db: Database): void {
   const [query] = sql`
     DROP INDEX IF EXISTS messages_callHistory_markReadBefore;
     CREATE INDEX messages_callHistory_markReadBefore
       ON messages (type, seenStatus, received_at DESC)
       WHERE type IS 'call-history';
   `;
   db.exec(query);
-
-    db.pragma('user_version = 1170');
-  })();
-  logger.info('updateToSchemaVersion1170: success!');
 }
@@ -1,37 +1,22 @@
 // Copyright 2024 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
 import type { Database } from '@signalapp/sqlcipher';
-import type { LoggerType } from '../../types/Logging';
 import { AttachmentDownloadSource } from '../Interface';

-export const version = 1180;
-
-export function updateToSchemaVersion1180(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1180) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1180(db: Database): void {
   db.exec(`
     ALTER TABLE attachment_downloads
       ADD COLUMN source TEXT NOT NULL DEFAULT ${AttachmentDownloadSource.STANDARD};

     ALTER TABLE attachment_downloads
       -- this default value will be overridden by getNextAttachmentDownloadJobs
       ADD COLUMN ciphertextSize INTEGER NOT NULL DEFAULT 0;
   `);

   db.exec(`
     CREATE INDEX attachment_downloads_source_ciphertextSize
       ON attachment_downloads (
         source, ciphertextSize
       );
   `);
-
-    db.pragma('user_version = 1180');
-  })();
-  logger.info('updateToSchemaVersion1180: success!');
 }
@@ -1,38 +1,22 @@
 // Copyright 2024 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
 import type { Database } from '@signalapp/sqlcipher';
-import type { LoggerType } from '../../types/Logging';
-
-export const version = 1190;
-
-export function updateToSchemaVersion1190(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1190) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1190(db: Database): void {
   db.exec(`
     ALTER TABLE callLinks ADD COLUMN storageID TEXT;
     ALTER TABLE callLinks ADD COLUMN storageVersion INTEGER;
     ALTER TABLE callLinks ADD COLUMN storageUnknownFields BLOB;
     ALTER TABLE callLinks ADD COLUMN storageNeedsSync INTEGER NOT NULL DEFAULT 0;
     ALTER TABLE callLinks ADD COLUMN deletedAt INTEGER;
   `);
   db.prepare(
     `
     UPDATE callLinks
     SET deletedAt = $deletedAt
     WHERE deleted = 1;
     `
   ).run({
     deletedAt: new Date().getTime(),
   });
-
-    db.pragma('user_version = 1190');
-  })();
-  logger.info('updateToSchemaVersion1190: success!');
 }
@@ -1,29 +1,14 @@
 // Copyright 2024 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
 import type { Database } from '@signalapp/sqlcipher';
-import type { LoggerType } from '../../types/Logging';
-
-export const version = 1200;
-
-export function updateToSchemaVersion1200(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1200) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1200(db: Database): void {
   // The standard getNextAttachmentDownloadJobs query uses active & source conditions,
   // ordered by received_at
   db.exec(`
     CREATE INDEX attachment_downloads_active_source_receivedAt
       ON attachment_downloads (
         active, source, receivedAt
       );
   `);
-
-    db.pragma('user_version = 1200');
-  })();
-  logger.info('updateToSchemaVersion1200: success!');
 }
@@ -1,30 +1,15 @@
 // Copyright 2024 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only

 import type { Database } from '@signalapp/sqlcipher';
-import type { LoggerType } from '../../types/Logging';
-
-export const version = 1210;
-
-export function updateToSchemaVersion1210(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1210) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1210(db: Database): void {
   // The standard getNextAttachmentDownloadJobs query uses active & source conditions,
   // ordered by received_at
   db.exec(`
     ALTER TABLE callsHistory
     ADD COLUMN startedById TEXT DEFAULT NULL;

     ALTER TABLE callsHistory
     ADD COLUMN endedTimestamp INTEGER DEFAULT NULL;
   `);
-
-    db.pragma('user_version = 1210');
-  })();
-
-  logger.info('updateToSchemaVersion1210: success!');
 }

@@ -120,100 +120,84 @@ function migrateSession(
     throw missingCaseError(session.version);
   }
 }

-export function updateToSchemaVersion1220(
-  currentVersion: number,
+export default function updateToSchemaVersion1220(
   db: Database,
   logger: LoggerType
 ): void {
-  if (currentVersion >= 1220) {
-    return;
-  }
-
-  db.transaction(() => {
   db.exec(`
     ALTER TABLE sessions
     RENAME TO old_sessions;

     CREATE TABLE sessions (
       id TEXT NOT NULL PRIMARY KEY,
       ourServiceId TEXT NOT NULL,
       serviceId TEXT NOT NULL,
       conversationId TEXT NOT NULL,
       deviceId INTEGER NOT NULL,
       record BLOB NOT NULL
     ) STRICT;
   `);

   const getItem = db.prepare(
     `
       SELECT json -> '$.value' FROM items WHERE id IS ?
     `,
     {
       pluck: true,
     }
   );

   const identityKeyMapJson = getItem.get<string>(['identityKeyMap']);
   const registrationIdMapJson = getItem.get<string>(['registrationIdMap']);

   // If we don't have private keys - the sessions cannot be used anyway
   if (!identityKeyMapJson || !registrationIdMapJson) {
-    logger.info('updateToSchemaVersion1220: no identity/registration id');
+    logger.info('no identity/registration id');
     db.exec('DROP TABLE old_sessions');
-    db.pragma('user_version = 1220');
     return;
   }

   const identityKeyMap = identityKeyMapSchema.parse(
     JSON.parse(identityKeyMapJson)
   );
   const registrationIdMap = registrationIdMapSchema.parse(
     JSON.parse(registrationIdMapJson)
   );

   const getSessionsPage = db.prepare(
     'DELETE FROM old_sessions RETURNING * LIMIT 1000'
   );
   const insertSession = db.prepare(`
     INSERT INTO sessions
     (id, ourServiceId, serviceId, conversationId, deviceId, record)
     VALUES
     ($id, $ourServiceId, $serviceId, $conversationId, $deviceId, $record)
   `);

   let migrated = 0;
   let failed = 0;

   // eslint-disable-next-line no-constant-condition
   while (true) {
     const rows: Array<PreviousSessionRowType> = getSessionsPage.all();
     if (rows.length === 0) {
       break;
     }

     for (const row of rows) {
       try {
         insertSession.run(
           migrateSession(row, identityKeyMap, registrationIdMap, logger)
         );
         migrated += 1;
       } catch (error) {
         failed += 1;
-        logger.error(
-          'updateToSchemaVersion1220: failed to migrate session',
-          Errors.toLogFormat(error)
-        );
+        logger.error('failed to migrate session', Errors.toLogFormat(error));
       }
     }
   }

-  logger.info(
-    `updateToSchemaVersion1220: migrated ${migrated} sessions, ` +
-      `${failed} failed`
-  );
+  logger.info(`migrated ${migrated} sessions, ${failed} failed`);

   db.exec('DROP TABLE old_sessions');
-
-    db.pragma('user_version = 1220');
-  })();
-
-  logger.info('updateToSchemaVersion1220: success!');
 }

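The session copy above drains old_sessions in pages of 1000 rows with DELETE ... RETURNING *, so each pass reads and removes a batch in one statement and the loop stops when the staging table is empty. A minimal standalone sketch of that paging pattern using the same prepared-statement API; the old_items/items tables and migrateRow callback are illustrative, not Signal's:

import type { Database } from '@signalapp/sqlcipher';

// Drain a staging table in fixed-size batches, transforming each row on the way.
function drainInBatches(
  db: Database,
  migrateRow: (row: unknown) => Record<string, unknown>
): number {
  const takePage = db.prepare('DELETE FROM old_items RETURNING * LIMIT 1000');
  const insert = db.prepare('INSERT INTO items (id, data) VALUES ($id, $data)');

  let moved = 0;
  // eslint-disable-next-line no-constant-condition
  while (true) {
    const rows = takePage.all();
    if (rows.length === 0) {
      break; // staging table is empty
    }
    for (const row of rows) {
      insert.run(migrateRow(row));
      moved += 1;
    }
  }
  return moved;
}
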
@@ -1,28 +1,12 @@
 // Copyright 2024 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only

 import type { Database } from '@signalapp/sqlcipher';
-import type { LoggerType } from '../../types/Logging';
-
-export const version = 1230;
-
-export function updateToSchemaVersion1230(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1230) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1230(db: Database): void {
   db.exec(`
     DROP INDEX IF EXISTS callLinks_adminKey;

     CREATE INDEX callLinks_adminKey
     ON callLinks (adminKey);
   `);
-
-    db.pragma('user_version = 1230');
-  })();
-
-  logger.info('updateToSchemaVersion1230: success!');
 }

@@ -3,33 +3,16 @@

 import type { Database } from '@signalapp/sqlcipher';

-import type { LoggerType } from '../../types/Logging';
 import { sql } from '../util';

-export const version = 1240;
-
-export function updateToSchemaVersion1240(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1240) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1240(db: Database): void {
   const [createTable] = sql`
     CREATE TABLE defunctCallLinks (
       roomId TEXT NOT NULL PRIMARY KEY,
       rootKey BLOB NOT NULL,
       adminKey BLOB
     ) STRICT;
   `;

   db.exec(createTable);
-
-    db.pragma('user_version = 1240');
-  })();
-
-  logger.info('updateToSchemaVersion1240: success!');
 }

@@ -1,28 +1,12 @@
 // Copyright 2024 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only

 import type { Database } from '@signalapp/sqlcipher';
-import type { LoggerType } from '../../types/Logging';
-
-export const version = 1250;
-
-export function updateToSchemaVersion1250(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1250) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1250(db: Database): void {
   db.exec(`
     ALTER TABLE defunctCallLinks ADD COLUMN storageID TEXT;
     ALTER TABLE defunctCallLinks ADD COLUMN storageVersion INTEGER;
     ALTER TABLE defunctCallLinks ADD COLUMN storageUnknownFields BLOB;
     ALTER TABLE defunctCallLinks ADD COLUMN storageNeedsSync INTEGER NOT NULL DEFAULT 0;
   `);
-
-    db.pragma('user_version = 1250');
-  })();
-
-  logger.info('updateToSchemaVersion1250: success!');
 }

@@ -1,30 +1,13 @@
 // Copyright 2024 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only

 import type { Database } from '@signalapp/sqlcipher';
-import type { LoggerType } from '../../types/Logging';
 import { sql } from '../util';

-export const version = 1260;
-
-export function updateToSchemaVersion1260(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1260) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1260(db: Database): void {
   const [query] = sql`
     DROP INDEX IF EXISTS syncTasks_order;
     CREATE INDEX syncTasks_delete ON syncTasks (attempts DESC);
   `;

   db.exec(query);
-
-    db.pragma('user_version = 1260');
-  })();
-
-  logger.info('updateToSchemaVersion1260: success!');
 }

@@ -1,53 +1,36 @@
 // Copyright 2025 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only

 import type { Database } from '@signalapp/sqlcipher';
-import type { LoggerType } from '../../types/Logging';
 import { sql } from '../util';

-export const version = 1270;
-
-export function updateToSchemaVersion1270(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1270) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1270(db: Database): void {
   const [query] = sql`
     ALTER TABLE messages
       ADD COLUMN timestamp INTEGER;
     ALTER TABLE messages
       ADD COLUMN received_at_ms INTEGER;
     ALTER TABLE messages
       ADD COLUMN unidentifiedDeliveryReceived INTEGER;
     ALTER TABLE messages
       ADD COLUMN serverTimestamp INTEGER;

     ALTER TABLE messages
       RENAME COLUMN source TO legacySource;
     ALTER TABLE messages
       ADD COLUMN source TEXT;

     UPDATE messages SET
       timestamp = json_extract(json, '$.timestamp'),
       received_at_ms = json_extract(json, '$.received_at_ms'),
       unidentifiedDeliveryReceived =
         json_extract(json, '$.unidentifiedDeliveryReceived'),
       serverTimestamp =
         json_extract(json, '$.serverTimestamp'),
       source = IFNULL(json_extract(json, '$.source'), '+' || legacySource);

     ALTER TABLE messages
       DROP COLUMN legacySource;
   `;

   db.exec(query);
-
-    db.pragma('user_version = 1270');
-  })();
-
-  logger.info('updateToSchemaVersion1270: success!');
 }

@@ -14,166 +14,142 @@ import { sql } from '../util';
 import type { WritableDB } from '../Interface';
 import { getOurUuid } from './41-uuid-keys';

-export const version = 1280;
-
-export function updateToSchemaVersion1280(
-  currentVersion: number,
+export default function updateToSchemaVersion1280(
   db: WritableDB,
   logger: LoggerType
 ): void {
-  if (currentVersion >= 1280) {
-    return;
-  }
-
-  db.transaction(() => {
-    const ourAci = getOurUuid(db);
+  const ourAci = getOurUuid(db);

   let rows = db.prepare('SELECT * FROM unprocessed').all();

   const [query] = sql`
     DROP TABLE unprocessed;

     CREATE TABLE unprocessed(
       id TEXT NOT NULL PRIMARY KEY ASC,
       type INTEGER NOT NULL,
       timestamp INTEGER NOT NULL,
       attempts INTEGER NOT NULL,
       receivedAtCounter INTEGER NOT NULL,
       urgent INTEGER NOT NULL,
       story INTEGER NOT NULL,
       serverGuid TEXT NOT NULL,
       serverTimestamp INTEGER NOT NULL,
       isEncrypted INTEGER NOT NULL,
       content BLOB NOT NULL,
       messageAgeSec INTEGER NOT NULL,
       destinationServiceId TEXT NOT NULL,

       -- Not present for 1:1 messages and not sealed messages
       groupId TEXT,

       -- Not present for sealed envelopes
       reportingToken BLOB,
       source TEXT,
       sourceServiceId TEXT,
       sourceDevice TEXT,

       -- Present only for PNP change number
       updatedPni TEXT
     ) STRICT;

     CREATE INDEX unprocessed_timestamp ON unprocessed
       (timestamp);

     CREATE INDEX unprocessed_byReceivedAtCounter ON unprocessed
       (receivedAtCounter);
   `;
   db.exec(query);

   const insertStmt = db.prepare(`
     INSERT INTO unprocessed
     (id, type, timestamp, attempts, receivedAtCounter, urgent, story,
     serverGuid, serverTimestamp, isEncrypted, content, source,
     messageAgeSec, sourceServiceId, sourceDevice,
     destinationServiceId, reportingToken)
     VALUES
     ($id, $type, $timestamp, $attempts, $receivedAtCounter, $urgent, $story,
     $serverGuid, $serverTimestamp, $isEncrypted, $content, $source,
     $messageAgeSec, $sourceServiceId, $sourceDevice,
     $destinationServiceId, $reportingToken);
   `);

   let oldEnvelopes = 0;

   if (!ourAci) {
     if (rows.length) {
-      logger.warn(
-        `updateToSchemaVersion1280: no aci, dropping ${rows.length} envelopes`
-      );
+      logger.warn(`no aci, dropping ${rows.length} envelopes`);
       rows = [];
     }
   }

   for (const row of rows) {
     const {
       id,
       envelope,
       decrypted,
       timestamp,
       attempts,
       version: envelopeVersion,
       receivedAtCounter,
       urgent,
       story,
       serverGuid,
       serverTimestamp,
       ...rest
     } = row;

     // Skip old and/or invalid rows
     if (envelopeVersion !== 2 || !envelope) {
       oldEnvelopes += 1;
       continue;
     }

     try {
       const decoded = Proto.Envelope.decode(
         Buffer.from(String(envelope), 'base64')
       );
       if (!decoded.content) {
         throw new Error('Missing envelope content');
       }

       const content = decrypted
         ? Buffer.from(String(decrypted), 'base64')
         : decoded.content;

       insertStmt.run({
         ...rest,
         id,
         type: decoded.type ?? Proto.Envelope.Type.UNKNOWN,
         content: content ?? null,
         isEncrypted: decrypted ? 0 : 1,
         timestamp: timestamp || Date.now(),
         attempts: attempts || 0,
         receivedAtCounter: receivedAtCounter || 0,
         urgent: urgent ? 1 : 0,
         story: story ? 1 : 0,
         serverGuid: serverGuid || getGuid(),
         serverTimestamp: serverTimestamp || 0,
         destinationServiceId:
           normalizeServiceId(
             decoded.destinationServiceId || ourAci,
             'Envelope.destinationServiceId'
           ) ?? null,
-        updatedPni: isUntaggedPniString(decoded.updatedPni)
-          ? normalizePni(
-              toTaggedPni(decoded.updatedPni),
-              'Envelope.updatedPni'
-            )
-          : null,
+        updatedPni: isUntaggedPniString(decoded.updatedPni)
+          ? normalizePni(toTaggedPni(decoded.updatedPni), 'Envelope.updatedPni')
+          : null,
         // Sadly not captured previously
         messageAgeSec: 0,
         reportingToken: decoded.reportSpamToken?.length
           ? decoded.reportSpamToken
           : null,
       });
     } catch (error) {
-      logger.warn(
-        'updateToSchemaVersion1280: failed to migrate unprocessed',
-        id,
-        error
-      );
+      logger.warn('failed to migrate unprocessed', id, error);
     }
   }

   if (oldEnvelopes !== 0) {
-    logger.warn(
-      `updateToSchemaVersion1280: dropped ${oldEnvelopes} envelopes`
-    );
+    logger.warn(`dropped ${oldEnvelopes} envelopes`);
   }
-
-  db.pragma('user_version = 1280');
-  })();
-
-  logger.info('updateToSchemaVersion1280: success!');
 }

@@ -1,34 +1,17 @@
 // Copyright 2025 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
-import type { LoggerType } from '../../types/Logging';
 import { sql } from '../util';
 import type { WritableDB } from '../Interface';

-export const version = 1290;
-
-export function updateToSchemaVersion1290(
-  currentVersion: number,
-  db: WritableDB,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1290) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1290(db: WritableDB): void {
   const [query] = sql`
     ALTER TABLE unprocessed RENAME COLUMN sourceDevice TO legacySourceDevice;
     ALTER TABLE unprocessed ADD COLUMN sourceDevice INTEGER;

     UPDATE unprocessed
       SET sourceDevice = legacySourceDevice;

     ALTER TABLE unprocessed DROP COLUMN legacySourceDevice;
   `;
   db.exec(query);
-
-    db.pragma('user_version = 1290');
-  })();
-
-  logger.info('updateToSchemaVersion1290: success!');
 }

@@ -1,35 +1,18 @@
 // Copyright 2025 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
-import type { LoggerType } from '../../types/Logging';
 import { sql } from '../util';
 import type { WritableDB } from '../Interface';

-export const version = 1300;
-
-export function updateToSchemaVersion1300(
-  currentVersion: number,
-  db: WritableDB,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1300) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1300(db: WritableDB): void {
   const [query] = sql`
     ALTER TABLE sticker_references
       ADD COLUMN stickerId INTEGER NOT NULL DEFAULT -1;
     ALTER TABLE sticker_references
       ADD COLUMN isUnresolved INTEGER NOT NULL DEFAULT 0;

     CREATE INDEX unresolved_sticker_refs
       ON sticker_references (packId, stickerId)
       WHERE isUnresolved IS 1;
   `;
   db.exec(query);
-
-    db.pragma('user_version = 1300');
-  })();
-
-  logger.info('updateToSchemaVersion1300: success!');
 }

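The unresolved_sticker_refs index above is partial (WHERE isUnresolved IS 1), so it only covers rows still waiting to be resolved. A hedged sketch of the kind of lookup such an index can serve; the function and query are illustrative, not taken from this commit:

import type { Database } from '@signalapp/sqlcipher';

// Illustrative: list the sticker ids in one pack that still need resolving.
function getUnresolvedStickerIds(db: Database, packId: string): Array<unknown> {
  return db
    .prepare(
      `
        SELECT packId, stickerId
        FROM sticker_references
        WHERE isUnresolved IS 1 AND packId = $packId;
      `
    )
    .all({ packId });
}
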
@@ -4,37 +4,24 @@ import type { LoggerType } from '../../types/Logging';
 import { sql } from '../util';
 import type { WritableDB } from '../Interface';

-export const version = 1310;
-
 // Value from ts/util/timestamp.ts at the time of creation of this migration
 const MAX_SAFE_DATE = 8640000000000000;

-export function updateToSchemaVersion1310(
-  currentVersion: number,
+export default function updateToSchemaVersion1310(
   db: WritableDB,
   logger: LoggerType
 ): void {
-  if (currentVersion >= 1310) {
-    return;
-  }
-
-  db.transaction(() => {
   const [query, params] = sql`
     UPDATE conversations
     SET json = json_replace(
       json,
       '$.muteExpiresAt',
       9007199254740991 -- max safe integer
     )
     WHERE json ->> '$.muteExpiresAt' IS ${MAX_SAFE_DATE};
   `;
   const { changes } = db.prepare(query).run(params);
   if (changes !== 0) {
-    logger.warn(`updateToSchemaVersion1310: fixed ${changes} conversations`);
+    logger.warn(`fixed ${changes} conversations`);
   }
-
-  db.pragma('user_version = 1310');
-  })();
-
-  logger.info('updateToSchemaVersion1310: success!');
 }

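For context on the two constants in this hunk: MAX_SAFE_DATE (8640000000000000) is the largest millisecond offset a JavaScript Date accepts, while the replacement value 9007199254740991 is Number.MAX_SAFE_INTEGER, so the migration rewrites muteExpiresAt from the Date bound to the integer bound. A small standalone illustration of that relationship (not part of the commit):

// Largest millisecond offset a JavaScript Date accepts: ±8,640,000,000,000,000.
const MAX_SAFE_DATE = 8640000000000000;

// Largest integer represented exactly by an IEEE-754 double: 2^53 - 1.
const MAX_SAFE_INTEGER = 9007199254740991; // === Number.MAX_SAFE_INTEGER

console.log(new Date(MAX_SAFE_DATE).toISOString()); // '+275760-09-13T00:00:00.000Z'
console.log(Number.isNaN(new Date(MAX_SAFE_DATE + 1).getTime())); // true
console.log(MAX_SAFE_INTEGER > MAX_SAFE_DATE); // true
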
@@ -1,38 +1,21 @@
 // Copyright 2025 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only

-import type { LoggerType } from '../../types/Logging';
 import { sql } from '../util';
 import type { WritableDB } from '../Interface';

-export const version = 1320;
-
-export function updateToSchemaVersion1320(
-  currentVersion: number,
-  db: WritableDB,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1320) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1320(db: WritableDB): void {
   const [query] = sql`
     DROP INDEX unprocessed_timestamp;

     ALTER TABLE unprocessed
       ADD COLUMN receivedAtDate INTEGER DEFAULT 0 NOT NULL;

     UPDATE unprocessed
       SET receivedAtDate = timestamp;

     CREATE INDEX unprocessed_byReceivedAtDate ON unprocessed
       (receivedAtDate);
   `;
   db.exec(query);
-
-    db.pragma('user_version = 1320');
-  })();
-
-  logger.info('updateToSchemaVersion1320: success!');
 }

@@ -1,29 +1,12 @@
 // Copyright 2025 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
 import type { Database } from '@signalapp/sqlcipher';
-import type { LoggerType } from '../../types/Logging';
 import { sql } from '../util';

-export const version = 1330;
-
-export function updateToSchemaVersion1330(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1330) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1330(db: Database): void {
   const [query] = sql`
     CREATE INDEX syncTasks_type ON syncTasks (type);
   `;

   db.exec(query);
-
-    db.pragma('user_version = 1330');
-  })();
-
-  logger.info('updateToSchemaVersion1330: success!');
 }

@@ -1,44 +1,27 @@
 // Copyright 2025 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
 import type { Database } from '@signalapp/sqlcipher';
-import type { LoggerType } from '../../types/Logging';
 import { sql } from '../util';

-export const version = 1340;
-
-export function updateToSchemaVersion1340(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1340) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1340(db: Database): void {
   const [query] = sql`
     CREATE TABLE recentGifs (
       id TEXT NOT NULL PRIMARY KEY,
       title TEXT NOT NULL,
       description TEXT NOT NULL,
       previewMedia_url TEXT NOT NULL,
       previewMedia_width INTEGER NOT NULL,
       previewMedia_height INTEGER NOT NULL,
       attachmentMedia_url TEXT NOT NULL,
       attachmentMedia_width INTEGER NOT NULL,
       attachmentMedia_height INTEGER NOT NULL,
       lastUsedAt INTEGER NOT NULL
     ) STRICT;

     CREATE INDEX recentGifs_order ON recentGifs (
       lastUsedAt DESC
     );
   `;

   db.exec(query);
-
-    db.pragma('user_version = 1340');
-  })();
-
-  logger.info('updateToSchemaVersion1340: success!');
 }

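recentGifs keys on id and is read back in lastUsedAt DESC order, which suggests that re-using a GIF should bump its timestamp rather than insert a duplicate row. A hedged sketch of an upsert against this schema; the touchRecentGif helper and its input shape are illustrative, not part of this commit:

import type { Database } from '@signalapp/sqlcipher';

type RecentGif = Readonly<{
  id: string;
  title: string;
  description: string;
  previewMedia: { url: string; width: number; height: number };
  attachmentMedia: { url: string; width: number; height: number };
}>;

// Illustrative: record (or refresh) a GIF in the recents list.
function touchRecentGif(db: Database, gif: RecentGif, now = Date.now()): void {
  db.prepare(
    `
      INSERT INTO recentGifs (
        id, title, description,
        previewMedia_url, previewMedia_width, previewMedia_height,
        attachmentMedia_url, attachmentMedia_width, attachmentMedia_height,
        lastUsedAt
      )
      VALUES (
        $id, $title, $description,
        $previewUrl, $previewWidth, $previewHeight,
        $attachmentUrl, $attachmentWidth, $attachmentHeight,
        $lastUsedAt
      )
      ON CONFLICT (id) DO UPDATE SET lastUsedAt = excluded.lastUsedAt;
    `
  ).run({
    id: gif.id,
    title: gif.title,
    description: gif.description,
    previewUrl: gif.previewMedia.url,
    previewWidth: gif.previewMedia.width,
    previewHeight: gif.previewMedia.height,
    attachmentUrl: gif.attachmentMedia.url,
    attachmentWidth: gif.attachmentMedia.width,
    attachmentHeight: gif.attachmentMedia.height,
    lastUsedAt: now,
  });
}
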
@@ -1,58 +1,41 @@
 // Copyright 2025 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
 import type { Database } from '@signalapp/sqlcipher';
-import type { LoggerType } from '../../types/Logging';
 import { sql } from '../util';

-export const version = 1350;
-
-export function updateToSchemaVersion1350(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1350) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1350(db: Database): void {
   const [query] = sql`
     CREATE TABLE notificationProfiles(
       id TEXT PRIMARY KEY NOT NULL,

       name TEXT NOT NULL,
       emoji TEXT,
       /* A numeric representation of a color, like 0xAARRGGBB */
       color INTEGER NOT NULL,

       createdAtMs INTEGER NOT NULL,

       allowAllCalls INTEGER NOT NULL,
       allowAllMentions INTEGER NOT NULL,

       /* A JSON array of conversationId strings */
       allowedMembersJson TEXT NOT NULL,
       scheduleEnabled INTEGER NOT NULL,

       /* 24-hour clock int, 0000-2359 (e.g., 15, 900, 1130, 2345) */
       scheduleStartTime INTEGER,
       scheduleEndTime INTEGER,

       /* A JSON object with true/false for each of the numbers in the Protobuf enum */
       scheduleDaysEnabledJson TEXT,
       deletedAtTimestampMs INTEGER,

       storageID TEXT,
       storageVersion INTEGER,
       storageUnknownFields BLOB,
       storageNeedsSync INTEGER NOT NULL DEFAULT 0
     ) STRICT;
   `;

   db.exec(query);
-
-    db.pragma('user_version = 1350');
-  })();
-
-  logger.info('updateToSchemaVersion1350: success!');
 }

@@ -1,119 +1,102 @@
 // Copyright 2024 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only

-import type { LoggerType } from '../../types/Logging';
 import type { WritableDB } from '../Interface';

-export const version = 1360;
-
-export function updateToSchemaVersion1360(
-  currentVersion: number,
-  db: WritableDB,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1360) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1360(db: WritableDB): void {
   db.exec(`
     DROP TABLE IF EXISTS message_attachments;
   `);

   db.exec(`
     CREATE TABLE message_attachments (
       messageId TEXT NOT NULL REFERENCES messages(id) ON DELETE CASCADE,
       -- For editHistoryIndex to be part of the primary key, it cannot be NULL in strict tables.
       -- For that reason, we use a value of -1 to indicate that it is the root message (not in editHistory)
       editHistoryIndex INTEGER NOT NULL,
       attachmentType TEXT NOT NULL, -- 'long-message' | 'quote' | 'attachment' | 'preview' | 'contact' | 'sticker'
       orderInMessage INTEGER NOT NULL,
       conversationId TEXT NOT NULL,
       sentAt INTEGER NOT NULL,
       clientUuid TEXT,
       size INTEGER NOT NULL,
       contentType TEXT NOT NULL,
       path TEXT,
       plaintextHash TEXT,
       localKey TEXT,
       caption TEXT,
       fileName TEXT,
       blurHash TEXT,
       height INTEGER,
       width INTEGER,
       digest TEXT,
       key TEXT,
       iv TEXT,
       downloadPath TEXT,
       version INTEGER,
       incrementalMac TEXT,
       incrementalMacChunkSize INTEGER,
       transitCdnKey TEXT,
       transitCdnNumber INTEGER,
       transitCdnUploadTimestamp INTEGER,
       backupMediaName TEXT,
       backupCdnNumber INTEGER,
       isReencryptableToSameDigest INTEGER,
       reencryptionIv TEXT,
       reencryptionKey TEXT,
       reencryptionDigest TEXT,
       thumbnailPath TEXT,
       thumbnailSize INTEGER,
       thumbnailContentType TEXT,
       thumbnailLocalKey TEXT,
       thumbnailVersion INTEGER,
       screenshotPath TEXT,
       screenshotSize INTEGER,
       screenshotContentType TEXT,
       screenshotLocalKey TEXT,
       screenshotVersion INTEGER,
       backupThumbnailPath TEXT,
       backupThumbnailSize INTEGER,
       backupThumbnailContentType TEXT,
       backupThumbnailLocalKey TEXT,
       backupThumbnailVersion INTEGER,
       storyTextAttachmentJson TEXT,
       localBackupPath TEXT,
       flags INTEGER,
       error INTEGER,
       wasTooBig INTEGER,
       isCorrupted INTEGER,
       copiedFromQuotedAttachment INTEGER,
       pending INTEGER,
       backfillError INTEGER,
       PRIMARY KEY (messageId, editHistoryIndex, attachmentType, orderInMessage)
     ) STRICT;
   `);

   // The following indexes were removed in migration 1370

   // db.exec(
   //   'CREATE INDEX message_attachments_messageId
   //     ON message_attachments (messageId);'
   // );
   // db.exec(
   //   'CREATE INDEX message_attachments_plaintextHash
   //     ON message_attachments (plaintextHash);'
   // );
   // db.exec(
   //   'CREATE INDEX message_attachments_path
   //     ON message_attachments (path);'
   // );
   // db.exec(
   //   'CREATE INDEX message_attachments_all_thumbnailPath
   //     ON message_attachments (thumbnailPath);'
   // );
   // db.exec(
   //   'CREATE INDEX message_attachments_all_screenshotPath
   //     ON message_attachments (screenshotPath);'
   // );
   // db.exec(
   //   'CREATE INDEX message_attachments_all_backupThumbnailPath
   //     ON message_attachments (backupThumbnailPath);'
   // );
-
-    db.pragma('user_version = 1360');
-  })();
-
-  logger.info('updateToSchemaVersion1360: success!');
 }

@@ -1,31 +1,15 @@
 // Copyright 2025 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only

-import type { LoggerType } from '../../types/Logging';
 import type { WritableDB } from '../Interface';

-export const version = 1370;
-
-export function updateToSchemaVersion1370(
-  currentVersion: number,
-  db: WritableDB,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1370) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1370(db: WritableDB): void {
   db.exec(`
     DROP INDEX IF EXISTS message_attachments_messageId;
     DROP INDEX IF EXISTS message_attachments_plaintextHash;
     DROP INDEX IF EXISTS message_attachments_path;
     DROP INDEX IF EXISTS message_attachments_all_thumbnailPath;
     DROP INDEX IF EXISTS message_attachments_all_screenshotPath;
     DROP INDEX IF EXISTS message_attachments_all_backupThumbnailPath;
   `);
-
-    db.pragma('user_version = 1370');
-  })();
-
-  logger.info('updateToSchemaVersion1370: success!');
 }

@@ -1,35 +1,19 @@
 // Copyright 2025 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only

-import type { LoggerType } from '../../types/Logging';
 import type { WritableDB } from '../Interface';

-export const version = 1380;
-
-export function updateToSchemaVersion1380(
-  currentVersion: number,
-  db: WritableDB,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1380) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1380(db: WritableDB): void {
   db.exec(`
     CREATE TABLE donationReceipts(
       id TEXT NOT NULL PRIMARY KEY,
       currencyType TEXT NOT NULL,
       paymentAmount INTEGER NOT NULL,
       paymentDetailJson TEXT NOT NULL,
       paymentType TEXT NOT NULL,
       timestamp INTEGER NOT NULL
     ) STRICT;

     CREATE INDEX donationReceipts_byTimestamp on donationReceipts(timestamp);
   `);
-
-    db.pragma('user_version = 1380');
-  })();
-
-  logger.info('updateToSchemaVersion1380: success!');
 }

@@ -1,53 +1,36 @@
 // Copyright 2025 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only

-import type { LoggerType } from '../../types/Logging';
 import { type WritableDB } from '../Interface';

-export const version = 1390;
-
-export function updateToSchemaVersion1390(
-  currentVersion: number,
-  db: WritableDB,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1390) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1390(db: WritableDB): void {
   // TODO: DESKTOP-8879 Digest column is only used for deduplication purposes; here we
   // genericize its name to attachmentSignature to allow jobs to be added with
   // plaintextHash and no digest
   db.exec(`
     ALTER TABLE attachment_downloads
       RENAME COLUMN digest TO attachmentSignature;
   `);

   // We no longer these need columns due to the new mediaName derivation
   db.exec(`
     ALTER TABLE message_attachments
       DROP COLUMN iv;
     ALTER TABLE message_attachments
       DROP COLUMN isReencryptableToSameDigest;
     ALTER TABLE message_attachments
       DROP COLUMN reencryptionIv;
     ALTER TABLE message_attachments
       DROP COLUMN reencryptionKey;
     ALTER TABLE message_attachments
       DROP COLUMN reencryptionDigest;
     ALTER TABLE message_attachments
       DROP COLUMN backupMediaName;
   `);

   // Because mediaName has changed, backupCdnNumber is no longer accurate
   db.exec(`
     UPDATE message_attachments
       SET backupCdnNumber = NULL;
   `);
-
-    db.pragma('user_version = 1390');
-  })();
-
-  logger.info('updateToSchemaVersion1390: success!');
 }

@@ -1,27 +1,11 @@
 // Copyright 2025 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only

-import type { LoggerType } from '../../types/Logging';
 import type { WritableDB } from '../Interface';

-export const version = 1400;
-
-export function updateToSchemaVersion1400(
-  currentVersion: number,
-  db: WritableDB,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1400) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1400(db: WritableDB): void {
   db.exec(`
     ALTER TABLE donationReceipts DROP COLUMN paymentDetailJson;
     ALTER TABLE donationReceipts DROP COLUMN paymentType;
   `);
-
-    db.pragma('user_version = 1400');
-  })();
-
-  logger.info('updateToSchemaVersion1400: success!');
 }

@@ -1,41 +1,24 @@
 // Copyright 2025 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only

-import type { LoggerType } from '../../types/Logging';
 import { type WritableDB } from '../Interface';

-export const version = 1410;
-
-export function updateToSchemaVersion1410(
-  currentVersion: number,
-  db: WritableDB,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1410) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1410(db: WritableDB): void {
   db.exec(`
     UPDATE conversations
       SET json = json_remove(json,
        '$.wallpaperPreset',
        '$.wallpaperPhotoPointerBase64',
        '$.dimWallpaperInDarkMode',
        '$.autoBubbleColor'
      );

     DELETE FROM items
     WHERE id IN (
       'defaultWallpaperPhotoPointer',
       'defaultWallpaperPreset',
       'defaultDimWallpaperInDarkMode',
       'defaultAutoBubbleColor'
     );
   `);
-
-    db.pragma('user_version = 1410');
-  })();
-
-  logger.info('updateToSchemaVersion1410: success!');
 }

@@ -1,70 +1,53 @@
 // Copyright 2025 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
 
-import type { LoggerType } from '../../types/Logging';
 import { AttachmentDownloadSource, type WritableDB } from '../Interface';
 
-export const version = 1420;
-
-export function updateToSchemaVersion1420(
-  currentVersion: number,
-  db: WritableDB,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 1420) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion1420(db: WritableDB): void {
+  db.exec(`
+    ALTER TABLE attachment_downloads
+      ADD COLUMN originalSource TEXT NOT NULL DEFAULT ${AttachmentDownloadSource.STANDARD};
+
+    UPDATE attachment_downloads
+      SET originalSource = source;
+  `);
+
+  db.exec(`
+    CREATE TABLE attachment_downloads_backup_stats (
+      id INTEGER PRIMARY KEY CHECK (id = 0),
+      totalBytes INTEGER NOT NULL,
+      completedBytes INTEGER NOT NULL
+    ) STRICT;
+
+    INSERT INTO attachment_downloads_backup_stats
+      (id, totalBytes, completedBytes)
+    VALUES
+      (0, 0, 0);
+
+    CREATE TRIGGER attachment_downloads_backup_job_insert
+    AFTER INSERT ON attachment_downloads
+    WHEN NEW.originalSource = 'backup_import'
+    BEGIN
+      UPDATE attachment_downloads_backup_stats SET
+        totalBytes = totalBytes + NEW.ciphertextSize;
+    END;
+
+    CREATE TRIGGER attachment_downloads_backup_job_update
+    AFTER UPDATE OF ciphertextSize ON attachment_downloads
+    WHEN NEW.originalSource = 'backup_import'
+    BEGIN
+      UPDATE attachment_downloads_backup_stats SET
+        totalBytes = MAX(0, totalBytes - OLD.ciphertextSize + NEW.ciphertextSize)
+      WHERE id = 0;
+    END;
+
+    CREATE TRIGGER attachment_downloads_backup_job_delete
+    AFTER DELETE ON attachment_downloads
+    WHEN OLD.originalSource = 'backup_import'
+    BEGIN
+      UPDATE attachment_downloads_backup_stats SET
+        completedBytes = completedBytes + OLD.ciphertextSize
+      WHERE id = 0;
+    END;
+  `);
-
-    db.pragma('user_version = 1420');
-  })();
-
-  logger.info('updateToSchemaVersion1420: success!');
 }
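Migration 1420 keeps the single-row stats table current entirely through triggers: inserting a backup-import download grows totalBytes, updating its ciphertextSize adjusts it, and deleting the row moves the bytes into completedBytes. A minimal sketch of reading that row follows, using only the prepare/get pattern already shown in these migrations; the helper name, return shape, and cast are illustrative assumptions, not code from this commit.

// Illustrative sketch: read the id = 0 row maintained by the
// attachment_downloads_backup_* triggers above.
import type { WritableDB } from '../Interface';

type BackupImportProgress = { totalBytes: number; completedBytes: number };

function getBackupImportDownloadProgress(
  db: WritableDB
): BackupImportProgress | undefined {
  // Cast is for illustration only; the real row always exists because the
  // migration inserts (0, 0, 0) up front.
  return db
    .prepare(
      `
      SELECT totalBytes, completedBytes
      FROM attachment_downloads_backup_stats
      WHERE id = 0;
      `
    )
    .get() as BackupImportProgress | undefined;
}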
@@ -25,22 +25,17 @@ export function getOurUuid(db: ReadableDB): string | undefined {
 }
 
 export default function updateToSchemaVersion41(
-  currentVersion: number,
   db: WritableDB,
   logger: LoggerType
 ): void {
-  if (currentVersion >= 41) {
-    return;
-  }
-
   const getConversationUuid = db.prepare(
     `
     SELECT uuid
     FROM
       conversations
     WHERE
       id = $conversationId
     `,
     {
       pluck: true,
     }
@@ -377,70 +372,61 @@ export default function updateToSchemaVersion41(
     logger.info(`Migrated ${migrated} identity keys`);
   };
 
-  db.transaction(() => {
-    db.exec(
+  db.exec(
+    `
+    -- Change type of 'id' column from INTEGER to STRING
+
+    ALTER TABLE preKeys
+      RENAME TO old_preKeys;
+
+    ALTER TABLE signedPreKeys
+      RENAME TO old_signedPreKeys;
+
+    CREATE TABLE preKeys(
+      id STRING PRIMARY KEY ASC,
+      json TEXT
+    );
+    CREATE TABLE signedPreKeys(
+      id STRING PRIMARY KEY ASC,
+      json TEXT
+    );
+
+    -- sqlite handles the type conversion
+    INSERT INTO preKeys SELECT * FROM old_preKeys;
+    INSERT INTO signedPreKeys SELECT * FROM old_signedPreKeys;
+
+    DROP TABLE old_preKeys;
+    DROP TABLE old_signedPreKeys;
+
+    -- Alter sessions
+
+    ALTER TABLE sessions
+      ADD COLUMN ourUuid STRING;
+
+    ALTER TABLE sessions
+      ADD COLUMN uuid STRING;
+    `
+  );
 
-    const ourUuid = getOurUuid(db);
+  const ourUuid = getOurUuid(db);
 
-    if (!isValidUuid(ourUuid)) {
-      const deleteCount = clearSessionsAndKeys();
-
-      if (deleteCount > 0) {
-        logger.error(
-          'updateToSchemaVersion41: no uuid is available, ' +
-            `erased ${deleteCount} sessions/keys`
-        );
-      }
-
-      db.pragma('user_version = 41');
-      return;
-    }
+  if (!isValidUuid(ourUuid)) {
+    const deleteCount = clearSessionsAndKeys();
+
+    if (deleteCount > 0) {
+      logger.error(`no uuid is available, erased ${deleteCount} sessions/keys`);
+    }
+
+    return;
+  }
 
-    prefixKeys(ourUuid);
-
-    updateSenderKeys(ourUuid);
-
-    updateSessions(ourUuid);
-
-    moveIdentityKeyToMap(ourUuid);
-
-    updateIdentityKeys();
-
-    db.pragma('user_version = 41');
-  })();
-
-  logger.info('updateToSchemaVersion41: success!');
+  prefixKeys(ourUuid);
+
+  updateSenderKeys(ourUuid);
+
+  updateSessions(ourUuid);
+
+  moveIdentityKeyToMap(ourUuid);
+
+  updateIdentityKeys();
 }
@@ -6,75 +6,62 @@ import type { WritableDB } from '../Interface';
 import type { LoggerType } from '../../types/Logging';
 
 export default function updateToSchemaVersion42(
-  currentVersion: number,
   db: WritableDB,
   logger: LoggerType
 ): void {
-  if (currentVersion >= 42) {
-    return;
-  }
-
-  db.transaction(() => {
-    // First, recreate messages table delete trigger with reaction support
+  // First, recreate messages table delete trigger with reaction support
 
+  db.exec(`
+    DROP TRIGGER messages_on_delete;
+
+    CREATE TRIGGER messages_on_delete AFTER DELETE ON messages BEGIN
+      DELETE FROM messages_fts WHERE rowid = old.rowid;
+      DELETE FROM sendLogPayloads WHERE id IN (
+        SELECT payloadId FROM sendLogMessageIds
+        WHERE messageId = old.id
+      );
+      DELETE FROM reactions WHERE rowid IN (
+        SELECT rowid FROM reactions
+        WHERE messageId = old.id
+      );
+    END;
+  `);
+
+  // Then, delete previously-orphaned reactions
+
+  // Note: we use `pluck` here to fetch only the first column of
+  // returned row.
+  const messageIdList: Array<string> = db
+    .prepare('SELECT id FROM messages ORDER BY id ASC;', {
+      pluck: true,
+    })
+    .all();
+  const allReactions: Array<{
+    rowid: number;
+    messageId: string;
+  }> = db.prepare('SELECT rowid, messageId FROM reactions;').all();
+
+  const messageIds = new Set(messageIdList);
+  const reactionsToDelete: Array<number> = [];
+
+  allReactions.forEach(reaction => {
+    if (!messageIds.has(reaction.messageId)) {
+      reactionsToDelete.push(reaction.rowid);
+    }
+  });
+
-  function deleteReactions(
-    rowids: ReadonlyArray<number>,
-    persistent: boolean
-  ) {
+  function deleteReactions(rowids: ReadonlyArray<number>, persistent: boolean) {
+    db.prepare(
+      `
+      DELETE FROM reactions
+      WHERE rowid IN ( ${rowids.map(() => '?').join(', ')} );
+      `,
+      { persistent }
+    ).run(rowids);
+  }
+
+  if (reactionsToDelete.length > 0) {
+    logger.info(`Deleting ${reactionsToDelete.length} orphaned reactions`);
+    batchMultiVarQuery(db, reactionsToDelete, deleteReactions);
+  }
-
-    db.pragma('user_version = 42');
-  })();
-
-  logger.info('updateToSchemaVersion42: success!');
 }
@@ -33,14 +33,9 @@ type ConversationType = Readonly<{
 }>;
 
 export default function updateToSchemaVersion43(
-  currentVersion: number,
   db: WritableDB,
   logger: LoggerType
 ): void {
-  if (currentVersion >= 43) {
-    return;
-  }
-
   type LegacyPendingMemberType = {
     addedByUserId?: string;
     conversationId: string;

@@ -117,8 +112,7 @@ export default function updateToSchemaVersion43(
     });
     if (!uuid) {
       logger.warn(
-        `updateToSchemaVersion43: ${logId}.${key} UUID not found ` +
-          `for ${member.conversationId}`
+        `${logId}.${key} UUID not found for ${member.conversationId}`
       );
       return undefined;
     }

@@ -158,15 +152,14 @@ export default function updateToSchemaVersion43(
 
     if (oldValue.length !== 0) {
       logger.info(
-        `updateToSchemaVersion43: migrated ${oldValue.length} ${key} ` +
+        `migrated ${oldValue.length} ${key} ` +
           `entries to ${newValue.length} for ${logId}`
       );
     }
 
     if (addedByCount > 0) {
       logger.info(
-        `updateToSchemaVersion43: migrated ${addedByCount} addedByUserId ` +
-          `in ${key} for ${logId}`
+        `migrated ${addedByCount} addedByUserId in ${key} for ${logId}`
      );
     }
   }

@@ -286,7 +279,7 @@ export default function updateToSchemaVersion43(
       }
       if (!newValue) {
         logger.warn(
-          `updateToSchemaVersion43: ${id}.groupV2Change.details.${key} ` +
+          `${id}.groupV2Change.details.${key} ` +
             `UUID not found for ${oldValue}`
         );
         return undefined;

@@ -342,7 +335,7 @@ export default function updateToSchemaVersion43(
 
       if (!uuid) {
         logger.warn(
-          `updateToSchemaVersion43: ${id}.invitedGV2Members UUID ` +
+          `${id}.invitedGV2Members UUID ` +
             `not found for ${conversationId}`
         );
         return undefined;

@@ -390,44 +383,35 @@ export default function updateToSchemaVersion43(
     return true;
   };
 
-  db.transaction(() => {
-    const allConversations = db
+  const allConversations = db
+    .prepare(
+      `
+      SELECT json
+      FROM conversations
+      ORDER BY id ASC;
+      `,
+      { pluck: true }
+    )
+    .all<string>()
+    .map(json => jsonToObject<ConversationType>(json));
 
-    logger.info(
-      'updateToSchemaVersion43: About to iterate through ' +
-        `${allConversations.length} conversations`
-    );
+  logger.info(
+    `About to iterate through ${allConversations.length} conversations`
+  );
 
+  for (const convo of allConversations) {
+    upgradeConversation(convo);
+  }
 
-    const messageCount = getCountFromTable(db, 'messages');
-    logger.info(
-      'updateToSchemaVersion43: About to iterate through ' +
-        `${messageCount} messages`
-    );
+  const messageCount = getCountFromTable(db, 'messages');
+  logger.info(`About to iterate through ${messageCount} messages`);
 
+  let updatedCount = 0;
+  for (const message of new TableIterator<MessageType>(db, 'messages')) {
+    if (upgradeMessage(message)) {
+      updatedCount += 1;
+    }
+  }
 
-    logger.info(`updateToSchemaVersion43: Updated ${updatedCount} messages`);
-
-    db.pragma('user_version = 43');
-  })();
-
-  logger.info('updateToSchemaVersion43: success!');
+  logger.info(`Updated ${updatedCount} messages`);
 }
@@ -3,41 +3,25 @@
 
 import type { Database } from '@signalapp/sqlcipher';
 
-import type { LoggerType } from '../../types/Logging';
-
-export default function updateToSchemaVersion44(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 44) {
-    return;
-  }
-
-  db.transaction(() => {
-    db.exec(
+export default function updateToSchemaVersion44(db: Database): void {
+  db.exec(
+    `
+    CREATE TABLE badges(
+      id TEXT PRIMARY KEY,
+      category TEXT NOT NULL,
+      name TEXT NOT NULL,
+      descriptionTemplate TEXT NOT NULL
+    );
+
+    CREATE TABLE badgeImageFiles(
+      badgeId TEXT REFERENCES badges(id)
+        ON DELETE CASCADE
+        ON UPDATE CASCADE,
+      'order' INTEGER NOT NULL,
+      url TEXT NOT NULL,
+      localPath TEXT,
+      theme TEXT NOT NULL
+    );
+    `
+  );
-
-    db.pragma('user_version = 44');
-  })();
-
-  logger.info('updateToSchemaVersion44: success!');
 }
@@ -3,135 +3,119 @@
 
 import type { Database } from '@signalapp/sqlcipher';
 
-import type { LoggerType } from '../../types/Logging';
-
-export default function updateToSchemaVersion45(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 45) {
-    return;
-  }
-
-  db.transaction(() => {
-    db.exec(
+export default function updateToSchemaVersion45(db: Database): void {
+  db.exec(
+    `
+    --- Add column to messages table
+    ALTER TABLE messages ADD COLUMN storyId STRING;
+
+    --- Update important message indices
+
+    DROP INDEX messages_conversation;
+    CREATE INDEX messages_conversation ON messages
+      (conversationId, type, storyId, received_at);
+
+    DROP INDEX messages_unread;
+    CREATE INDEX messages_unread ON messages
+      (conversationId, readStatus, type, storyId) WHERE readStatus IS NOT NULL;
+
+    --- Update attachment indices for All Media views
+
+    DROP INDEX messages_hasAttachments;
+    CREATE INDEX messages_hasAttachments
+      ON messages (conversationId, hasAttachments, received_at)
+      WHERE type IS NOT 'story' AND storyId IS NULL;
+
+    DROP INDEX messages_hasFileAttachments;
+    CREATE INDEX messages_hasFileAttachments
+      ON messages (conversationId, hasFileAttachments, received_at)
+      WHERE type IS NOT 'story' AND storyId IS NULL;
+
+    DROP INDEX messages_hasVisualMediaAttachments;
+    CREATE INDEX messages_hasVisualMediaAttachments
+      ON messages (conversationId, hasVisualMediaAttachments, received_at)
+      WHERE type IS NOT 'story' AND storyId IS NULL;
+
+    --- Message insert/update triggers to exclude stories and story replies
+
+    DROP TRIGGER messages_on_insert;
+    -- Note: any changes to this trigger must be reflected in
+    -- Server.ts: enableMessageInsertTriggersAndBackfill
+    CREATE TRIGGER messages_on_insert AFTER INSERT ON messages
+    WHEN new.isViewOnce IS NOT 1 AND new.storyId IS NULL
+    BEGIN
+      INSERT INTO messages_fts
+        (rowid, body)
+      VALUES
+        (new.rowid, new.body);
+    END;
+
+    DROP TRIGGER messages_on_update;
+    CREATE TRIGGER messages_on_update AFTER UPDATE ON messages
+    WHEN
+      (new.body IS NULL OR old.body IS NOT new.body) AND
+      new.isViewOnce IS NOT 1 AND new.storyId IS NULL
+    BEGIN
+      DELETE FROM messages_fts WHERE rowid = old.rowid;
+      INSERT INTO messages_fts
+        (rowid, body)
+      VALUES
+        (new.rowid, new.body);
+    END;
+
+    --- Update delete trigger to remove storyReads
+
+    --- Note: for future updates to this trigger, be sure to update Server.ts/removeAll()
+    --- (it deletes and re-adds this trigger for performance)
+    DROP TRIGGER messages_on_delete;
+    CREATE TRIGGER messages_on_delete AFTER DELETE ON messages BEGIN
+      DELETE FROM messages_fts WHERE rowid = old.rowid;
+      DELETE FROM sendLogPayloads WHERE id IN (
+        SELECT payloadId FROM sendLogMessageIds
+        WHERE messageId = old.id
+      );
+      DELETE FROM reactions WHERE rowid IN (
+        SELECT rowid FROM reactions
+        WHERE messageId = old.id
+      );
+      DELETE FROM storyReads WHERE storyId = old.storyId;
+    END;
+
+    --- Story Read History
+
+    CREATE TABLE storyReads (
+      authorId STRING NOT NULL,
+      conversationId STRING NOT NULL,
+      storyId STRING NOT NULL,
+      storyReadDate NUMBER NOT NULL,
+
+      PRIMARY KEY (authorId, storyId)
+    );
+
+    CREATE INDEX storyReads_data ON storyReads (
+      storyReadDate, authorId, conversationId
+    );
+
+    --- Story Distribution Lists
+
+    CREATE TABLE storyDistributions(
+      id STRING PRIMARY KEY NOT NULL,
+      name TEXT,
+
+      avatarUrlPath TEXT,
+      avatarKey BLOB,
+      senderKeyInfoJson STRING
+    );
+
+    CREATE TABLE storyDistributionMembers(
+      listId STRING NOT NULL REFERENCES storyDistributions(id)
+        ON DELETE CASCADE
+        ON UPDATE CASCADE,
+      uuid STRING NOT NULL,
+
+      PRIMARY KEY (listId, uuid)
+    )
+    `
+  );
-
-    db.pragma('user_version = 45');
-  })();
-
-  logger.info('updateToSchemaVersion45: success!');
 }
@@ -3,38 +3,22 @@
 
 import type { Database } from '@signalapp/sqlcipher';
 
-import type { LoggerType } from '../../types/Logging';
-
-export default function updateToSchemaVersion46(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 46) {
-    return;
-  }
-
-  db.transaction(() => {
-    db.exec(
+export default function updateToSchemaVersion46(db: Database): void {
+  db.exec(
+    `
+    --- Add column to messages table
+    ALTER TABLE messages
+      ADD COLUMN
+        isStory INTEGER
+        GENERATED ALWAYS
+        AS (type = 'story');
+
+    --- Update important message indices
+
+    DROP INDEX messages_conversation;
+    CREATE INDEX messages_conversation ON messages
+      (conversationId, isStory, storyId, received_at, sent_at);
+    `
+  );
-
-    db.pragma('user_version = 46');
-  })();
-
-  logger.info('updateToSchemaVersion46: success!');
 }
@@ -1,139 +1,128 @@
 // Copyright 2021 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
 
-import type { LoggerType } from '../../types/Logging';
 import { getOurUuid } from './41-uuid-keys';
+import type { LoggerType } from '../../types/Logging';
 import type { WritableDB } from '../Interface';
 
 export default function updateToSchemaVersion47(
-  currentVersion: number,
   db: WritableDB,
   logger: LoggerType
 ): void {
-  if (currentVersion >= 47) {
-    return;
-  }
-
-  db.transaction(() => {
-    db.exec(
+  db.exec(
+    `
+    DROP INDEX messages_conversation;
+
+    ALTER TABLE messages
+      DROP COLUMN isStory;
+    ALTER TABLE messages
+      ADD COLUMN isStory INTEGER
+      GENERATED ALWAYS AS (type IS 'story');
+
+    ALTER TABLE messages
+      ADD COLUMN isChangeCreatedByUs INTEGER NOT NULL DEFAULT 0;
+
+    ALTER TABLE messages
+      ADD COLUMN shouldAffectActivity INTEGER
+      GENERATED ALWAYS AS (
+        type IS NULL
+        OR
+        type NOT IN (
+          'change-number-notification',
+          'group-v1-migration',
+          'message-history-unsynced',
+          'profile-change',
+          'story',
+          'universal-timer-notification',
+          'verified-change',
+
+          'keychange'
+        )
+      );
+
+    ALTER TABLE messages
+      ADD COLUMN shouldAffectPreview INTEGER
+      GENERATED ALWAYS AS (
+        type IS NULL
+        OR
+        type NOT IN (
+          'change-number-notification',
+          'group-v1-migration',
+          'message-history-unsynced',
+          'profile-change',
+          'story',
+          'universal-timer-notification',
+          'verified-change'
+        )
+      );
+
+    ALTER TABLE messages
+      ADD COLUMN isUserInitiatedMessage INTEGER
+      GENERATED ALWAYS AS (
+        type IS NULL
+        OR
+        type NOT IN (
+          'change-number-notification',
+          'group-v1-migration',
+          'message-history-unsynced',
+          'profile-change',
+          'story',
+          'universal-timer-notification',
+          'verified-change',
+
+          'group-v2-change',
+          'keychange'
+        )
+      );
+
+    ALTER TABLE messages
+      ADD COLUMN isTimerChangeFromSync INTEGER
+      GENERATED ALWAYS AS (
+        json_extract(json, '$.expirationTimerUpdate.fromSync') IS 1
+      );
+
+    ALTER TABLE messages
+      ADD COLUMN isGroupLeaveEvent INTEGER
+      GENERATED ALWAYS AS (
+        type IS 'group-v2-change' AND
+        json_array_length(json_extract(json, '$.groupV2Change.details')) IS 1 AND
+        json_extract(json, '$.groupV2Change.details[0].type') IS 'member-remove' AND
+        json_extract(json, '$.groupV2Change.from') IS NOT NULL AND
+        json_extract(json, '$.groupV2Change.from') IS json_extract(json, '$.groupV2Change.details[0].uuid')
+      );
+
+    ALTER TABLE messages
+      ADD COLUMN isGroupLeaveEventFromOther INTEGER
+      GENERATED ALWAYS AS (
+        isGroupLeaveEvent IS 1
+        AND
+        isChangeCreatedByUs IS 0
+      );
+
+    CREATE INDEX messages_conversation ON messages
+      (conversationId, isStory, storyId, received_at, sent_at);
+
+    CREATE INDEX messages_preview ON messages
+      (conversationId, shouldAffectPreview, isGroupLeaveEventFromOther, expiresAt, received_at, sent_at);
+
+    CREATE INDEX messages_activity ON messages
+      (conversationId, shouldAffectActivity, isTimerChangeFromSync, isGroupLeaveEventFromOther, received_at, sent_at);
+
+    CREATE INDEX message_user_initiated ON messages (isUserInitiatedMessage);
+    `
+  );
 
-    const ourUuid = getOurUuid(db);
-    if (!ourUuid) {
-      logger.info('updateToSchemaVersion47: our UUID not found');
-    } else {
+  const ourUuid = getOurUuid(db);
+  if (!ourUuid) {
+    logger.info('our UUID not found');
+  } else {
+    db.prepare(
+      `
+      UPDATE messages SET
+        isChangeCreatedByUs = json_extract(json, '$.groupV2Change.from') IS $ourUuid;
+      `
+    ).run({
+      ourUuid,
+    });
+  }
-
-    db.pragma('user_version = 47');
-  })();
-
-  logger.info('updateToSchemaVersion47: success!');
 }
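Migration 47 leans heavily on SQLite generated columns: flags like isStory and isGroupLeaveEvent are computed from type and the json blob, so application code never writes them and they stay in sync automatically. The sketch below is a standalone illustration of that mechanism on a made-up table; it is not part of the migration, and the table, rows, and helper name are assumptions.

// Standalone illustration of a GENERATED ALWAYS column; the example table and
// data are invented for this sketch and do not exist in the app schema.
import type { Database } from '@signalapp/sqlcipher';

function demoGeneratedColumn(db: Database): void {
  db.exec(`
    CREATE TABLE example_messages (
      id TEXT PRIMARY KEY,
      type TEXT,
      isStory INTEGER GENERATED ALWAYS AS (type IS 'story')
    );
    INSERT INTO example_messages (id, type) VALUES ('a', 'story'), ('b', 'incoming');
  `);

  // isStory is derived from type at read time; nothing ever writes it directly.
  // Expected rows: { id: 'a', isStory: 1 } and { id: 'b', isStory: 0 }.
  const rows = db
    .prepare('SELECT id, isStory FROM example_messages ORDER BY id;')
    .all();
  void rows;
}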
@@ -3,28 +3,12 @@
 
 import type { Database } from '@signalapp/sqlcipher';
 
-import type { LoggerType } from '../../types/Logging';
-
-export default function updateToSchemaVersion48(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 48) {
-    return;
-  }
-
-  db.transaction(() => {
-    db.exec(
+export default function updateToSchemaVersion48(db: Database): void {
+  db.exec(
+    `
+    DROP INDEX message_user_initiated;
+
+    CREATE INDEX message_user_initiated ON messages (conversationId, isUserInitiatedMessage);
+    `
+  );
-
-    db.pragma('user_version = 48');
-  })();
-
-  logger.info('updateToSchemaVersion48: success!');
 }
@@ -3,31 +3,15 @@
 
 import type { Database } from '@signalapp/sqlcipher';
 
-import type { LoggerType } from '../../types/Logging';
-
-export default function updateToSchemaVersion49(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 49) {
-    return;
-  }
-
-  db.transaction(() => {
-    db.exec(
+export default function updateToSchemaVersion49(db: Database): void {
+  db.exec(
+    `
+    DROP INDEX messages_preview;
+
+    -- Note the omitted 'expiresAt' column in the index. If it is present
+    -- sqlite can't ORDER BY received_at, sent_at using this index.
+    CREATE INDEX messages_preview ON messages
+      (conversationId, shouldAffectPreview, isGroupLeaveEventFromOther, received_at, sent_at);
+    `
+  );
-
-    db.pragma('user_version = 49');
-  })();
-
-  logger.info('updateToSchemaVersion49: success!');
 }
@@ -3,30 +3,14 @@
 
 import type { Database } from '@signalapp/sqlcipher';
 
-import type { LoggerType } from '../../types/Logging';
-
-export default function updateToSchemaVersion50(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 50) {
-    return;
-  }
-
-  db.transaction(() => {
-    db.exec(
+export default function updateToSchemaVersion50(db: Database): void {
+  db.exec(
+    `
+    DROP INDEX messages_unread;
+
+    -- Note: here we move to the modern isStory/storyId fields and add received_at/sent_at.
+    CREATE INDEX messages_unread ON messages
+      (conversationId, readStatus, isStory, storyId, received_at, sent_at) WHERE readStatus IS NOT NULL;
+    `
+  );
-
-    db.pragma('user_version = 50');
-  })();
-
-  logger.info('updateToSchemaVersion50: success!');
 }
@@ -7,102 +7,81 @@ import type { WritableDB } from '../Interface';
 import { getJobsInQueue, insertJob } from '../Server';
 
 export default function updateToSchemaVersion51(
-  currentVersion: number,
   db: WritableDB,
   logger: LoggerType
 ): void {
-  if (currentVersion >= 51) {
-    return;
-  }
-
-  db.transaction(() => {
-    const deleteJobsInQueue = db.prepare(
+  const deleteJobsInQueue = db.prepare(
+    'DELETE FROM jobs WHERE queueType = $queueType'
+  );
+
+  // First, make sure that reactions job data has a type and conversationId
+  const reactionsJobs = getJobsInQueue(db, 'reactions');
+  deleteJobsInQueue.run({ queueType: 'reactions' });
+
+  const getMessageById = db.prepare(
+    'SELECT conversationId FROM messages WHERE id IS ?'
+  );
+
+  reactionsJobs.forEach(job => {
+    const { data, id } = job;
+
+    if (!isRecord(data)) {
+      logger.warn(`reactions queue job ${id} was missing valid data`);
+      return;
+    }
+
+    const { messageId } = data;
+    if (typeof messageId !== 'string') {
+      logger.warn(`reactions queue job ${id} had a non-string messageId`);
+      return;
+    }
+
+    const message = getMessageById.get([messageId]);
+    if (!message) {
+      logger.warn(`Unable to find message for reaction job ${id}`);
+      return;
+    }
+
+    const { conversationId } = message;
+    if (typeof conversationId !== 'string') {
+      logger.warn(`reactions queue job ${id} had a non-string conversationId`);
+      return;
+    }
+
+    const newJob = {
+      ...job,
+      queueType: 'conversation',
+      data: {
+        ...data,
+        type: 'Reaction',
+        conversationId,
+      },
+    };
+
+    insertJob(db, newJob);
+  });
+
+  // Then make sure all normal send job data has a type
+  const normalSendJobs = getJobsInQueue(db, 'normal send');
+  deleteJobsInQueue.run({ queueType: 'normal send' });
+
+  normalSendJobs.forEach(job => {
+    const { data, id } = job;
+
+    if (!isRecord(data)) {
+      logger.warn(`normal send queue job ${id} was missing valid data`);
+      return;
+    }
+
+    const newJob = {
+      ...job,
+      queueType: 'conversation',
+      data: {
+        ...data,
+        type: 'NormalMessage',
+      },
+    };
+
+    insertJob(db, newJob);
+  });
-
-    db.pragma('user_version = 51');
-  })();
-
-  logger.info('updateToSchemaVersion51: success!');
 }
@@ -3,36 +3,20 @@
 
 import type { Database } from '@signalapp/sqlcipher';
 
-import type { LoggerType } from '../../types/Logging';
-
-export default function updateToSchemaVersion52(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 52) {
-    return;
-  }
-
-  db.transaction(() => {
-    db.exec(
+export default function updateToSchemaVersion52(db: Database): void {
+  db.exec(
+    `
+    -- Create indices that don't have storyId in them so that
+    -- '_storyIdPredicate' could be optimized.
+
+    -- See migration 47
+    CREATE INDEX messages_conversation_no_story_id ON messages
+      (conversationId, isStory, received_at, sent_at);
+
+    -- See migration 50
+    CREATE INDEX messages_unread_no_story_id ON messages
+      (conversationId, readStatus, isStory, received_at, sent_at)
+      WHERE readStatus IS NOT NULL;
+    `
+  );
-
-    db.pragma('user_version = 52');
-  })();
-
-  logger.info('updateToSchemaVersion52: success!');
 }
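Migration 52's own comment explains the motivation: queries that do not constrain storyId cannot order efficiently through the storyId-bearing indexes from migrations 47 and 50, so a second pair without storyId is added. The sketch below shows the two query shapes these paired indexes are presumably meant to serve; it is illustrative only, and the actual application queries may differ in columns, ordering, and limits.

// Illustrative only: two query shapes, one per index family. Not code from this commit.
import type { Database } from '@signalapp/sqlcipher';

function sketchConversationQueries(
  db: Database,
  conversationId: string,
  storyId?: string
): void {
  if (storyId !== undefined) {
    // storyId constrained: messages_conversation (conversationId, isStory, storyId, ...) can serve this.
    db.prepare(
      `
      SELECT json FROM messages
      WHERE conversationId = $conversationId AND isStory IS 0 AND storyId IS $storyId
      ORDER BY received_at DESC, sent_at DESC
      LIMIT 50;
      `
    ).all({ conversationId, storyId });
  } else {
    // No storyId predicate: messages_conversation_no_story_id (conversationId, isStory, ...) can serve this.
    db.prepare(
      `
      SELECT json FROM messages
      WHERE conversationId = $conversationId AND isStory IS 0
      ORDER BY received_at DESC, sent_at DESC
      LIMIT 50;
      `
    ).all({ conversationId });
  }
}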
@@ -7,14 +7,9 @@ import type { LoggerType } from '../../types/Logging';
 import { jsonToObject } from '../util';
 
 export default function updateToSchemaVersion53(
-  currentVersion: number,
   db: Database,
   logger: LoggerType
 ): void {
-  if (currentVersion >= 53) {
-    return;
-  }
-
   type LegacyConversationType = {
     id: string;
     groupId: string;

@@ -52,7 +47,7 @@ export default function updateToSchemaVersion53(
     };
 
     logger.info(
-      `updateToSchemaVersion53: Updating ${logId} with ` +
+      `Updating ${logId} with ` +
         `${legacy.bannedMembersV2.length} banned members`
     );
 

@@ -64,33 +59,27 @@ export default function updateToSchemaVersion53(
     return true;
   };
 
-  db.transaction(() => {
-    const allConversations = db
+  const allConversations = db
+    .prepare(
+      `
+      SELECT json
+      FROM conversations
+      WHERE type = 'group'
+      ORDER BY id ASC;
+      `,
+      { pluck: true }
+    )
+    .all<string>()
+    .map(json => jsonToObject<ConversationType>(json));
 
-    logger.info(
-      'updateToSchemaVersion53: About to iterate through ' +
-        `${allConversations.length} conversations`
-    );
+  logger.info(
+    `About to iterate through ${allConversations.length} conversations`
+  );
 
   let updated = 0;
   for (const convo of allConversations) {
     updated += upgradeConversation(convo) ? 1 : 0;
   }
 
-    logger.info(`updateToSchemaVersion53: Updated ${updated} conversations`);
-
-    db.pragma('user_version = 53');
-  })();
-
-  logger.info('updateToSchemaVersion53: success!');
+  logger.info(`Updated ${updated} conversations`);
 }
@@ -3,25 +3,10 @@
 
 import type { Database } from '@signalapp/sqlcipher';
 
-import type { LoggerType } from '../../types/Logging';
-
-export default function updateToSchemaVersion54(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 54) {
-    return;
-  }
-
-  db.transaction(() => {
-    db.exec(
+export default function updateToSchemaVersion54(db: Database): void {
+  db.exec(
+    `
+    ALTER TABLE unprocessed ADD COLUMN receivedAtCounter INTEGER;
+    `
+  );
-
-    db.pragma('user_version = 54');
-  })();
-
-  logger.info('updateToSchemaVersion54: success!');
 }
@@ -8,61 +8,45 @@ import { isRecord } from '../../util/isRecord';
 import { isIterable } from '../../util/iterables';

 export default function updateToSchemaVersion55(
-  currentVersion: number,
   db: WritableDB,
   logger: LoggerType
 ): void {
-  if (currentVersion >= 55) {
-    return;
-  }
-
-  db.transaction(() => {
-    const deleteJobsInQueue = db.prepare(
-      'DELETE FROM jobs WHERE queueType = $queueType'
-    );
+  const deleteJobsInQueue = db.prepare(
+    'DELETE FROM jobs WHERE queueType = $queueType'
+  );

   // First, make sure that report spam job data has e164 and serverGuids
   const reportSpamJobs = getJobsInQueue(db, 'report spam');
   deleteJobsInQueue.run({ queueType: 'report spam' });

   reportSpamJobs.forEach(job => {
     const { data, id } = job;

     if (!isRecord(data)) {
-      logger.warn(
-        `updateToSchemaVersion55: report spam queue job ${id} was missing valid data`
-      );
+      logger.warn(`report spam queue job ${id} was missing valid data`);
       return;
     }

     const { e164, serverGuids } = data;
     if (typeof e164 !== 'string') {
-      logger.warn(
-        `updateToSchemaVersion55: report spam queue job ${id} had a non-string e164`
-      );
+      logger.warn(`report spam queue job ${id} had a non-string e164`);
       return;
     }

     if (!isIterable(serverGuids)) {
-      logger.warn(
-        `updateToSchemaVersion55: report spam queue job ${id} had a non-iterable serverGuids`
-      );
+      logger.warn(`report spam queue job ${id} had a non-iterable serverGuids`);
       return;
     }

     const newJob = {
       ...job,
       queueType: 'report spam',
       data: {
         uuid: e164, // this looks odd, but they are both strings and interchangeable in the server API
         serverGuids,
       },
     };

     insertJob(db, newJob);
   });
-
-    db.pragma('user_version = 55');
-  })();
-
-  logger.info('updateToSchemaVersion55: success!');
 }

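For reference, the rewrite above only changes the shape of the job's data payload. The sketch below is not from this commit; it just spells out that mapping, with the old e164 string carried in the new uuid field as the inline comment in the migration notes.

// Illustrative shape only; type and function names are hypothetical.
type ReportSpamJobData = Readonly<{
  uuid: string;
  serverGuids: ReadonlyArray<string>;
}>;

function rewriteReportSpamData(
  e164: string,
  serverGuids: ReadonlyArray<string>
): ReportSpamJobData {
  // The server accepts the e164 string in the uuid field, per the migration.
  return { uuid: e164, serverGuids };
}
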
@@ -5,69 +5,37 @@ import type { Database } from '@signalapp/sqlcipher';
 import { ReadStatus } from '../../messages/MessageReadStatus';
 import { SeenStatus } from '../../MessageSeenStatus';

-import type { LoggerType } from '../../types/Logging';
-
-export default function updateToSchemaVersion56(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 56) {
-    return;
-  }
-
-  db.transaction(() => {
-    db.exec(
+export default function updateToSchemaVersion56(db: Database): void {
+  db.exec(
     `
     --- Add column to messages table

     ALTER TABLE messages ADD COLUMN seenStatus NUMBER default 0;

     --- Add index to make searching on this field easy

     CREATE INDEX messages_unseen_no_story ON messages
       (conversationId, seenStatus, isStory, received_at, sent_at)
       WHERE
         seenStatus IS NOT NULL;

     CREATE INDEX messages_unseen_with_story ON messages
       (conversationId, seenStatus, isStory, storyId, received_at, sent_at)
       WHERE
         seenStatus IS NOT NULL;

     --- Update seenStatus to UnseenStatus.Unseen for certain messages
     --- (NULL included because 'timer-notification' in 1:1 convos had type = NULL)

     UPDATE messages
       SET
         seenStatus = ${SeenStatus.Unseen}
       WHERE
         readStatus = ${ReadStatus.Unread} AND
         (
           type IS NULL
           OR
           type IN (
             'call-history',
             'change-number-notification',
             'chat-session-refreshed',
             'delivery-issue',
             'group',
             'incoming',
             'keychange',
             'timer-notification',
             'verified-change'
           )
         );

     --- Set readStatus to ReadStatus.Read for all other message types

     UPDATE messages
       SET
         readStatus = ${ReadStatus.Read}
       WHERE
         readStatus = ${ReadStatus.Unread} AND
         type IS NOT NULL AND
         type NOT IN (
           'call-history',
           'change-number-notification',
           'chat-session-refreshed',
           'delivery-issue',
           'group',
           'incoming',
           'keychange',
           'timer-notification',
           'verified-change'
         );
     `
   );
-
-    db.pragma('user_version = 56');
-  })();
-
-  logger.info('updateToSchemaVersion56: success!');
 }

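The two partial indexes added here are shaped for per-conversation "unseen" lookups. The query below is illustrative only; it assumes the existing id, conversationId, isStory, received_at, and sent_at columns and is not the application's own query.

// Illustrative access pattern for messages_unseen_no_story; names assumed.
import type { Database } from '@signalapp/sqlcipher';
import { SeenStatus } from '../../MessageSeenStatus';

function getUnseenNonStoryMessageIds(
  db: Database,
  conversationId: string
): Array<string> {
  const rows = db
    .prepare(
      `
      SELECT id FROM messages
      WHERE
        conversationId = $conversationId AND
        seenStatus = $unseen AND
        isStory IS 0
      ORDER BY received_at ASC, sent_at ASC;
      `
    )
    .all({ conversationId, unseen: SeenStatus.Unseen });

  return rows.map(row => (row as { id: string }).id);
}
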
@@ -3,27 +3,11 @@

 import type { Database } from '@signalapp/sqlcipher';

-import type { LoggerType } from '../../types/Logging';
-
-export default function updateToSchemaVersion57(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 57) {
-    return;
-  }
-
-  db.transaction(() => {
-    db.exec(
-      `
-      DELETE FROM messages
-      WHERE type IS 'message-history-unsynced';
-      `
-    );
-
-    db.pragma('user_version = 57');
-  })();
-
-  logger.info('updateToSchemaVersion57: success!');
+export default function updateToSchemaVersion57(db: Database): void {
+  db.exec(
+    `
+    DELETE FROM messages
+    WHERE type IS 'message-history-unsynced';
+    `
+  );
 }

@ -5,73 +5,43 @@ import type { Database } from '@signalapp/sqlcipher';
|
||||||
import { ReadStatus } from '../../messages/MessageReadStatus';
|
import { ReadStatus } from '../../messages/MessageReadStatus';
|
||||||
import { SeenStatus } from '../../MessageSeenStatus';
|
import { SeenStatus } from '../../MessageSeenStatus';
|
||||||
|
|
||||||
import type { LoggerType } from '../../types/Logging';
|
export default function updateToSchemaVersion58(db: Database): void {
|
||||||
|
db.exec(
|
||||||
|
`
|
||||||
|
--- Promote unread status in JSON to SQL column
|
||||||
|
|
||||||
export default function updateToSchemaVersion58(
|
-- NOTE: This was disabled because the 'unread' json field was deprecated
|
||||||
currentVersion: number,
|
-- in b0750e5f4e1f79f0f177b17cbe06d688431f948d, but the old value was kept
|
||||||
db: Database,
|
-- in the messages created before the release of that commit.
|
||||||
logger: LoggerType
|
--
|
||||||
): void {
|
-- UPDATE messages
|
||||||
if (currentVersion >= 58) {
|
-- SET
|
||||||
return;
|
-- readStatus = ${ReadStatus.Unread},
|
||||||
}
|
-- seenStatus = ${SeenStatus.Unseen}
|
||||||
|
-- WHERE
|
||||||
|
-- json_extract(json, '$.unread') IS true OR
|
||||||
|
-- json_extract(json, '$.unread') IS 1;
|
||||||
|
|
||||||
db.transaction(() => {
|
--- Clean up all old messages that still have a null read status
|
||||||
db.exec(
|
--- Note: we don't need to update seenStatus, because that was defaulted to zero
|
||||||
`
|
|
||||||
--- Promote unread status in JSON to SQL column
|
|
||||||
|
|
||||||
-- NOTE: This was disabled because the 'unread' json field was deprecated
|
UPDATE messages
|
||||||
-- in b0750e5f4e1f79f0f177b17cbe06d688431f948d, but the old value was kept
|
SET
|
||||||
-- in the messages created before the release of that commit.
|
readStatus = ${ReadStatus.Read}
|
||||||
--
|
WHERE
|
||||||
-- UPDATE messages
|
readStatus IS NULL;
|
||||||
-- SET
|
|
||||||
-- readStatus = ${ReadStatus.Unread},
|
|
||||||
-- seenStatus = ${SeenStatus.Unseen}
|
|
||||||
-- WHERE
|
|
||||||
-- json_extract(json, '$.unread') IS true OR
|
|
||||||
-- json_extract(json, '$.unread') IS 1;
|
|
||||||
|
|
||||||
--- Clean up all old messages that still have a null read status
|
--- Re-run unseen/unread queries from migration 56
|
||||||
--- Note: we don't need to update seenStatus, because that was defaulted to zero
|
|
||||||
|
|
||||||
UPDATE messages
|
UPDATE messages
|
||||||
SET
|
SET
|
||||||
readStatus = ${ReadStatus.Read}
|
seenStatus = ${SeenStatus.Unseen}
|
||||||
WHERE
|
WHERE
|
||||||
readStatus IS NULL;
|
readStatus = ${ReadStatus.Unread} AND
|
||||||
|
(
|
||||||
--- Re-run unseen/unread queries from migration 56
|
type IS NULL
|
||||||
|
OR
|
||||||
UPDATE messages
|
type IN (
|
||||||
SET
|
|
||||||
seenStatus = ${SeenStatus.Unseen}
|
|
||||||
WHERE
|
|
||||||
readStatus = ${ReadStatus.Unread} AND
|
|
||||||
(
|
|
||||||
type IS NULL
|
|
||||||
OR
|
|
||||||
type IN (
|
|
||||||
'call-history',
|
|
||||||
'change-number-notification',
|
|
||||||
'chat-session-refreshed',
|
|
||||||
'delivery-issue',
|
|
||||||
'group',
|
|
||||||
'incoming',
|
|
||||||
'keychange',
|
|
||||||
'timer-notification',
|
|
||||||
'verified-change'
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
UPDATE messages
|
|
||||||
SET
|
|
||||||
readStatus = ${ReadStatus.Read}
|
|
||||||
WHERE
|
|
||||||
readStatus = ${ReadStatus.Unread} AND
|
|
||||||
type IS NOT NULL AND
|
|
||||||
type NOT IN (
|
|
||||||
'call-history',
|
'call-history',
|
||||||
'change-number-notification',
|
'change-number-notification',
|
||||||
'chat-session-refreshed',
|
'chat-session-refreshed',
|
||||||
|
@ -81,56 +51,70 @@ export default function updateToSchemaVersion58(
|
||||||
'keychange',
|
'keychange',
|
||||||
'timer-notification',
|
'timer-notification',
|
||||||
'verified-change'
|
'verified-change'
|
||||||
);
|
|
||||||
|
|
||||||
--- (new) Ensure these message types are not unread, just unseen
|
|
||||||
|
|
||||||
UPDATE messages
|
|
||||||
SET
|
|
||||||
readStatus = ${ReadStatus.Read}
|
|
||||||
WHERE
|
|
||||||
readStatus = ${ReadStatus.Unread} AND
|
|
||||||
(
|
|
||||||
type IN (
|
|
||||||
'change-number-notification',
|
|
||||||
'keychange'
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
--- (new) Ensure that these message types are neither unseen nor unread
|
|
||||||
|
|
||||||
UPDATE messages
|
|
||||||
SET
|
|
||||||
readStatus = ${ReadStatus.Read},
|
|
||||||
seenStatus = ${SeenStatus.Seen}
|
|
||||||
WHERE
|
|
||||||
type IN (
|
|
||||||
'group-v1-migration',
|
|
||||||
'message-history-unsynced',
|
|
||||||
'outgoing',
|
|
||||||
'profile-change',
|
|
||||||
'universal-timer-notification'
|
|
||||||
);
|
|
||||||
|
|
||||||
--- Make sure JSON reflects SQL columns
|
|
||||||
|
|
||||||
UPDATE messages
|
|
||||||
SET
|
|
||||||
json = json_patch(
|
|
||||||
json,
|
|
||||||
json_object(
|
|
||||||
'readStatus', readStatus,
|
|
||||||
'seenStatus', seenStatus
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
WHERE
|
);
|
||||||
readStatus IS NOT NULL OR
|
|
||||||
seenStatus IS NOT 0;
|
|
||||||
`
|
|
||||||
);
|
|
||||||
|
|
||||||
db.pragma('user_version = 58');
|
UPDATE messages
|
||||||
})();
|
SET
|
||||||
|
readStatus = ${ReadStatus.Read}
|
||||||
|
WHERE
|
||||||
|
readStatus = ${ReadStatus.Unread} AND
|
||||||
|
type IS NOT NULL AND
|
||||||
|
type NOT IN (
|
||||||
|
'call-history',
|
||||||
|
'change-number-notification',
|
||||||
|
'chat-session-refreshed',
|
||||||
|
'delivery-issue',
|
||||||
|
'group',
|
||||||
|
'incoming',
|
||||||
|
'keychange',
|
||||||
|
'timer-notification',
|
||||||
|
'verified-change'
|
||||||
|
);
|
||||||
|
|
||||||
logger.info('updateToSchemaVersion58: success!');
|
--- (new) Ensure these message types are not unread, just unseen
|
||||||
|
|
||||||
|
UPDATE messages
|
||||||
|
SET
|
||||||
|
readStatus = ${ReadStatus.Read}
|
||||||
|
WHERE
|
||||||
|
readStatus = ${ReadStatus.Unread} AND
|
||||||
|
(
|
||||||
|
type IN (
|
||||||
|
'change-number-notification',
|
||||||
|
'keychange'
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
--- (new) Ensure that these message types are neither unseen nor unread
|
||||||
|
|
||||||
|
UPDATE messages
|
||||||
|
SET
|
||||||
|
readStatus = ${ReadStatus.Read},
|
||||||
|
seenStatus = ${SeenStatus.Seen}
|
||||||
|
WHERE
|
||||||
|
type IN (
|
||||||
|
'group-v1-migration',
|
||||||
|
'message-history-unsynced',
|
||||||
|
'outgoing',
|
||||||
|
'profile-change',
|
||||||
|
'universal-timer-notification'
|
||||||
|
);
|
||||||
|
|
||||||
|
--- Make sure JSON reflects SQL columns
|
||||||
|
|
||||||
|
UPDATE messages
|
||||||
|
SET
|
||||||
|
json = json_patch(
|
||||||
|
json,
|
||||||
|
json_object(
|
||||||
|
'readStatus', readStatus,
|
||||||
|
'seenStatus', seenStatus
|
||||||
|
)
|
||||||
|
)
|
||||||
|
WHERE
|
||||||
|
readStatus IS NOT NULL OR
|
||||||
|
seenStatus IS NOT 0;
|
||||||
|
`
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
|
@@ -3,26 +3,11 @@

 import type { Database } from '@signalapp/sqlcipher';

-import type { LoggerType } from '../../types/Logging';
-
-export default function updateToSchemaVersion59(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 59) {
-    return;
-  }
-
-  db.transaction(() => {
-    db.exec(
-      `
-      CREATE INDEX unprocessed_byReceivedAtCounter ON unprocessed
-        (receivedAtCounter)
-      `
-    );
-
-    db.pragma('user_version = 59');
-  })();
-
-  logger.info('updateToSchemaVersion59: success!');
+export default function updateToSchemaVersion59(db: Database): void {
+  db.exec(
+    `
+    CREATE INDEX unprocessed_byReceivedAtCounter ON unprocessed
+      (receivedAtCounter)
+    `
+  );
 }

@@ -3,38 +3,22 @@

 import type { Database } from '@signalapp/sqlcipher';

-import type { LoggerType } from '../../types/Logging';
-
-export default function updateToSchemaVersion60(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 60) {
-    return;
-  }
-
-  db.transaction(() => {
-    db.exec(
+export default function updateToSchemaVersion60(db: Database): void {
+  db.exec(
     `
     DROP INDEX expiring_message_by_conversation_and_received_at;

     CREATE INDEX expiring_message_by_conversation_and_received_at
       ON messages
       (
         conversationId,
         storyId,
         expirationStartTimestamp,
         expireTimer,
         received_at
       )
       WHERE isStory IS 0 AND type IS 'incoming';
     `
   );
-
-    db.pragma('user_version = 60');
-  })();
-
-  logger.info('updateToSchemaVersion60: success!');
 }

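The recreated partial index targets incoming, non-story messages whose disappearing-message timer is running. A hedged sketch of that access pattern, not taken from the application code:

// Illustrative only; column names are taken from the index definition above.
import type { Database } from '@signalapp/sqlcipher';

function getExpiringIncomingMessages(
  db: Database,
  conversationId: string
): Array<unknown> {
  return db
    .prepare(
      `
      SELECT id, expirationStartTimestamp, expireTimer
      FROM messages
      WHERE
        isStory IS 0 AND
        type IS 'incoming' AND
        conversationId = $conversationId AND
        storyId IS NULL AND
        expirationStartTimestamp IS NOT NULL AND
        expireTimer IS NOT NULL
      ORDER BY received_at ASC;
      `
    )
    .all({ conversationId });
}
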
@@ -3,42 +3,26 @@

 import type { Database } from '@signalapp/sqlcipher';

-import type { LoggerType } from '../../types/Logging';
-
-export default function updateToSchemaVersion61(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 61) {
-    return;
-  }
-
-  db.transaction(() => {
-    db.exec(
+export default function updateToSchemaVersion61(db: Database): void {
+  db.exec(
     `
     ALTER TABLE storyDistributions DROP COLUMN avatarKey;
     ALTER TABLE storyDistributions DROP COLUMN avatarUrlPath;

     ALTER TABLE storyDistributions ADD COLUMN deletedAtTimestamp INTEGER;
     ALTER TABLE storyDistributions ADD COLUMN allowsReplies INTEGER;
     ALTER TABLE storyDistributions ADD COLUMN isBlockList INTEGER;

     ALTER TABLE storyDistributions ADD COLUMN storageID STRING;
     ALTER TABLE storyDistributions ADD COLUMN storageVersion INTEGER;
     ALTER TABLE storyDistributions ADD COLUMN storageUnknownFields BLOB;
     ALTER TABLE storyDistributions ADD COLUMN storageNeedsSync INTEGER;

     ALTER TABLE messages ADD COLUMN storyDistributionListId STRING;

     CREATE INDEX messages_by_distribution_list
       ON messages(storyDistributionListId, received_at)
       WHERE storyDistributionListId IS NOT NULL;
     `
   );
-
-    db.pragma('user_version = 61');
-  })();
-
-  logger.info('updateToSchemaVersion61: success!');
 }

@@ -3,26 +3,10 @@

 import type { Database } from '@signalapp/sqlcipher';

-import type { LoggerType } from '../../types/Logging';
-
-export default function updateToSchemaVersion62(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 62) {
-    return;
-  }
-
-  db.transaction(() => {
-    db.exec(
-      `
-      ALTER TABLE sendLogPayloads ADD COLUMN urgent INTEGER;
-      `
-    );
-
-    db.pragma('user_version = 62');
-  })();
-
-  logger.info('updateToSchemaVersion62: success!');
+export default function updateToSchemaVersion62(db: Database): void {
+  db.exec(
+    `
+    ALTER TABLE sendLogPayloads ADD COLUMN urgent INTEGER;
+    `
+  );
 }

@@ -3,26 +3,10 @@

 import type { Database } from '@signalapp/sqlcipher';

-import type { LoggerType } from '../../types/Logging';
-
-export default function updateToSchemaVersion63(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 63) {
-    return;
-  }
-
-  db.transaction(() => {
-    db.exec(
-      `
-      ALTER TABLE unprocessed ADD COLUMN urgent INTEGER;
-      `
-    );
-
-    db.pragma('user_version = 63');
-  })();
-
-  logger.info('updateToSchemaVersion63: success!');
+export default function updateToSchemaVersion63(db: Database): void {
+  db.exec(
+    `
+    ALTER TABLE unprocessed ADD COLUMN urgent INTEGER;
+    `
+  );
 }

@@ -3,36 +3,20 @@

 import type { Database } from '@signalapp/sqlcipher';

-import type { LoggerType } from '../../types/Logging';
-
-export default function updateToSchemaVersion64(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 64) {
-    return;
-  }
-
-  db.transaction(() => {
-    db.exec(
+export default function updateToSchemaVersion64(db: Database): void {
+  db.exec(
     `
     ALTER TABLE preKeys
       ADD COLUMN ourUuid STRING
       GENERATED ALWAYS AS (json_extract(json, '$.ourUuid'));

     CREATE INDEX preKeys_ourUuid ON preKeys (ourUuid);

     ALTER TABLE signedPreKeys
       ADD COLUMN ourUuid STRING
       GENERATED ALWAYS AS (json_extract(json, '$.ourUuid'));

     CREATE INDEX signedPreKeys_ourUuid ON signedPreKeys (ourUuid);
     `
   );
-
-    db.pragma('user_version = 64');
-  })();
-
-  logger.info('updateToSchemaVersion64: success!');
 }

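Because ourUuid is a generated column, it is derived from the existing json column and needs no backfill; rows written before this migration expose it immediately. An illustrative lookup that the new preKeys_ourUuid index serves, assuming the better-sqlite3-style prepare()/get() API used elsewhere in these migrations:

// Illustrative only; not the application's own query.
import type { Database } from '@signalapp/sqlcipher';

function countPreKeysForOurUuid(db: Database, ourUuid: string): number {
  const row = db
    .prepare('SELECT count(*) AS count FROM preKeys WHERE ourUuid IS $ourUuid')
    .get({ ourUuid });

  return (row as { count: number }).count;
}
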
@@ -3,60 +3,44 @@

 import type { Database } from '@signalapp/sqlcipher';

-import type { LoggerType } from '../../types/Logging';
-
-export default function updateToSchemaVersion65(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 65) {
-    return;
-  }
-
-  db.transaction(() => {
-    db.exec(
+export default function updateToSchemaVersion65(db: Database): void {
+  db.exec(
     `
     ALTER TABLE sticker_packs ADD COLUMN position INTEGER DEFAULT 0 NOT NULL;
     ALTER TABLE sticker_packs ADD COLUMN storageID STRING;
     ALTER TABLE sticker_packs ADD COLUMN storageVersion INTEGER;
     ALTER TABLE sticker_packs ADD COLUMN storageUnknownFields BLOB;
     ALTER TABLE sticker_packs
       ADD COLUMN storageNeedsSync
       INTEGER DEFAULT 0 NOT NULL;

     CREATE TABLE uninstalled_sticker_packs (
       id STRING NOT NULL PRIMARY KEY,
       uninstalledAt NUMBER NOT NULL,
       storageID STRING,
       storageVersion NUMBER,
       storageUnknownFields BLOB,
       storageNeedsSync INTEGER NOT NULL
     );

     -- Set initial position

     UPDATE sticker_packs
     SET
       position = (row_number - 1),
       storageNeedsSync = 1
     FROM (
       SELECT id, row_number() OVER (ORDER BY lastUsed DESC) as row_number
       FROM sticker_packs
     ) as ordered_pairs
     WHERE sticker_packs.id IS ordered_pairs.id;

     -- See: getAllStickerPacks

     CREATE INDEX sticker_packs_by_position_and_id ON sticker_packs (
       position ASC,
       id ASC
     );
     `
   );
-
-    db.pragma('user_version = 65');
-  })();
-
-  logger.info('updateToSchemaVersion65: success!');
 }

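After the row_number() backfill, position plus id is the canonical ordering, which is exactly what sticker_packs_by_position_and_id covers. A minimal read-back sketch, not taken from getAllStickerPacks itself:

// Illustrative only; function name is hypothetical.
import type { Database } from '@signalapp/sqlcipher';

function getStickerPackIdsInOrder(db: Database): Array<string> {
  const rows = db
    .prepare('SELECT id FROM sticker_packs ORDER BY position ASC, id ASC')
    .all();

  return rows.map(row => (row as { id: string }).id);
}
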
@@ -3,27 +3,11 @@

 import type { Database } from '@signalapp/sqlcipher';

-import type { LoggerType } from '../../types/Logging';
-
-export default function updateToSchemaVersion66(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 66) {
-    return;
-  }
-
-  db.transaction(() => {
-    db.exec(
-      `
-      ALTER TABLE sendLogPayloads
-        ADD COLUMN hasPniSignatureMessage INTEGER DEFAULT 0 NOT NULL;
-      `
-    );
-
-    db.pragma('user_version = 66');
-  })();
-
-  logger.info('updateToSchemaVersion66: success!');
+export default function updateToSchemaVersion66(db: Database): void {
+  db.exec(
+    `
+    ALTER TABLE sendLogPayloads
+      ADD COLUMN hasPniSignatureMessage INTEGER DEFAULT 0 NOT NULL;
+    `
+  );
 }

@@ -3,26 +3,10 @@

 import type { Database } from '@signalapp/sqlcipher';

-import type { LoggerType } from '../../types/Logging';
-
-export default function updateToSchemaVersion67(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 67) {
-    return;
-  }
-
-  db.transaction(() => {
-    db.exec(
-      `
-      ALTER TABLE unprocessed ADD COLUMN story INTEGER;
-      `
-    );
-
-    db.pragma('user_version = 67');
-  })();
-
-  logger.info('updateToSchemaVersion67: success!');
+export default function updateToSchemaVersion67(db: Database): void {
+  db.exec(
+    `
+    ALTER TABLE unprocessed ADD COLUMN story INTEGER;
+    `
+  );
 }

@@ -3,29 +3,13 @@

 import type { Database } from '@signalapp/sqlcipher';

-import type { LoggerType } from '../../types/Logging';
-
-export default function updateToSchemaVersion68(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 68) {
-    return;
-  }
-
-  db.transaction(() => {
-    db.exec(
-      `
-      ALTER TABLE messages
-        DROP COLUMN deprecatedSourceDevice;
-      ALTER TABLE unprocessed
-        DROP COLUMN deprecatedSourceDevice;
-      `
-    );
-
-    db.pragma('user_version = 68');
-  })();
-
-  logger.info('updateToSchemaVersion68: success!');
+export default function updateToSchemaVersion68(db: Database): void {
+  db.exec(
+    `
+    ALTER TABLE messages
+      DROP COLUMN deprecatedSourceDevice;
+    ALTER TABLE unprocessed
+      DROP COLUMN deprecatedSourceDevice;
+    `
+  );
 }

@@ -3,31 +3,15 @@

 import type { Database } from '@signalapp/sqlcipher';

-import type { LoggerType } from '../../types/Logging';
-
-export default function updateToSchemaVersion69(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 69) {
-    return;
-  }
-
-  db.transaction(() => {
-    db.exec(
+export default function updateToSchemaVersion69(db: Database): void {
+  db.exec(
     `
     DROP TABLE IF EXISTS groupCallRings;

     CREATE TABLE groupCallRingCancellations(
       ringId INTEGER PRIMARY KEY,
       createdAt INTEGER NOT NULL
     );
     `
   );
-
-    db.pragma('user_version = 69');
-  })();
-
-  logger.info('updateToSchemaVersion69: success!');
 }

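A hedged sketch of how the new table might be used: record a cancellation keyed by ringId, then prune stale rows. The INSERT OR REPLACE choice and the one-day retention window are illustrative, not taken from the application code.

// Illustrative only; names and retention policy are assumptions.
import type { Database } from '@signalapp/sqlcipher';

const ONE_DAY = 24 * 60 * 60 * 1000;

function cancelGroupCallRing(
  db: Database,
  ringId: number,
  now = Date.now()
): void {
  // Remember that this ring was cancelled.
  db.prepare(
    `
    INSERT OR REPLACE INTO groupCallRingCancellations (ringId, createdAt)
    VALUES ($ringId, $createdAt);
    `
  ).run({ ringId, createdAt: now });

  // Drop cancellations old enough that the ring can no longer be active.
  db.prepare(
    'DELETE FROM groupCallRingCancellations WHERE createdAt < $cutoff'
  ).run({ cutoff: now - ONE_DAY });
}
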
@@ -3,27 +3,11 @@

 import type { Database } from '@signalapp/sqlcipher';

-import type { LoggerType } from '../../types/Logging';
-
-export default function updateToSchemaVersion70(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 70) {
-    return;
-  }
-
-  db.transaction(() => {
-    // Used in `getAllStories`.
-    db.exec(
-      `
-      CREATE INDEX messages_by_storyId ON messages (storyId);
-      `
-    );
-
-    db.pragma('user_version = 70');
-  })();
-
-  logger.info('updateToSchemaVersion70: success!');
+export default function updateToSchemaVersion70(db: Database): void {
+  // Used in `getAllStories`.
+  db.exec(
+    `
+    CREATE INDEX messages_by_storyId ON messages (storyId);
+    `
+  );
 }

@ -3,121 +3,105 @@
|
||||||
|
|
||||||
import type { Database } from '@signalapp/sqlcipher';
|
import type { Database } from '@signalapp/sqlcipher';
|
||||||
|
|
||||||
import type { LoggerType } from '../../types/Logging';
|
export default function updateToSchemaVersion71(db: Database): void {
|
||||||
|
db.exec(
|
||||||
|
`
|
||||||
|
--- These will be re-added below
|
||||||
|
DROP INDEX messages_preview;
|
||||||
|
DROP INDEX messages_activity;
|
||||||
|
DROP INDEX message_user_initiated;
|
||||||
|
|
||||||
export default function updateToSchemaVersion71(
|
--- These will also be re-added below
|
||||||
currentVersion: number,
|
ALTER TABLE messages DROP COLUMN shouldAffectActivity;
|
||||||
db: Database,
|
ALTER TABLE messages DROP COLUMN shouldAffectPreview;
|
||||||
logger: LoggerType
|
ALTER TABLE messages DROP COLUMN isUserInitiatedMessage;
|
||||||
): void {
|
|
||||||
if (currentVersion >= 71) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
db.transaction(() => {
|
--- Note: These generated columns were originally introduced in migration 47, and
|
||||||
db.exec(
|
--- are mostly the same
|
||||||
`
|
|
||||||
--- These will be re-added below
|
--- Based on the current list (model-types.ts), the types which DO affect activity:
|
||||||
DROP INDEX messages_preview;
|
--- NULL (old, malformed data)
|
||||||
DROP INDEX messages_activity;
|
--- call-history
|
||||||
DROP INDEX message_user_initiated;
|
--- chat-session-refreshed (deprecated)
|
||||||
|
--- delivery-issue
|
||||||
|
--- group (deprecated)
|
||||||
|
--- group-v2-change
|
||||||
|
--- incoming
|
||||||
|
--- outgoing
|
||||||
|
--- timer-notification
|
||||||
|
|
||||||
--- These will also be re-added below
|
--- (change: added conversation-merge, keychange, and phone-number-discovery)
|
||||||
ALTER TABLE messages DROP COLUMN shouldAffectActivity;
|
ALTER TABLE messages
|
||||||
ALTER TABLE messages DROP COLUMN shouldAffectPreview;
|
ADD COLUMN shouldAffectActivity INTEGER
|
||||||
ALTER TABLE messages DROP COLUMN isUserInitiatedMessage;
|
GENERATED ALWAYS AS (
|
||||||
|
type IS NULL
|
||||||
|
OR
|
||||||
|
type NOT IN (
|
||||||
|
'change-number-notification',
|
||||||
|
'conversation-merge',
|
||||||
|
'group-v1-migration',
|
||||||
|
'keychange',
|
||||||
|
'message-history-unsynced',
|
||||||
|
'phone-number-discovery',
|
||||||
|
'profile-change',
|
||||||
|
'story',
|
||||||
|
'universal-timer-notification',
|
||||||
|
'verified-change'
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
--- Note: These generated columns were originally introduced in migration 47, and
|
--- (change: added conversation-merge and phone-number-discovery
|
||||||
--- are mostly the same
|
--- (now matches the above list)
|
||||||
|
ALTER TABLE messages
|
||||||
--- Based on the current list (model-types.ts), the types which DO affect activity:
|
ADD COLUMN shouldAffectPreview INTEGER
|
||||||
--- NULL (old, malformed data)
|
GENERATED ALWAYS AS (
|
||||||
--- call-history
|
type IS NULL
|
||||||
--- chat-session-refreshed (deprecated)
|
OR
|
||||||
--- delivery-issue
|
type NOT IN (
|
||||||
--- group (deprecated)
|
'change-number-notification',
|
||||||
--- group-v2-change
|
'conversation-merge',
|
||||||
--- incoming
|
'group-v1-migration',
|
||||||
--- outgoing
|
'keychange',
|
||||||
--- timer-notification
|
'message-history-unsynced',
|
||||||
|
'phone-number-discovery',
|
||||||
|
'profile-change',
|
||||||
|
'story',
|
||||||
|
'universal-timer-notification',
|
||||||
|
'verified-change'
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
--- (change: added conversation-merge, keychange, and phone-number-discovery)
|
--- Note: This list only differs from the above on these types:
|
||||||
ALTER TABLE messages
|
--- group-v2-change
|
||||||
ADD COLUMN shouldAffectActivity INTEGER
|
|
||||||
GENERATED ALWAYS AS (
|
|
||||||
type IS NULL
|
|
||||||
OR
|
|
||||||
type NOT IN (
|
|
||||||
'change-number-notification',
|
|
||||||
'conversation-merge',
|
|
||||||
'group-v1-migration',
|
|
||||||
'keychange',
|
|
||||||
'message-history-unsynced',
|
|
||||||
'phone-number-discovery',
|
|
||||||
'profile-change',
|
|
||||||
'story',
|
|
||||||
'universal-timer-notification',
|
|
||||||
'verified-change'
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
--- (change: added conversation-merge and phone-number-discovery
|
--- (change: added conversation-merge and phone-number-discovery
|
||||||
--- (now matches the above list)
|
ALTER TABLE messages
|
||||||
ALTER TABLE messages
|
ADD COLUMN isUserInitiatedMessage INTEGER
|
||||||
ADD COLUMN shouldAffectPreview INTEGER
|
GENERATED ALWAYS AS (
|
||||||
GENERATED ALWAYS AS (
|
type IS NULL
|
||||||
type IS NULL
|
OR
|
||||||
OR
|
type NOT IN (
|
||||||
type NOT IN (
|
'change-number-notification',
|
||||||
'change-number-notification',
|
'conversation-merge',
|
||||||
'conversation-merge',
|
'group-v1-migration',
|
||||||
'group-v1-migration',
|
'group-v2-change',
|
||||||
'keychange',
|
'keychange',
|
||||||
'message-history-unsynced',
|
'message-history-unsynced',
|
||||||
'phone-number-discovery',
|
'phone-number-discovery',
|
||||||
'profile-change',
|
'profile-change',
|
||||||
'story',
|
'story',
|
||||||
'universal-timer-notification',
|
'universal-timer-notification',
|
||||||
'verified-change'
|
'verified-change'
|
||||||
)
|
)
|
||||||
);
|
);
|
||||||
|
|
||||||
--- Note: This list only differs from the above on these types:
|
CREATE INDEX messages_preview ON messages
|
||||||
--- group-v2-change
|
(conversationId, shouldAffectPreview, isGroupLeaveEventFromOther, expiresAt, received_at, sent_at);
|
||||||
|
|
||||||
--- (change: added conversation-merge and phone-number-discovery
|
CREATE INDEX messages_activity ON messages
|
||||||
ALTER TABLE messages
|
(conversationId, shouldAffectActivity, isTimerChangeFromSync, isGroupLeaveEventFromOther, received_at, sent_at);
|
||||||
ADD COLUMN isUserInitiatedMessage INTEGER
|
|
||||||
GENERATED ALWAYS AS (
|
|
||||||
type IS NULL
|
|
||||||
OR
|
|
||||||
type NOT IN (
|
|
||||||
'change-number-notification',
|
|
||||||
'conversation-merge',
|
|
||||||
'group-v1-migration',
|
|
||||||
'group-v2-change',
|
|
||||||
'keychange',
|
|
||||||
'message-history-unsynced',
|
|
||||||
'phone-number-discovery',
|
|
||||||
'profile-change',
|
|
||||||
'story',
|
|
||||||
'universal-timer-notification',
|
|
||||||
'verified-change'
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE INDEX messages_preview ON messages
|
CREATE INDEX message_user_initiated ON messages (isUserInitiatedMessage);
|
||||||
(conversationId, shouldAffectPreview, isGroupLeaveEventFromOther, expiresAt, received_at, sent_at);
|
`
|
||||||
|
);
|
||||||
CREATE INDEX messages_activity ON messages
|
|
||||||
(conversationId, shouldAffectActivity, isTimerChangeFromSync, isGroupLeaveEventFromOther, received_at, sent_at);
|
|
||||||
|
|
||||||
CREATE INDEX message_user_initiated ON messages (isUserInitiatedMessage);
|
|
||||||
`
|
|
||||||
);
|
|
||||||
|
|
||||||
db.pragma('user_version = 71');
|
|
||||||
})();
|
|
||||||
|
|
||||||
logger.info('updateToSchemaVersion71: success!');
|
|
||||||
}
|
}
|
||||||
|
|
|
@@ -2,37 +2,22 @@
 // SPDX-License-Identifier: AGPL-3.0-only

 import type { Database } from '@signalapp/sqlcipher';
-import type { LoggerType } from '../../types/Logging';

-export default function updateToSchemaVersion72(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 72) {
-    return;
-  }
-
-  db.transaction(() => {
-    db.exec(
+export default function updateToSchemaVersion72(db: Database): void {
+  db.exec(
     `
     ALTER TABLE messages
       ADD COLUMN callId TEXT
       GENERATED ALWAYS AS (
         json_extract(json, '$.callHistoryDetails.callId')
       );
     ALTER TABLE messages
       ADD COLUMN callMode TEXT
       GENERATED ALWAYS AS (
         json_extract(json, '$.callHistoryDetails.callMode')
       );
     CREATE INDEX messages_call ON messages
       (conversationId, type, callMode, callId);
     `
   );
-
-    db.pragma('user_version = 72');
-  })();
-
-  logger.info('updateToSchemaVersion72: success!');
 }

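The generated callId and callMode columns let call-history rows be addressed without parsing the JSON blob, and messages_call covers exactly that lookup. An illustrative query, not the application's own:

// Illustrative only; column names come from the index definition above.
import type { Database } from '@signalapp/sqlcipher';

function findCallHistoryMessageId(
  db: Database,
  conversationId: string,
  callId: string
): string | undefined {
  const row = db
    .prepare(
      `
      SELECT id FROM messages
      WHERE
        conversationId = $conversationId AND
        type IS 'call-history' AND
        callId IS $callId;
      `
    )
    .get({ conversationId, callId });

  return row ? (row as { id: string }).id : undefined;
}
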
@ -3,110 +3,94 @@
|
||||||
|
|
||||||
import type { Database } from '@signalapp/sqlcipher';
|
import type { Database } from '@signalapp/sqlcipher';
|
||||||
|
|
||||||
import type { LoggerType } from '../../types/Logging';
|
export default function updateToSchemaVersion73(db: Database): void {
|
||||||
|
db.exec(
|
||||||
|
`
|
||||||
|
--- Delete deprecated notifications
|
||||||
|
DELETE FROM messages WHERE type IS 'phone-number-discovery';
|
||||||
|
|
||||||
export default function updateToSchemaVersion73(
|
--- These will be re-added below
|
||||||
currentVersion: number,
|
DROP INDEX messages_preview;
|
||||||
db: Database,
|
DROP INDEX messages_activity;
|
||||||
logger: LoggerType
|
DROP INDEX message_user_initiated;
|
||||||
): void {
|
|
||||||
if (currentVersion >= 73) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
db.transaction(() => {
|
--- These will also be re-added below
|
||||||
db.exec(
|
ALTER TABLE messages DROP COLUMN shouldAffectActivity;
|
||||||
`
|
ALTER TABLE messages DROP COLUMN shouldAffectPreview;
|
||||||
--- Delete deprecated notifications
|
ALTER TABLE messages DROP COLUMN isUserInitiatedMessage;
|
||||||
DELETE FROM messages WHERE type IS 'phone-number-discovery';
|
|
||||||
|
|
||||||
--- These will be re-added below
|
--- Note: These generated columns were originally introduced in migration 71, and
|
||||||
DROP INDEX messages_preview;
|
--- are mostly the same
|
||||||
DROP INDEX messages_activity;
|
|
||||||
DROP INDEX message_user_initiated;
|
|
||||||
|
|
||||||
--- These will also be re-added below
|
--- (change: removed phone-number-discovery)
|
||||||
ALTER TABLE messages DROP COLUMN shouldAffectActivity;
|
ALTER TABLE messages
|
||||||
ALTER TABLE messages DROP COLUMN shouldAffectPreview;
|
ADD COLUMN shouldAffectActivity INTEGER
|
||||||
ALTER TABLE messages DROP COLUMN isUserInitiatedMessage;
|
GENERATED ALWAYS AS (
|
||||||
|
type IS NULL
|
||||||
|
OR
|
||||||
|
type NOT IN (
|
||||||
|
'change-number-notification',
|
||||||
|
'conversation-merge',
|
||||||
|
'group-v1-migration',
|
||||||
|
'keychange',
|
||||||
|
'message-history-unsynced',
|
||||||
|
'profile-change',
|
||||||
|
'story',
|
||||||
|
'universal-timer-notification',
|
||||||
|
'verified-change'
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
--- Note: These generated columns were originally introduced in migration 71, and
|
--- (change: removed phone-number-discovery
|
||||||
--- are mostly the same
|
--- (now matches the above list)
|
||||||
|
ALTER TABLE messages
|
||||||
|
ADD COLUMN shouldAffectPreview INTEGER
|
||||||
|
GENERATED ALWAYS AS (
|
||||||
|
type IS NULL
|
||||||
|
OR
|
||||||
|
type NOT IN (
|
||||||
|
'change-number-notification',
|
||||||
|
'conversation-merge',
|
||||||
|
'group-v1-migration',
|
||||||
|
'keychange',
|
||||||
|
'message-history-unsynced',
|
||||||
|
'profile-change',
|
||||||
|
'story',
|
||||||
|
'universal-timer-notification',
|
||||||
|
'verified-change'
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
--- (change: removed phone-number-discovery)
|
--- Note: This list only differs from the above on these types:
|
||||||
ALTER TABLE messages
|
--- group-v2-change
|
||||||
ADD COLUMN shouldAffectActivity INTEGER
|
|
||||||
GENERATED ALWAYS AS (
|
|
||||||
type IS NULL
|
|
||||||
OR
|
|
||||||
type NOT IN (
|
|
||||||
'change-number-notification',
|
|
||||||
'conversation-merge',
|
|
||||||
'group-v1-migration',
|
|
||||||
'keychange',
|
|
||||||
'message-history-unsynced',
|
|
||||||
'profile-change',
|
|
||||||
'story',
|
|
||||||
'universal-timer-notification',
|
|
||||||
'verified-change'
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
--- (change: removed phone-number-discovery
|
--- (change: removed phone-number-discovery
|
||||||
--- (now matches the above list)
|
ALTER TABLE messages
|
||||||
ALTER TABLE messages
|
ADD COLUMN isUserInitiatedMessage INTEGER
|
||||||
ADD COLUMN shouldAffectPreview INTEGER
|
GENERATED ALWAYS AS (
|
||||||
GENERATED ALWAYS AS (
|
type IS NULL
|
||||||
type IS NULL
|
OR
|
||||||
OR
|
type NOT IN (
|
||||||
type NOT IN (
|
'change-number-notification',
|
||||||
'change-number-notification',
|
'conversation-merge',
|
||||||
'conversation-merge',
|
'group-v1-migration',
|
||||||
'group-v1-migration',
|
'group-v2-change',
|
||||||
'keychange',
|
'keychange',
|
||||||
'message-history-unsynced',
|
'message-history-unsynced',
|
||||||
'profile-change',
|
'profile-change',
|
||||||
'story',
|
'story',
|
||||||
'universal-timer-notification',
|
'universal-timer-notification',
|
||||||
'verified-change'
|
'verified-change'
|
||||||
)
|
)
|
||||||
);
|
);
|
||||||
|
|
||||||
--- Note: This list only differs from the above on these types:
|
CREATE INDEX messages_preview ON messages
|
||||||
--- group-v2-change
|
(conversationId, shouldAffectPreview, isGroupLeaveEventFromOther, expiresAt, received_at, sent_at);
|
||||||
|
|
||||||
--- (change: removed phone-number-discovery
|
CREATE INDEX messages_activity ON messages
|
||||||
ALTER TABLE messages
|
(conversationId, shouldAffectActivity, isTimerChangeFromSync, isGroupLeaveEventFromOther, received_at, sent_at);
|
||||||
ADD COLUMN isUserInitiatedMessage INTEGER
|
|
||||||
GENERATED ALWAYS AS (
|
|
||||||
type IS NULL
|
|
||||||
OR
|
|
||||||
type NOT IN (
|
|
||||||
'change-number-notification',
|
|
||||||
'conversation-merge',
|
|
||||||
'group-v1-migration',
|
|
||||||
'group-v2-change',
|
|
||||||
'keychange',
|
|
||||||
'message-history-unsynced',
|
|
||||||
'profile-change',
|
|
||||||
'story',
|
|
||||||
'universal-timer-notification',
|
|
||||||
'verified-change'
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE INDEX messages_preview ON messages
|
CREATE INDEX message_user_initiated ON messages (isUserInitiatedMessage);
|
||||||
(conversationId, shouldAffectPreview, isGroupLeaveEventFromOther, expiresAt, received_at, sent_at);
|
`
|
||||||
|
);
|
||||||
CREATE INDEX messages_activity ON messages
|
|
||||||
(conversationId, shouldAffectActivity, isTimerChangeFromSync, isGroupLeaveEventFromOther, received_at, sent_at);
|
|
||||||
|
|
||||||
CREATE INDEX message_user_initiated ON messages (isUserInitiatedMessage);
|
|
||||||
`
|
|
||||||
);
|
|
||||||
|
|
||||||
db.pragma('user_version = 73');
|
|
||||||
})();
|
|
||||||
|
|
||||||
logger.info('updateToSchemaVersion73: success!');
|
|
||||||
}
|
}
|
||||||
|
|
|
@@ -3,37 +3,21 @@

 import type { Database } from '@signalapp/sqlcipher';

-import type { LoggerType } from '../../types/Logging';
-
-export default function updateToSchemaVersion74(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 74) {
-    return;
-  }
-
-  db.transaction(() => {
-    db.exec(
+export default function updateToSchemaVersion74(db: Database): void {
+  db.exec(
     `
     -- Previously: (isUserInitiatedMessage)
     DROP INDEX message_user_initiated;

     CREATE INDEX message_user_initiated ON messages (conversationId, isUserInitiatedMessage);

     -- Previously: (unread, conversationId)
     DROP INDEX reactions_unread;

     CREATE INDEX reactions_unread ON reactions (
       conversationId,
       unread
     );
     `
   );
-
-    db.pragma('user_version = 74');
-  })();
-
-  logger.info('updateToSchemaVersion74: success!');
 }

@ -1,25 +1,8 @@
|
||||||
// Copyright 2023 Signal Messenger, LLC
|
// Copyright 2023 Signal Messenger, LLC
|
||||||
// SPDX-License-Identifier: AGPL-3.0-only
|
// SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
import type { Database } from '@signalapp/sqlcipher';
|
export default function updateToSchemaVersion75(): void {
|
||||||
|
// This was previously a FTS5 migration, but we had to reorder the
|
||||||
import type { LoggerType } from '../../types/Logging';
|
// migrations for backports.
|
||||||
|
// See: migrations 76 and 77.
|
||||||
export default function updateToSchemaVersion75(
|
|
||||||
currentVersion: number,
|
|
||||||
db: Database,
|
|
||||||
logger: LoggerType
|
|
||||||
): void {
|
|
||||||
if (currentVersion >= 75) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
db.transaction(() => {
|
|
||||||
// This was previously a FTS5 migration, but we had to reorder the
|
|
||||||
// migrations for backports.
|
|
||||||
// See: migrations 76 and 77.
|
|
||||||
db.pragma('user_version = 75');
|
|
||||||
})();
|
|
||||||
|
|
||||||
logger.info('updateToSchemaVersion75: success!');
|
|
||||||
}
|
}
|
||||||
|
|
|
@@ -3,57 +3,41 @@

 import type { Database } from '@signalapp/sqlcipher';

-import type { LoggerType } from '../../types/Logging';
-
-export default function updateToSchemaVersion76(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 76) {
-    return;
-  }
-
-  db.transaction(() => {
-    db.exec(
+export default function updateToSchemaVersion76(db: Database): void {
+  db.exec(
     `
     -- Re-created below
     DROP INDEX IF EXISTS message_expires_at;
     DROP INDEX IF EXISTS messages_preview;

     -- Create non-null expiresAt column
     ALTER TABLE messages
       DROP COLUMN expiresAt;

     ALTER TABLE messages
       ADD COLUMN
         expiresAt INT
         GENERATED ALWAYS
         AS (ifnull(
           expirationStartTimestamp + (expireTimer * 1000),
           ${Number.MAX_SAFE_INTEGER}
         ));

     -- Re-create indexes
     -- Note the "s" at the end of "messages"
     CREATE INDEX messages_expires_at ON messages (
       expiresAt
     );

     -- Note that expiresAt is intentionally dropped from the index since
     -- expiresAt > $now is likely to be true so we just try selecting it
     -- *after* ordering by received_at/sent_at.
     CREATE INDEX messages_preview ON messages
       (conversationId, shouldAffectPreview, isGroupLeaveEventFromOther,
        received_at, sent_at);
     CREATE INDEX messages_preview_without_story ON messages
       (conversationId, shouldAffectPreview, isGroupLeaveEventFromOther,
        received_at, sent_at) WHERE storyId IS NULL;
     `
   );
-
-    db.pragma('user_version = 76');
-  })();
-
-  logger.info('updateToSchemaVersion76: success!');
 }

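Since expiresAt is now non-null and falls back to Number.MAX_SAFE_INTEGER when no timer is running, expiry checks reduce to a single comparison against the current time. A minimal sketch of such a query, not taken from the application code:

// Illustrative only; function name is hypothetical.
import type { Database } from '@signalapp/sqlcipher';

function getExpiredMessageIds(db: Database, now = Date.now()): Array<string> {
  // Rows with no running timer have expiresAt = Number.MAX_SAFE_INTEGER,
  // so they never satisfy this predicate.
  const rows = db
    .prepare('SELECT id FROM messages WHERE expiresAt <= $now')
    .all({ now });

  return rows.map(row => (row as { id: string }).id);
}
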
@@ -3,42 +3,26 @@

 import type { Database } from '@signalapp/sqlcipher';

-import type { LoggerType } from '../../types/Logging';
-
-export default function updateToSchemaVersion77(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 77) {
-    return;
-  }
-
-  db.transaction(() => {
-    db.exec(
+export default function updateToSchemaVersion77(db: Database): void {
+  db.exec(
     `
     -- Create FTS table with custom tokenizer from
     -- @signalapp/sqlcipher.

     DROP TABLE messages_fts;

     CREATE VIRTUAL TABLE messages_fts USING fts5(
       body,
       tokenize = 'signal_tokenizer'
     );

     -- Reindex messages
     -- Based on messages_on_insert trigger from migrations/45-stories.ts

     INSERT INTO messages_fts (rowid, body)
     SELECT rowid, body
     FROM messages
     WHERE isViewOnce IS NOT 1 AND storyId IS NULL;
     `
   );
-
-    db.pragma('user_version = 77');
-  })();
-
-  logger.info('updateToSchemaVersion77: success!');
 }

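A hedged sketch of querying the rebuilt index: an FTS5 MATCH against messages_fts, joined back to messages by rowid exactly as the reindex INSERT pairs the two tables. The function name and the rank ordering are illustrative choices, not the application's own search code.

// Illustrative only.
import type { Database } from '@signalapp/sqlcipher';

function searchMessageIds(db: Database, query: string): Array<string> {
  const rows = db
    .prepare(
      `
      SELECT messages.id
      FROM messages_fts
      INNER JOIN messages ON messages.rowid = messages_fts.rowid
      WHERE messages_fts.body MATCH $query
      ORDER BY rank;
      `
    )
    .all({ query });

  return rows.map(row => (row as { id: string }).id);
}
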
@@ -7,125 +7,112 @@ import type { WritableDB } from '../Interface';
import { getJobsInQueue, insertJob } from '../Server';

export default function updateToSchemaVersion78(
-  currentVersion: number,
  db: WritableDB,
  logger: LoggerType
): void {
-  if (currentVersion >= 78) {
-    return;
-  }
-
-  db.transaction(() => {
  const deleteJobsInQueue = db.prepare(
    'DELETE FROM jobs WHERE queueType = $queueType'
  );

  const queues = [
    {
      queueType: 'delivery receipts',
      jobDataKey: 'deliveryReceipts',
      jobDataIsArray: true,
      newReceiptsType: 'deliveryReceipt',
    },
    {
      queueType: 'read receipts',
      jobDataKey: 'readReceipts',
      jobDataIsArray: true,
      newReceiptsType: 'readReceipt',
    },
    {
      queueType: 'viewed receipts',
      jobDataKey: 'viewedReceipt',
      jobDataIsArray: false,
      newReceiptsType: 'viewedReceipt',
    },
  ];

  const getMessageById = db.prepare(
    'SELECT conversationId FROM messages WHERE id IS ?'
  );

  for (const queue of queues) {
    const prevJobs = getJobsInQueue(db, queue.queueType);
    deleteJobsInQueue.run({ queueType: queue.queueType });

    prevJobs.forEach(job => {
      const { data, id } = job;
      if (!isRecord(data)) {
        logger.warn(
-          `updateToSchemaVersion78: ${queue.queueType} queue job ${id} was missing valid data`
+          `${queue.queueType} queue job ${id} was missing valid data`
        );
        return;
      }

      const { messageId } = data;
      if (typeof messageId !== 'string') {
        logger.warn(
-          `updateToSchemaVersion78: ${queue.queueType} queue job ${id} had a non-string messageId`
+          `${queue.queueType} queue job ${id} had a non-string messageId`
        );
        return;
      }

      const message = getMessageById.get([messageId]);
      if (!message) {
-        logger.warn(
-          `updateToSchemaVersion78: Unable to find message for ${queue.queueType} job ${id}`
-        );
+        logger.warn(`Unable to find message for ${queue.queueType} job ${id}`);
        return;
      }

      const { conversationId } = message;
      if (typeof conversationId !== 'string') {
        logger.warn(
-          `updateToSchemaVersion78: ${queue.queueType} queue job ${id} had a non-string conversationId`
+          `${queue.queueType} queue job ${id} had a non-string conversationId`
        );
        return;
      }

      const oldReceipts = queue.jobDataIsArray
        ? data[queue.jobDataKey]
        : [data[queue.jobDataKey]];

      if (!Array.isArray(oldReceipts)) {
        logger.warn(
-          `updateToSchemaVersion78: ${queue.queueType} queue job ${id} had a non-array ${queue.jobDataKey}`
+          `${queue.queueType} queue job ${id} had a non-array ${queue.jobDataKey}`
        );
        return;
      }

      const newReceipts = [];

      for (const receipt of oldReceipts) {
        if (!isRecord(receipt)) {
          logger.warn(
-            `updateToSchemaVersion78: ${queue.queueType} queue job ${id} had a non-record receipt`
+            `${queue.queueType} queue job ${id} had a non-record receipt`
          );
          continue;
        }

        newReceipts.push({
          ...receipt,
          conversationId,
        });
      }

      const newJob = {
        ...job,
        queueType: 'conversation',
        data: {
          type: 'Receipts',
          conversationId,
          receiptsType: queue.newReceiptsType,
          receipts: newReceipts,
        },
      };

      insertJob(db, newJob);
    });
  }
-
-    db.pragma('user_version = 78');
-  })();
-
-  logger.info('updateToSchemaVersion78: success!');
}
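Every migration in this commit loses the same wrapper: the currentVersion guard, the db.transaction(...)() call, the user_version pragma bump, and the closing success log. That bookkeeping presumably moves into the shared migration runner. The sketch below only illustrates that idea; SCHEMA_MIGRATIONS, getUserVersion, and runMigrations are assumed names, not code from this commit, and the real runner may differ.

import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';

// Hypothetical registry mapping schema version -> simplified migration.
// (Names and shape are assumptions for illustration only.)
type Migration = (db: Database, logger: LoggerType) => void;
declare const SCHEMA_MIGRATIONS: ReadonlyArray<[number, Migration]>;

function getUserVersion(db: Database): number {
  // better-sqlite3-style pragma read; assumed to exist on this Database type.
  return db.pragma('user_version', { simple: true }) as number;
}

export function runMigrations(db: Database, logger: LoggerType): void {
  for (const [version, migrate] of SCHEMA_MIGRATIONS) {
    if (getUserVersion(db) >= version) {
      continue; // already applied
    }
    // The runner, not each migration, owns the transaction and the
    // user_version bump that used to be repeated in every file.
    db.transaction(() => {
      migrate(db, logger);
      db.pragma(`user_version = ${version}`);
    })();
    logger.info(`updateToSchemaVersion${version}: success!`);
  }
}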
@@ -3,30 +3,14 @@
import type { Database } from '@signalapp/sqlcipher';

-import type { LoggerType } from '../../types/Logging';
-
-export default function updateToSchemaVersion79(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 79) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion79(db: Database): void {
  db.exec(`
    DROP INDEX messages_hasVisualMediaAttachments;
    CREATE INDEX messages_hasVisualMediaAttachments
      ON messages (
        conversationId, isStory, storyId,
        hasVisualMediaAttachments, received_at, sent_at
      )
      WHERE hasVisualMediaAttachments IS 1;
  `);
-
-    db.pragma('user_version = 79');
-  })();
-
-  logger.info('updateToSchemaVersion79: success!');
}
@@ -3,32 +3,16 @@
import type { Database } from '@signalapp/sqlcipher';

-import type { LoggerType } from '../../types/Logging';
-
-export default function updateToSchemaVersion80(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 80) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion80(db: Database): void {
  db.exec(`
    CREATE TABLE edited_messages(
      fromId STRING,
      messageId STRING REFERENCES messages(id)
        ON DELETE CASCADE,
      sentAt INTEGER,
      readStatus INTEGER
    );

    CREATE INDEX edited_messages_sent_at ON edited_messages (sentAt);
  `);
-
-    db.pragma('user_version = 80');
-  })();
-
-  logger.info('updateToSchemaVersion80: success!');
}
@@ -3,116 +3,100 @@
import type { Database } from '@signalapp/sqlcipher';

-import type { LoggerType } from '../../types/Logging';
-
-export default function updateToSchemaVersion81(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 81) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion81(db: Database): void {
  db.exec(
    `
    --- These will be re-added below
    DROP INDEX messages_preview;
    DROP INDEX messages_preview_without_story;
    DROP INDEX messages_activity;
    DROP INDEX message_user_initiated;

    --- These will also be re-added below
    ALTER TABLE messages DROP COLUMN shouldAffectActivity;
    ALTER TABLE messages DROP COLUMN shouldAffectPreview;
    ALTER TABLE messages DROP COLUMN isUserInitiatedMessage;

    --- Note: These generated columns were previously modified in
    --- migration 73, and are mostly the same

    --- (change: added contact-removed-notification)
    ALTER TABLE messages
      ADD COLUMN shouldAffectActivity INTEGER
      GENERATED ALWAYS AS (
        type IS NULL
        OR
        type NOT IN (
          'change-number-notification',
          'contact-removed-notification',
          'conversation-merge',
          'group-v1-migration',
          'keychange',
          'message-history-unsynced',
          'profile-change',
          'story',
          'universal-timer-notification',
          'verified-change'
        )
      );

    --- (change: added contact-removed-notification)
    ALTER TABLE messages
      ADD COLUMN shouldAffectPreview INTEGER
      GENERATED ALWAYS AS (
        type IS NULL
        OR
        type NOT IN (
          'change-number-notification',
          'contact-removed-notification',
          'conversation-merge',
          'group-v1-migration',
          'keychange',
          'message-history-unsynced',
          'profile-change',
          'story',
          'universal-timer-notification',
          'verified-change'
        )
      );

    --- (change: added contact-removed-notification)
    ALTER TABLE messages
      ADD COLUMN isUserInitiatedMessage INTEGER
      GENERATED ALWAYS AS (
        type IS NULL
        OR
        type NOT IN (
          'change-number-notification',
          'contact-removed-notification',
          'conversation-merge',
          'group-v1-migration',
          'group-v2-change',
          'keychange',
          'message-history-unsynced',
          'profile-change',
          'story',
          'universal-timer-notification',
          'verified-change'
        )
      );

    --- From migration 76
    CREATE INDEX messages_preview ON messages
      (conversationId, shouldAffectPreview, isGroupLeaveEventFromOther,
      received_at, sent_at);

    --- From migration 76
    CREATE INDEX messages_preview_without_story ON messages
      (conversationId, shouldAffectPreview, isGroupLeaveEventFromOther,
      received_at, sent_at) WHERE storyId IS NULL;

    --- From migration 73
    CREATE INDEX messages_activity ON messages
      (conversationId, shouldAffectActivity, isTimerChangeFromSync, isGroupLeaveEventFromOther, received_at, sent_at);

    --- From migration 74
    CREATE INDEX message_user_initiated ON messages (conversationId, isUserInitiatedMessage);
    `
  );
-
-    db.pragma('user_version = 81');
-  })();
-
-  logger.info('updateToSchemaVersion81: success!');
}
@@ -3,27 +3,11 @@
import type { Database } from '@signalapp/sqlcipher';

-import type { LoggerType } from '../../types/Logging';
-
-export default function updateToSchemaVersion82(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 82) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion82(db: Database): void {
  db.exec(`
    ALTER TABLE edited_messages DROP COLUMN fromId;
    ALTER TABLE edited_messages ADD COLUMN conversationId STRING;

    CREATE INDEX edited_messages_unread ON edited_messages (readStatus, conversationId);
  `);
-
-    db.pragma('user_version = 82');
-  })();
-
-  logger.info('updateToSchemaVersion82: success!');
}
@@ -2,37 +2,22 @@
// SPDX-License-Identifier: AGPL-3.0-only

import type { Database } from '@signalapp/sqlcipher';
-import type { LoggerType } from '../../types/Logging';

-export default function updateToSchemaVersion83(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 83) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion83(db: Database): void {
  db.exec(
    `
    ALTER TABLE messages
      ADD COLUMN mentionsMe INTEGER NOT NULL DEFAULT 0;

    -- one which includes story data...
    CREATE INDEX messages_unread_mentions ON messages
      (conversationId, readStatus, mentionsMe, isStory, storyId, received_at, sent_at)
      WHERE readStatus IS NOT NULL;

    -- ...and one which doesn't, so storyPredicate works as expected
    CREATE INDEX messages_unread_mentions_no_story_id ON messages
      (conversationId, readStatus, mentionsMe, isStory, received_at, sent_at)
      WHERE isStory IS 0 AND readStatus IS NOT NULL;
    `
  );
-
-    db.pragma('user_version = 83');
-  })();
-
-  logger.info('updateToSchemaVersion83: success!');
}
@@ -2,59 +2,44 @@
// SPDX-License-Identifier: AGPL-3.0-only

import type { Database } from '@signalapp/sqlcipher';
-import type { LoggerType } from '../../types/Logging';

-export default function updateToSchemaVersion84(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 84) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion84(db: Database): void {
  const selectMentionsFromMessages = `
    SELECT messages.id, bodyRanges.value ->> 'mentionUuid' as mentionUuid, bodyRanges.value ->> 'start' as start, bodyRanges.value ->> 'length' as length
    FROM messages, json_each(messages.json ->> 'bodyRanges') as bodyRanges
    WHERE bodyRanges.value ->> 'mentionUuid' IS NOT NULL
  `;

  db.exec(`
    DROP TABLE IF EXISTS mentions;

    CREATE TABLE mentions (
      messageId REFERENCES messages(id) ON DELETE CASCADE,
      mentionUuid STRING,
      start INTEGER,
      length INTEGER
    );

    CREATE INDEX mentions_uuid ON mentions (mentionUuid);

    INSERT INTO mentions (messageId, mentionUuid, start, length)
    ${selectMentionsFromMessages};

    -- Note: any changes to this trigger must be reflected in
    -- Server.ts: enableMessageInsertTriggersAndBackfill
    CREATE TRIGGER messages_on_insert_insert_mentions AFTER INSERT ON messages
    BEGIN
      INSERT INTO mentions (messageId, mentionUuid, start, length)
      ${selectMentionsFromMessages}
      AND messages.id = new.id;
    END;

    CREATE TRIGGER messages_on_update_update_mentions AFTER UPDATE ON messages
    BEGIN
      DELETE FROM mentions WHERE messageId = new.id;
      INSERT INTO mentions (messageId, mentionUuid, start, length)
      ${selectMentionsFromMessages}
      AND messages.id = new.id;
    END;
  `);
-
-    db.pragma('user_version = 84');
-  })();
-
-  logger.info('updateToSchemaVersion84: success!');
}
@@ -3,40 +3,24 @@
import type { Database } from '@signalapp/sqlcipher';

-import type { LoggerType } from '../../types/Logging';
-
-export default function updateToSchemaVersion85(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 85) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion85(db: Database): void {
  db.exec(
    `CREATE TABLE kyberPreKeys(
      id STRING PRIMARY KEY NOT NULL,
      json TEXT NOT NULL,
      ourUuid STRING
        GENERATED ALWAYS AS (json_extract(json, '$.ourUuid'))
    );`
  );

  // To manage our ACI or PNI keys quickly
  db.exec('CREATE INDEX kyberPreKeys_ourUuid ON kyberPreKeys (ourUuid);');

  // Add time to all existing preKeys to allow us to expire them
  const now = Date.now();
  db.exec(
    `UPDATE preKeys SET
      json = json_set(json, '$.createdAt', ${now});
    `
  );
-
-    db.pragma('user_version = 85');
-  })();
-
-  logger.info('updateToSchemaVersion85: success!');
}
@@ -3,30 +3,14 @@
import type { Database } from '@signalapp/sqlcipher';

-import type { LoggerType } from '../../types/Logging';
-
-export default function updateToSchemaVersion86(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 86) {
-    return;
-  }
-
-  db.transaction(() => {
+export default function updateToSchemaVersion86(db: Database): void {
  // The key reason for this new schema is that all of our previous schemas start with
  // conversationId. This query is meant to find all replies to a given story, no
  // matter the conversation.
  db.exec(
    `CREATE INDEX messages_story_replies
      ON messages (storyId, received_at, sent_at)
      WHERE isStory IS 0;
    `
  );
-
-    db.pragma('user_version = 86');
-  })();
-
-  logger.info('updateToSchemaVersion86: success!');
}
@@ -3,8 +3,8 @@
import type { Database } from '@signalapp/sqlcipher';

-import type { LoggerType } from '../../types/Logging';
import { cleanKeys } from './920-clean-more-keys';
+import type { LoggerType } from '../../types/Logging';
import { sqlFragment } from '../util';

// Note: for many users, this is not what ran for them as migration 87. You can see that
@@ -13,42 +13,38 @@ import { sqlFragment } from '../util';
// The goal of this migration is to ensure that key cleanup happens before migration 88.

export default function updateToSchemaVersion87(
-  currentVersion: number,
  db: Database,
-  logger: LoggerType
+  logger: LoggerType,
+  startingVersion: number
): void {
  // We're checking for the version of the next migration here, not this version. We want
  // this to run if the user hasn't yet successfully run migration 88.
-  if (currentVersion >= 88) {
+  if (startingVersion >= 88) {
    return;
  }

-  db.transaction(() => {
-    cleanKeys(
-      db,
-      logger,
-      'updateToSchemaVersion87(cleanup)/kyberPreKeys',
-      sqlFragment`kyberPreKeys`,
-      sqlFragment`createdAt`,
-      sqlFragment`ourUuid`
-    );
-    cleanKeys(
-      db,
-      logger,
-      'updateToSchemaVersion87(cleanup)/preKeys',
-      sqlFragment`preKeys`,
-      sqlFragment`createdAt`,
-      sqlFragment`ourUuid`
-    );
-    cleanKeys(
-      db,
-      logger,
-      'updateToSchemaVersion87(cleanup)/signedPreKeys',
-      sqlFragment`signedPreKeys`,
-      sqlFragment`created_at`,
-      sqlFragment`ourUuid`
-    );
-  })();
-
-  logger.info('updateToSchemaVersion87(cleanup): success!');
+  cleanKeys(
+    db,
+    logger,
+    '(cleanup)/kyberPreKeys',
+    sqlFragment`kyberPreKeys`,
+    sqlFragment`createdAt`,
+    sqlFragment`ourUuid`
+  );
+  cleanKeys(
+    db,
+    logger,
+    '(cleanup)/preKeys',
+    sqlFragment`preKeys`,
+    sqlFragment`createdAt`,
+    sqlFragment`ourUuid`
+  );
+  cleanKeys(
+    db,
+    logger,
+    '(cleanup)/signedPreKeys',
+    sqlFragment`signedPreKeys`,
+    sqlFragment`created_at`,
+    sqlFragment`ourUuid`
+  );
}
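Migration 87 is the only signature in this set that still needs to know where the database started, so its old currentVersion parameter becomes an explicit third startingVersion argument. A hypothetical call site is sketched below; the db.pragma('user_version', { simple: true }) read is a better-sqlite3-style assumption about this Database type, not something shown in this diff.

// Sketch only: read the schema version once, before any migration runs,
// then pass it to the one migration that still depends on it.
const startingVersion = db.pragma('user_version', { simple: true }) as number;
updateToSchemaVersion87(db, logger, startingVersion);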
@@ -26,14 +26,9 @@ import { isNotNil } from '../../util/isNotNil';
//

export default function updateToSchemaVersion88(
-  currentVersion: number,
  db: Database,
  logger: LoggerType
): void {
-  if (currentVersion >= 88) {
-    return;
-  }
-
  // See updateToSchemaVersion84
  const selectMentionsFromMessages = `
    SELECT messages.id, bodyRanges.value ->> 'mentionAci' as mentionAci,
@@ -43,205 +38,199 @@ export default function updateToSchemaVersion88(
    WHERE bodyRanges.value ->> 'mentionAci' IS NOT NULL
  `;

-  db.transaction(() => {
  // Rename all columns and re-create all indexes first.
  db.exec(`
    --
    -- conversations
    --

    DROP INDEX conversations_uuid;

    ALTER TABLE conversations
      RENAME COLUMN uuid TO serviceId;

    -- See: updateToSchemaVersion20
    CREATE INDEX conversations_serviceId ON conversations(serviceId);

    --
    -- sessions
    --

    ALTER TABLE sessions
      RENAME COLUMN ourUuid TO ourServiceId;
    ALTER TABLE sessions
      RENAME COLUMN uuid TO serviceId;

    --
    -- messages
    --

    DROP INDEX messages_sourceUuid;
    DROP INDEX messages_preview;
    DROP INDEX messages_preview_without_story;
    DROP INDEX messages_activity;

    ALTER TABLE messages
      DROP COLUMN isGroupLeaveEventFromOther;
    ALTER TABLE messages
      DROP COLUMN isGroupLeaveEvent;

    ALTER TABLE messages
      RENAME COLUMN sourceUuid TO sourceServiceId;

    -- See: updateToSchemaVersion47
    ALTER TABLE messages
      ADD COLUMN isGroupLeaveEvent INTEGER
      GENERATED ALWAYS AS (
        type IS 'group-v2-change' AND
        json_array_length(json_extract(json, '$.groupV2Change.details')) IS 1 AND
        json_extract(json, '$.groupV2Change.details[0].type') IS 'member-remove' AND
        json_extract(json, '$.groupV2Change.from') IS NOT NULL AND
        json_extract(json, '$.groupV2Change.from') IS json_extract(json, '$.groupV2Change.details[0].aci')
      );

    ALTER TABLE messages
      ADD COLUMN isGroupLeaveEventFromOther INTEGER
      GENERATED ALWAYS AS (
        isGroupLeaveEvent IS 1
        AND
        isChangeCreatedByUs IS 0
      );

    -- See: updateToSchemaVersion25
    CREATE INDEX messages_sourceServiceId on messages(sourceServiceId);

    -- See: updateToSchemaVersion81
    CREATE INDEX messages_preview ON messages
      (conversationId, shouldAffectPreview, isGroupLeaveEventFromOther,
      received_at, sent_at);
    CREATE INDEX messages_preview_without_story ON messages
      (conversationId, shouldAffectPreview, isGroupLeaveEventFromOther,
      received_at, sent_at) WHERE storyId IS NULL;
    CREATE INDEX messages_activity ON messages
      (conversationId, shouldAffectActivity, isTimerChangeFromSync,
      isGroupLeaveEventFromOther, received_at, sent_at);

    --
    -- reactions
    --

    DROP INDEX reaction_identifier;

    ALTER TABLE reactions
      RENAME COLUMN targetAuthorUuid TO targetAuthorAci;

    -- See: updateToSchemaVersion29
    CREATE INDEX reaction_identifier ON reactions (
      emoji,
      targetAuthorAci,
      targetTimestamp
    );

    --
    -- unprocessed
    --

    ALTER TABLE unprocessed
      RENAME COLUMN sourceUuid TO sourceServiceId;

    --
    -- sendLogRecipients
    --

    DROP INDEX sendLogRecipientsByRecipient;

    ALTER TABLE sendLogRecipients
      RENAME COLUMN recipientUuid TO recipientServiceId;

    -- See: updateToSchemaVersion37
    CREATE INDEX sendLogRecipientsByRecipient
      ON sendLogRecipients (recipientServiceId, deviceId);

    --
    -- storyDistributionMembers
    --

    ALTER TABLE storyDistributionMembers
      RENAME COLUMN uuid TO serviceId;

    --
    -- mentions
    --

    DROP TRIGGER messages_on_update;
    DROP TRIGGER messages_on_insert_insert_mentions;
    DROP TRIGGER messages_on_update_update_mentions;
    DROP INDEX mentions_uuid;

    ALTER TABLE mentions
      RENAME COLUMN mentionUuid TO mentionAci;

    -- See: updateToSchemaVersion84
    CREATE INDEX mentions_aci ON mentions (mentionAci);

    --
    -- preKeys
    --

    DROP INDEX preKeys_ourUuid;
    DROP INDEX signedPreKeys_ourUuid;
    DROP INDEX kyberPreKeys_ourUuid;

    ALTER TABLE preKeys
      RENAME COLUMN ourUuid TO ourServiceId;
    ALTER TABLE signedPreKeys
      RENAME COLUMN ourUuid TO ourServiceId;
    ALTER TABLE kyberPreKeys
      RENAME COLUMN ourUuid TO ourServiceId;

    -- See: updateToSchemaVersion64
    CREATE INDEX preKeys_ourServiceId ON preKeys (ourServiceId);
    CREATE INDEX signedPreKeys_ourServiceId ON signedPreKeys (ourServiceId);
    CREATE INDEX kyberPreKeys_ourServiceId ON kyberPreKeys (ourServiceId);
  `);

  // Migrate JSON fields
  const { identifierToServiceId } = migrateConversations(db, logger);
  const ourServiceIds = migrateItems(db, logger);
  migrateSessions(db, ourServiceIds, logger);
  migrateMessages(db, logger);
  migratePreKeys(db, 'preKeys', ourServiceIds, logger);
  migratePreKeys(db, 'signedPreKeys', ourServiceIds, logger);
  migratePreKeys(db, 'kyberPreKeys', ourServiceIds, logger);
  migrateJobs(db, identifierToServiceId, logger);

  // Re-create triggers after updating messages
  db.exec(`
    -- See: updateToSchemaVersion45
    CREATE TRIGGER messages_on_update AFTER UPDATE ON messages
    WHEN
      (new.body IS NULL OR old.body IS NOT new.body) AND
      new.isViewOnce IS NOT 1 AND new.storyId IS NULL
    BEGIN
      DELETE FROM messages_fts WHERE rowid = old.rowid;
      INSERT INTO messages_fts
        (rowid, body)
      VALUES
        (new.rowid, new.body);
    END;

    -- See: updateToSchemaVersion84
    CREATE TRIGGER messages_on_insert_insert_mentions AFTER INSERT ON messages
    BEGIN
      INSERT INTO mentions (messageId, mentionAci, start, length)
      ${selectMentionsFromMessages}
      AND messages.id = new.id;
    END;

    CREATE TRIGGER messages_on_update_update_mentions AFTER UPDATE ON messages
    BEGIN
      DELETE FROM mentions WHERE messageId = new.id;
      INSERT INTO mentions (messageId, mentionAci, start, length)
      ${selectMentionsFromMessages}
      AND messages.id = new.id;
    END;
  `);
-
-    db.pragma('user_version = 88');
-  })();
-
-  logger.info('updateToSchemaVersion88: success!');
}
@@ -338,9 +327,7 @@ function migrateConversations(
    'UPDATE conversations SET json = $json WHERE id IS $id'
  );

-  logger.info(
-    `updateToSchemaVersion88: updating ${convos.length} conversations`
-  );
+  logger.info(`updating ${convos.length} conversations`);

  // Build lookup map for senderKeyInfo
  const identifierToServiceId = new Map<string, ServiceIdString>();
@@ -439,9 +426,7 @@ function migrateConversations(
      .map(({ identifier, ...rest }) => {
        const deviceServiceId = identifierToServiceId.get(identifier);
        if (!deviceServiceId) {
-          logger.warn(
-            `updateToSchemaVersion88: failed to resolve identifier ${identifier}`
-          );
+          logger.warn(`failed to resolve identifier ${identifier}`);
          return undefined;
        }
@@ -454,10 +439,7 @@ function migrateConversations(

      updateStmt.run({ id, json: JSON.stringify(modern) });
    } catch (error) {
-      logger.warn(
-        `updateToSchemaVersion88: failed to parse convo ${id} json`,
-        error
-      );
+      logger.warn(`failed to parse convo ${id} json`, error);
      continue;
    }
  }
@@ -511,12 +493,9 @@ function migrateItems(db: Database, logger: LoggerType): OurServiceIds {
    [legacyAci] = JSON.parse(uuidIdJson ?? '').value.split('.', 2);
  } catch (error) {
    if (uuidIdJson) {
-      logger.warn(
-        'updateToSchemaVersion88: failed to parse uuid_id item',
-        error
-      );
+      logger.warn('failed to parse uuid_id item', error);
    } else {
-      logger.info('updateToSchemaVersion88: Our UUID not found');
+      logger.info('Our UUID not found');
    }
  }
@@ -525,9 +504,9 @@ function migrateItems(db: Database, logger: LoggerType): OurServiceIds {
    legacyPni = JSON.parse(pniJson ?? '').value;
  } catch (error) {
    if (pniJson) {
-      logger.warn('updateToSchemaVersion88: failed to parse pni item', error);
+      logger.warn('failed to parse pni item', error);
    } else {
-      logger.info('updateToSchemaVersion88: Our PNI not found');
+      logger.info('Our PNI not found');
    }
  }
@@ -573,7 +552,7 @@ function migrateItems(db: Database, logger: LoggerType): OurServiceIds {

      updateStmt.run({ id, json: JSON.stringify(data) });
    } catch (error) {
-      logger.warn(`updateToSchemaVersion88: failed to parse ${id} item`, error);
+      logger.warn(`failed to parse ${id} item`, error);
    }
  }
  return { aci, pni, legacyAci, legacyPni };
@@ -611,21 +590,18 @@ function migrateSessions(
    `
  );

-  logger.info(`updateToSchemaVersion88: updating ${sessions.length} sessions`);
+  logger.info(`updating ${sessions.length} sessions`);
  for (const { id, serviceId, ourServiceId, json } of sessions) {
    const match = id.match(/^(.*):(.*)\.(.*)$/);
    if (!match) {
-      logger.warn(`updateToSchemaVersion88: invalid session id ${id}`);
+      logger.warn(`invalid session id ${id}`);
      continue;
    }
    let legacyData: JSONWithUnknownFields<Record<string, unknown>>;
    try {
      legacyData = JSON.parse(json);
    } catch (error) {
-      logger.warn(
-        `updateToSchemaVersion88: failed to parse session ${id}`,
-        error
-      );
+      logger.warn(`failed to parse session ${id}`, error);
      continue;
    }
@@ -642,7 +618,7 @@ function migrateSessions(
    );
    if (!newServiceId || !newOurServiceId) {
      logger.warn(
-        'updateToSchemaVersion88: failed to normalize session service ids',
+        'failed to normalize session service ids',
        serviceId,
        ourServiceId
      );
@@ -753,7 +729,7 @@ function migrateMessages(db: Database, logger: LoggerType): void {
    WHERE rowid = $rowid
  `);

-  logger.info('updateToSchemaVersion88: updating messages');
+  logger.info('updating messages');

  let totalMessages = 0;
  // eslint-disable-next-line no-constant-condition
@@ -832,15 +808,12 @@ function migrateMessages(db: Database, logger: LoggerType): void {
          json: JSON.stringify(updatedMessage),
        });
      } catch (error) {
-        logger.warn(
-          `updateToSchemaVersion88: failed to parse message ${id} json`,
-          error
-        );
+        logger.warn(`failed to parse message ${id} json`, error);
      }
    }
  }

-  logger.info(`updateToSchemaVersion88: updated ${totalMessages} messages`);
+  logger.info(`updated ${totalMessages} messages`);
}

// migratePreKeys works similarly to migrateSessions and does:
@@ -867,11 +840,11 @@ function migratePreKeys(
    WHERE id = $id
  `);

-  logger.info(`updateToSchemaVersion88: updating ${preKeys.length} ${table}`);
+  logger.info(`updating ${preKeys.length} ${table}`);
  for (const { id, json } of preKeys) {
    const match = id.match(/^(.*):(.*)$/);
    if (!match) {
-      logger.warn(`updateToSchemaVersion88: invalid ${table} id ${id}`);
+      logger.warn(`invalid ${table} id ${id}`);
      continue;
    }
@@ -879,10 +852,7 @@ function migratePreKeys(
    try {
      legacyData = JSON.parse(json);
    } catch (error) {
-      logger.warn(
-        `updateToSchemaVersion88: failed to parse ${table} ${id}`,
-        error
-      );
+      logger.warn(`failed to parse ${table} ${id}`, error);
      continue;
    }
@@ -1118,7 +1088,7 @@ function migrateJobs(
      const serviceId = identifierToServiceId.get(identifier);
      if (!serviceId) {
        logger.warn(
-          `updateToSchemaVersion88: failed to resolve identifier ${identifier} ` +
+          `failed to resolve identifier ${identifier} ` +
            `for job ${id}/${queueType}`
        );
        continue;
@@ -1137,14 +1107,11 @@ function migrateJobs(
        updateStmt.run({ id, data: JSON.stringify(updatedData) });
      }
    } catch (error) {
-      logger.warn(
-        `updateToSchemaVersion88: failed to migrate job ${id}/${queueType} json`,
-        error
-      );
+      logger.warn(`failed to migrate job ${id}/${queueType} json`, error);
    }
  }

-  logger.info(`updateToSchemaVersion88: updated ${updatedCount} jobs`);
+  logger.info(`updated ${updatedCount} jobs`);
}

//
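Migrations 88 and 89 lean on the sql tagged template from '../util', destructured as const [query, params] = sql`...` and fed to db.prepare(query).run(params). The real helper is not part of this diff; the toy below only illustrates that [query, params] shape, assuming positional '?' placeholders, and is not the project's implementation.

// Toy stand-in for the `sql` tag, for illustration only: it joins the literal
// chunks with '?' placeholders and returns the interpolated values as
// positional parameters.
function sqlToy(
  strings: TemplateStringsArray,
  ...values: Array<unknown>
): [string, Array<unknown>] {
  const query = strings.reduce(
    (acc, chunk, i) => (i === 0 ? chunk : `${acc}?${chunk}`),
    ''
  );
  return [query, [...values]];
}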
@ -94,7 +94,7 @@ function getPeerIdFromConversation(
|
||||||
if (conversation.type === 'private') {
|
if (conversation.type === 'private') {
|
||||||
if (conversation.serviceId == null) {
|
if (conversation.serviceId == null) {
|
||||||
logger.warn(
|
logger.warn(
|
||||||
`updateToSchemaVersion89: Private conversation (${conversation.id}) was missing serviceId (discoveredUnregisteredAt: ${conversation.discoveredUnregisteredAt})`
|
`Private conversation (${conversation.id}) was missing serviceId (discoveredUnregisteredAt: ${conversation.discoveredUnregisteredAt})`
|
||||||
);
|
);
|
||||||
return conversation.id;
|
return conversation.id;
|
||||||
}
|
}
|
||||||
|
@ -191,205 +191,194 @@ function convertLegacyCallDetails(
|
||||||
}
|
}
|
||||||
|
|
||||||
export default function updateToSchemaVersion89(
|
export default function updateToSchemaVersion89(
|
||||||
currentVersion: number,
|
|
||||||
db: WritableDB,
|
db: WritableDB,
|
||||||
logger: LoggerType
|
logger: LoggerType
|
||||||
): void {
|
): void {
|
||||||
if (currentVersion >= 89) {
|
const ourUuid = getOurUuid(db);
|
||||||
return;
|
|
||||||
|
const [createTable] = sql`
|
||||||
|
-- This table may have already existed from migration 87
|
||||||
|
CREATE TABLE IF NOT EXISTS callsHistory (
|
||||||
|
callId TEXT PRIMARY KEY,
|
||||||
|
peerId TEXT NOT NULL, -- conversation id (legacy) | uuid | groupId | roomId
|
||||||
|
ringerId TEXT DEFAULT NULL, -- ringer uuid
|
||||||
|
mode TEXT NOT NULL, -- enum "Direct" | "Group"
|
||||||
|
type TEXT NOT NULL, -- enum "Audio" | "Video" | "Group"
|
||||||
|
direction TEXT NOT NULL, -- enum "Incoming" | "Outgoing
|
||||||
|
-- Direct: enum "Pending" | "Missed" | "Accepted" | "Deleted"
|
||||||
|
-- Group: enum "GenericGroupCall" | "OutgoingRing" | "Ringing" | "Joined" | "Missed" | "Declined" | "Accepted" | "Deleted"
|
||||||
|
status TEXT NOT NULL,
|
||||||
|
timestamp INTEGER NOT NULL,
|
||||||
|
UNIQUE (callId, peerId) ON CONFLICT FAIL
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Update peerId to be uuid or groupId
|
||||||
|
UPDATE callsHistory
|
||||||
|
SET peerId = (
|
||||||
|
SELECT
|
||||||
|
CASE
|
||||||
|
WHEN conversations.type = 'private' THEN conversations.serviceId
|
||||||
|
WHEN conversations.type = 'group' THEN conversations.groupId
|
||||||
|
END
|
||||||
|
FROM conversations
|
||||||
|
WHERE callsHistory.peerId IS conversations.id
|
||||||
|
AND callsHistory.peerId IS NOT conversations.serviceId
|
||||||
|
)
|
||||||
|
WHERE EXISTS (
|
||||||
|
SELECT 1
|
||||||
|
FROM conversations
|
||||||
|
WHERE callsHistory.peerId IS conversations.id
|
||||||
|
AND callsHistory.peerId IS NOT conversations.serviceId
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS callsHistory_order on callsHistory (timestamp DESC);
|
||||||
|
CREATE INDEX IF NOT EXISTS callsHistory_byConversation ON callsHistory (peerId);
|
||||||
|
-- For 'getCallHistoryGroupData':
|
||||||
|
-- This index should target the subqueries for 'possible_parent' and 'possible_children'
|
||||||
|
CREATE INDEX IF NOT EXISTS callsHistory_callAndGroupInfo_optimize on callsHistory (
|
||||||
|
direction,
|
||||||
|
peerId,
|
||||||
|
timestamp DESC,
|
||||||
|
status
|
||||||
|
);
|
||||||
|
`;
|
||||||
|
|
||||||
|
db.exec(createTable);
|
||||||
|
|
||||||
|
const [selectQuery] = sql`
|
||||||
|
SELECT
|
||||||
|
messages.json AS messageJson,
|
||||||
|
conversations.id AS conversationId,
|
||||||
|
conversations.json AS conversationJson
|
||||||
|
FROM messages
|
||||||
|
LEFT JOIN conversations ON conversations.id = messages.conversationId
|
||||||
|
WHERE messages.type = 'call-history'
|
||||||
|
-- Some of these messages were already migrated
|
||||||
|
AND messages.json->'callHistoryDetails' IS NOT NULL
|
||||||
|
-- Sort from oldest to newest, so that newer messages can overwrite older
|
||||||
|
ORDER BY messages.received_at ASC, messages.sent_at ASC;
|
||||||
|
`;
|
||||||
|
|
||||||
|
// Must match query above
|
||||||
|
type CallHistoryRow = {
|
||||||
|
messageJson: string;
|
||||||
|
conversationId: string;
|
||||||
|
conversationJson: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
const rows: Array<CallHistoryRow> = db.prepare(selectQuery).all();
|
||||||
|
|
||||||
|
for (const row of rows) {
|
||||||
|
const { messageJson, conversationId, conversationJson } = row;
|
||||||
|
const message = jsonToObject<MessageWithCallHistoryDetails>(messageJson);
|
||||||
|
const conversation = jsonToObject<ConversationType>(conversationJson);
|
||||||
|
|
||||||
|
if (!isObject(conversation)) {
|
||||||
|
logger.warn(
|
||||||
|
`Private conversation (${conversationId}) ` +
|
||||||
|
'has non-object json column'
|
||||||
|
);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
const details = message.callHistoryDetails;
|
||||||
|
|
||||||
|
const peerId = getPeerIdFromConversation(conversation, logger);
|
||||||
|
|
||||||
|
const callHistory = convertLegacyCallDetails(
|
||||||
|
ourUuid,
|
||||||
|
peerId,
|
||||||
|
message,
|
||||||
|
details,
|
||||||
|
logger
|
||||||
|
);
|
||||||
|
|
||||||
|
const [insertQuery, insertParams] = sql`
|
||||||
|
-- Using 'OR REPLACE' because in some earlier versions of call history
|
||||||
|
-- we had a bug where we would insert duplicate call history entries
|
||||||
|
-- for the same callId and peerId.
|
||||||
|
-- We're assuming here that the latest call history entry is the most
|
||||||
|
-- accurate.
|
||||||
|
INSERT OR REPLACE INTO callsHistory (
|
||||||
|
callId,
|
||||||
|
peerId,
|
||||||
|
ringerId,
|
||||||
|
mode,
|
||||||
|
type,
|
||||||
|
direction,
|
||||||
|
status,
|
||||||
|
timestamp
|
||||||
|
) VALUES (
|
||||||
|
${callHistory.callId},
|
||||||
|
${callHistory.peerId},
|
||||||
|
${callHistory.ringerId},
|
||||||
|
${callHistory.mode},
|
||||||
|
${callHistory.type},
|
||||||
|
${callHistory.direction},
|
||||||
|
${callHistory.status},
|
||||||
|
${callHistory.timestamp}
|
||||||
|
)
|
||||||
|
`;
|
||||||
|
|
||||||
|
db.prepare(insertQuery).run(insertParams);
|
||||||
|
|
||||||
|
const messageId = message.id;
|
||||||
|
strictAssert(messageId != null, 'message.id must exist');
|
||||||
|
|
||||||
|
const [updateQuery, updateParams] = sql`
|
||||||
|
UPDATE messages
|
||||||
|
SET json = JSON_PATCH(json, ${JSON.stringify({
|
||||||
|
callHistoryDetails: null, // delete
|
||||||
|
callId: callHistory.callId,
|
||||||
|
})})
|
||||||
|
WHERE id = ${messageId}
|
||||||
|
`;
|
||||||
|
|
||||||
|
db.prepare(updateQuery).run(updateParams);
|
||||||
}
|
}
|
||||||
|
|
||||||
  const [dropIndex] = sql`
    DROP INDEX IF EXISTS messages_call;
  `;

  db.exec(dropIndex);

  try {
    const [dropColumnQuery] = sql`
      ALTER TABLE messages
      DROP COLUMN callMode;
    `;

    db.exec(dropColumnQuery);
  } catch (error) {
    if (!error.message.includes('no such column: "callMode"')) {
      throw error;
    }
  }

  try {
    const [dropColumnQuery] = sql`
      ALTER TABLE messages
      DROP COLUMN callId;
    `;

    db.exec(dropColumnQuery);
  } catch (error) {
    if (!error.message.includes('no such column: "callId"')) {
      throw error;
    }
  }

  const [optimizeMessages] = sql`
    ALTER TABLE messages
      ADD COLUMN callId TEXT
      GENERATED ALWAYS AS (
        json_extract(json, '$.callId')
      );

    -- Optimize getCallHistoryMessageByCallId
    CREATE INDEX messages_call ON messages
      (conversationId, type, callId);

    CREATE INDEX messages_callHistory_readStatus ON messages
      (type, readStatus)
      WHERE type IS 'call-history';
  `;

  db.exec(optimizeMessages);
}

// Old implementation (removed): the same work, wrapped in a transaction that
// bumped user_version itself.
  db.transaction(() => {
    const ourUuid = getOurUuid(db);

    const [createTable] = sql`
      -- This table may have already existed from migration 87
      CREATE TABLE IF NOT EXISTS callsHistory (
        callId TEXT PRIMARY KEY,
        peerId TEXT NOT NULL, -- conversation id (legacy) | uuid | groupId | roomId
        ringerId TEXT DEFAULT NULL, -- ringer uuid
        mode TEXT NOT NULL, -- enum "Direct" | "Group"
        type TEXT NOT NULL, -- enum "Audio" | "Video" | "Group"
        direction TEXT NOT NULL, -- enum "Incoming" | "Outgoing
        -- Direct: enum "Pending" | "Missed" | "Accepted" | "Deleted"
        -- Group: enum "GenericGroupCall" | "OutgoingRing" | "Ringing" | "Joined" | "Missed" | "Declined" | "Accepted" | "Deleted"
        status TEXT NOT NULL,
        timestamp INTEGER NOT NULL,
        UNIQUE (callId, peerId) ON CONFLICT FAIL
      );

      -- Update peerId to be uuid or groupId
      UPDATE callsHistory
      SET peerId = (
        SELECT
          CASE
            WHEN conversations.type = 'private' THEN conversations.serviceId
            WHEN conversations.type = 'group' THEN conversations.groupId
          END
        FROM conversations
        WHERE callsHistory.peerId IS conversations.id
          AND callsHistory.peerId IS NOT conversations.serviceId
      )
      WHERE EXISTS (
        SELECT 1
        FROM conversations
        WHERE callsHistory.peerId IS conversations.id
          AND callsHistory.peerId IS NOT conversations.serviceId
      );

      CREATE INDEX IF NOT EXISTS callsHistory_order on callsHistory (timestamp DESC);
      CREATE INDEX IF NOT EXISTS callsHistory_byConversation ON callsHistory (peerId);
      -- For 'getCallHistoryGroupData':
      -- This index should target the subqueries for 'possible_parent' and 'possible_children'
      CREATE INDEX IF NOT EXISTS callsHistory_callAndGroupInfo_optimize on callsHistory (
        direction,
        peerId,
        timestamp DESC,
        status
      );
    `;

    db.exec(createTable);

    const [selectQuery] = sql`
      SELECT
        messages.json AS messageJson,
        conversations.id AS conversationId,
        conversations.json AS conversationJson
      FROM messages
      LEFT JOIN conversations ON conversations.id = messages.conversationId
      WHERE messages.type = 'call-history'
        -- Some of these messages were already migrated
        AND messages.json->'callHistoryDetails' IS NOT NULL
      -- Sort from oldest to newest, so that newer messages can overwrite older
      ORDER BY messages.received_at ASC, messages.sent_at ASC;
    `;

    // ... the same CallHistoryRow type, row-migration loop, DROP INDEX,
    // DROP COLUMN and re-index statements as above, with log messages
    // prefixed 'updateToSchemaVersion89:' ...

    db.pragma('user_version = 89');
  })();

  logger.info('updateToSchemaVersion89: success!');
}
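A note on the JSON_PATCH call in the loop above: SQLite's json_patch() applies an RFC 7396 merge patch, so a null value removes a key while any other value sets it — which is why patching with { callHistoryDetails: null, callId } both deletes the legacy details and records the callId. A minimal sketch, not part of this diff; the helper name is made up:

import type { Database } from '@signalapp/sqlcipher';

function jsonPatchDemo(db: Database): string | undefined {
  // Expected result: '{"b":2,"c":3}' — "a" is deleted, "c" is added.
  return db
    .prepare(`SELECT json_patch('{"a":1,"b":2}', '{"a":null,"c":3}')`, {
      pluck: true,
    })
    .get<string>();
}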
@@ -7,39 +7,28 @@ import type { LoggerType } from '../../types/Logging';
 import { sql } from '../util';
 
 export default function updateToSchemaVersion90(
-  currentVersion: number,
   db: Database,
   logger: LoggerType
 ): void {
-  if (currentVersion >= 90) {
-    return;
-  }
-
   let numChanges = 0;
-  db.transaction(() => {
   const [updateQuery, updateParams] = sql`
     UPDATE messages
     SET json = json_remove(json, '$.storyReplyContext.attachment.screenshotData')
     WHERE isStory = 0
       /* we want to find all messages with a non-null storyId, but using string
       comparison (instead of a non-null check) here causes Sqlite to use the
       storyId index */
       AND storyId > '0'
       AND json->'$.storyReplyContext.attachment.screenshotData' IS NOT NULL;
   `;
 
   const info = db.prepare(updateQuery).run(updateParams);
   numChanges = info.changes;
 
-  db.pragma('user_version = 90');
-  })();
-
   logger.info(
-    `updateToSchemaVersion90: removed screenshotData from ${numChanges} ` +
+    `removed screenshotData from ${numChanges} ` +
       `message${numChanges > 1 ? 's' : ''}`
   );
-
-  logger.info('updateToSchemaVersion90: success!');
 }
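If one wanted to sanity-check this migration, the same WHERE clause can count the story replies still carrying screenshotData — a sketch only, assuming a Database handle like the one above; the helper name is illustrative:

import type { Database } from '@signalapp/sqlcipher';

function countScreenshotDataRows(db: Database): number {
  // Mirrors the UPDATE's WHERE clause, including the storyId > '0' trick
  // that keeps SQLite on the storyId index.
  return (
    db
      .prepare(
        `
        SELECT count(*) FROM messages
        WHERE isStory = 0
          AND storyId > '0'
          AND json->'$.storyReplyContext.attachment.screenshotData' IS NOT NULL
        `,
        { pluck: true }
      )
      .get<number>() ?? 0
  );
}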
@@ -10,202 +10,180 @@ import { normalizePni } from '../../types/ServiceId';
 import * as Errors from '../../types/errors';
 
 export default function updateToSchemaVersion91(
-  currentVersion: number,
   db: Database,
   logger: LoggerType
 ): void {
-  if (currentVersion >= 91) {
-    return;
-  }
-
-  db.transaction(() => {
   // Fix the ourServiceId column so it's generated from the right JSON field
 
   db.exec(`
     --- First, prekeys
 
     DROP INDEX preKeys_ourServiceId;
 
     ALTER TABLE preKeys
       DROP COLUMN ourServiceId;
     ALTER TABLE preKeys
       ADD COLUMN ourServiceId NUMBER
         GENERATED ALWAYS AS (json_extract(json, '$.ourServiceId'));
 
     CREATE INDEX preKeys_ourServiceId ON preKeys (ourServiceId);
 
     -- Second, kyber prekeys
 
     DROP INDEX kyberPreKeys_ourServiceId;
 
     ALTER TABLE kyberPreKeys
       DROP COLUMN ourServiceId;
     ALTER TABLE kyberPreKeys
       ADD COLUMN ourServiceId NUMBER
         GENERATED ALWAYS AS (json_extract(json, '$.ourServiceId'));
 
     CREATE INDEX kyberPreKeys_ourServiceId ON kyberPreKeys (ourServiceId);
 
     -- Finally, signed prekeys
 
     DROP INDEX signedPreKeys_ourServiceId;
 
     ALTER TABLE signedPreKeys
       DROP COLUMN ourServiceId;
     ALTER TABLE signedPreKeys
       ADD COLUMN ourServiceId NUMBER
         GENERATED ALWAYS AS (json_extract(json, '$.ourServiceId'));
 
     CREATE INDEX signedPreKeys_ourServiceId ON signedPreKeys (ourServiceId);
   `);
 
   // Do overall count - if it's less than 1000, move on
 
   const totalKeys =
     db
       .prepare('SELECT count(*) FROM preKeys;', {
         pluck: true,
       })
       .get<number>() ?? 0;
-  logger.info(`updateToSchemaVersion91: Found ${totalKeys} keys`);
+  logger.info(`Found ${totalKeys} keys`);
   if (totalKeys < 1000) {
-    db.pragma('user_version = 91');
     return;
   }
 
   // Grab our PNI
 
   let pni: PniString;
   const pniJson = db
     .prepare("SELECT json FROM items WHERE id IS 'pni'", {
       pluck: true,
     })
     .get<string>();
   try {
     const pniData = JSON.parse(pniJson ?? '');
     pni = normalizePni(pniData.value, 'updateToSchemaVersion91');
   } catch (error) {
-    db.pragma('user_version = 91');
-
     if (pniJson) {
-      logger.warn(
-        'updateToSchemaVersion91: PNI found but did not parse',
-        Errors.toLogFormat(error)
-      );
+      logger.warn('PNI found but did not parse', Errors.toLogFormat(error));
     } else {
-      logger.info('updateToSchemaVersion91: Our PNI not found');
+      logger.info('Our PNI not found');
     }
     return;
   }
 
   // Grab PNI-specific count
 
   const [beforeQuery, beforeParams] =
     sql`SELECT count(*) from preKeys WHERE ourServiceId = ${pni}`;
   const beforeKeys = db
     .prepare(beforeQuery, {
       pluck: true,
     })
     .get(beforeParams);
-  logger.info(`updateToSchemaVersion91: Found ${beforeKeys} preKeys for PNI`);
+  logger.info(`Found ${beforeKeys} preKeys for PNI`);
 
   // Create index to help us with all these queries
 
   db.exec(`
     ALTER TABLE preKeys
       ADD COLUMN createdAt NUMBER
         GENERATED ALWAYS AS (json_extract(json, '$.createdAt'));
 
     CREATE INDEX preKeys_date
       ON preKeys (ourServiceId, createdAt);
   `);
-  logger.info('updateToSchemaVersion91: Temporary index created');
+  logger.info('Temporary index created');
 
   // Fetch 500th-oldest timestamp for PNI
 
   const [oldQuery, oldParams] = sql`
     SELECT createdAt
     FROM preKeys
     WHERE
       createdAt IS NOT NULL AND
       ourServiceId = ${pni}
     ORDER BY createdAt ASC
     LIMIT 1
     OFFSET 499
   `;
   const oldBoundary = db
     .prepare(oldQuery, {
       pluck: true,
     })
     .get(oldParams);
-  logger.info(
-    `updateToSchemaVersion91: Found 500th-oldest timestamp: ${oldBoundary}`
-  );
+  logger.info(`Found 500th-oldest timestamp: ${oldBoundary}`);
 
   // Fetch 500th-newest timestamp for PNI
 
   const [newQuery, newParams] = sql`
     SELECT createdAt
     FROM preKeys
     WHERE
       createdAt IS NOT NULL AND
       ourServiceId = ${pni}
     ORDER BY createdAt DESC
     LIMIT 1
     OFFSET 499
   `;
   const newBoundary = db
     .prepare(newQuery, {
       pluck: true,
     })
     .get(newParams);
-  logger.info(
-    `updateToSchemaVersion91: Found 500th-newest timestamp: ${newBoundary}`
-  );
+  logger.info(`Found 500th-newest timestamp: ${newBoundary}`);
 
   // Delete everything in between for PNI
 
   let result: RunResult;
   const [deleteQuery, deleteParams] = sql`
     DELETE FROM preKeys
     WHERE rowid IN (
       SELECT rowid FROM preKeys
       WHERE
         createdAt IS NOT NULL AND
         createdAt > ${oldBoundary ?? null} AND
         createdAt < ${newBoundary ?? null} AND
         ourServiceId = ${pni}
       LIMIT 10000
     );
   `;
   const preparedQuery = db.prepare(deleteQuery);
   do {
     result = preparedQuery.run(deleteParams);
-    logger.info(`updateToSchemaVersion91: Deleted ${result.changes} items`);
+    logger.info(`Deleted ${result.changes} items`);
   } while (result.changes > 0);
-  logger.info('updateToSchemaVersion91: Delete is complete!');
+  logger.info('Delete is complete!');
 
   // Get updated count for PNI
 
   const [afterQuery, afterParams] = sql`
     SELECT count(*)
     FROM preKeys
     WHERE ourServiceId = ${pni};
   `;
   const afterCount = db
     .prepare(afterQuery, {
       pluck: true,
     })
     .get(afterParams);
-  logger.info(
-    `updateToSchemaVersion91: Found ${afterCount} preKeys for PNI after delete`
-  );
+  logger.info(`Found ${afterCount} preKeys for PNI after delete`);
 
   db.exec(`
     DROP INDEX preKeys_date;
     ALTER TABLE preKeys DROP COLUMN createdAt;
   `);
-
-  db.pragma('user_version = 91');
-  })();
-
-  logger.info('updateToSchemaVersion91: success!');
 }
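The chunked DELETE above is a reusable pattern: the statement carries its own LIMIT, and the loop reruns it until a pass deletes nothing, so no single statement removes an unbounded number of rows. A generic sketch (the helper is hypothetical, not part of this commit):

import type { Database } from '@signalapp/sqlcipher';

function deleteInBatches(
  db: Database,
  queryWithLimit: string, // must contain its own LIMIT clause
  params: Record<string, unknown>
): number {
  const stmt = db.prepare(queryWithLimit);
  let total = 0;
  let changes = 0;
  do {
    ({ changes } = stmt.run(params));
    total += changes;
  } while (changes > 0);
  return total;
}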
@@ -11,47 +11,30 @@ import { sql, sqlFragment } from '../util';
 import { normalizePni } from '../../types/ServiceId';
 import * as Errors from '../../types/errors';
 
-export const version = 920;
-
-export function updateToSchemaVersion920(
-  currentVersion: number,
+export default function updateToSchemaVersion920(
   db: Database,
   logger: LoggerType
-): void {
-  if (currentVersion >= 920) {
-    return;
-  }
-
-  db.transaction(() => {
-    cleanKeys(
-      db,
-      logger,
-      'updateToSchemaVersion920/kyberPreKeys',
-      sqlFragment`kyberPreKeys`,
-      sqlFragment`createdAt`,
-      sqlFragment`ourServiceId`
-    );
-    cleanKeys(
-      db,
-      logger,
-      'updateToSchemaVersion920/signedPreKeys',
-      sqlFragment`signedPreKeys`,
-      sqlFragment`created_at`,
-      sqlFragment`ourServiceId`
-    );
-
-    logger.info('updateToSchemaVersion920: Done with deletions');
-
-    db.pragma('user_version = 920');
-  })();
-
-  logger.info(
-    'updateToSchemaVersion920: user_version set to 920. Starting vacuum...'
-  );
-  db.exec('VACUUM;');
-  logger.info('updateToSchemaVersion920: Vacuum complete.');
-
-  logger.info('updateToSchemaVersion920: success!');
+): 'vacuum' {
+  cleanKeys(
+    db,
+    logger,
+    'kyberPreKeys',
+    sqlFragment`kyberPreKeys`,
+    sqlFragment`createdAt`,
+    sqlFragment`ourServiceId`
+  );
+  cleanKeys(
+    db,
+    logger,
+    'signedPreKeys',
+    sqlFragment`signedPreKeys`,
+    sqlFragment`created_at`,
+    sqlFragment`ourServiceId`
+  );
+
+  logger.info('Done with deletions, starting vacuum...');
+
+  return 'vacuum';
 }
 
 export function cleanKeys(
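The new ): 'vacuum' return type suggests the migration runner, rather than the migration itself, now issues the VACUUM. How it does so is not shown in this diff; one plausible shape, assuming the runner wraps each migration in a transaction, is:

import type { Database } from '@signalapp/sqlcipher';

type Migration = (db: Database) => void | 'vacuum';

function runMigration(db: Database, migration: Migration): void {
  let wantsVacuum = false;
  db.transaction(() => {
    wantsVacuum = migration(db) === 'vacuum';
  })();
  if (wantsVacuum) {
    // VACUUM cannot run inside a transaction, so it happens after commit.
    db.exec('VACUUM;');
  }
}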
@@ -1,26 +1,6 @@
 // Copyright 2023 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
 
-import type { Database } from '@signalapp/sqlcipher';
-import type { LoggerType } from '../../types/Logging';
-
-export const version = 930;
-
-export function updateToSchemaVersion930(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 930) {
-    return;
-  }
-
-  db.transaction(() => {
-    // This was a migration that enabled 'secure-delete' in FTS
-
-    db.pragma('user_version = 930');
-  })();
-
-  logger.info('updateToSchemaVersion930: success!');
+export default function updateToSchemaVersion930(): void {
+  // This was a migration that enabled 'secure-delete' in FTS
 }
@@ -1,25 +1,6 @@
 // Copyright 2023 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
 
-import type { Database } from '@signalapp/sqlcipher';
-import type { LoggerType } from '../../types/Logging';
-
-export const version = 940;
-
-export function updateToSchemaVersion940(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 940) {
-    return;
-  }
-
-  db.transaction(() => {
-    // This was a migration that disabled secure-delete and rebuilt the index
-
-    db.pragma('user_version = 940');
-  })();
-
-  logger.info('updateToSchemaVersion940: success!');
+export default function updateToSchemaVersion940(): void {
+  // This was a migration that disabled secure-delete and rebuilt the index
 }
@@ -1,25 +1,6 @@
 // Copyright 2023 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
 
-import type { Database } from '@signalapp/sqlcipher';
-import type { LoggerType } from '../../types/Logging';
-
-export const version = 950;
-
-export function updateToSchemaVersion950(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 950) {
-    return;
-  }
-
-  db.transaction(() => {
-    // This was a migration that enable secure-delete
-
-    db.pragma('user_version = 950');
-  })();
-
-  logger.info('updateToSchemaVersion950: success!');
+export default function updateToSchemaVersion950(): void {
+  // This was a migration that enable secure-delete
 }
@@ -13,44 +13,31 @@ import { normalizePni } from '../../types/ServiceId';
 import { normalizeAci } from '../../util/normalizeAci';
 import type { JSONWithUnknownFields } from '../../types/Util';
 
-export const version = 960;
-
-export function updateToSchemaVersion960(
-  currentVersion: number,
+export default function updateToSchemaVersion960(
   db: Database,
   logger: LoggerType
 ): void {
-  if (currentVersion >= 960) {
+  const ourServiceIds = migratePni(db, logger);
+  if (!ourServiceIds) {
+    logger.info('not running, pni is normalized');
     return;
   }
 
-  db.transaction(() => {
-    const ourServiceIds = migratePni(db, logger);
-    if (!ourServiceIds) {
-      logger.info('updateToSchemaVersion960: not running, pni is normalized');
-      return;
-    }
-
-    // Migrate JSON fields
-    db.prepare(
-      `
-      UPDATE conversations
-      SET json = json_set(json, '$.pni', $pni)
-      WHERE serviceId IS $aci
-      `
-    ).run({
-      aci: ourServiceIds.aci,
-      pni: ourServiceIds.pni,
-    });
-
-    migratePreKeys(db, 'preKeys', ourServiceIds, logger);
-    migratePreKeys(db, 'signedPreKeys', ourServiceIds, logger);
-    migratePreKeys(db, 'kyberPreKeys', ourServiceIds, logger);
-
-    db.pragma('user_version = 960');
-  })();
-
-  logger.info('updateToSchemaVersion960: success!');
+  // Migrate JSON fields
+  db.prepare(
+    `
+    UPDATE conversations
+    SET json = json_set(json, '$.pni', $pni)
+    WHERE serviceId IS $aci
+    `
+  ).run({
+    aci: ourServiceIds.aci,
+    pni: ourServiceIds.pni,
+  });
+
+  migratePreKeys(db, 'preKeys', ourServiceIds, logger);
+  migratePreKeys(db, 'signedPreKeys', ourServiceIds, logger);
+  migratePreKeys(db, 'kyberPreKeys', ourServiceIds, logger);
 }
 
 //
@@ -101,12 +88,9 @@ function migratePni(
     [aci] = JSON.parse(uuidIdJson ?? '').value.split('.', 2);
   } catch (error) {
     if (uuidIdJson) {
-      logger.warn(
-        'updateToSchemaVersion960: failed to parse uuid_id item',
-        error
-      );
+      logger.warn('failed to parse uuid_id item', error);
     } else {
-      logger.info('updateToSchemaVersion960: Our ACI not found');
+      logger.info('Our ACI not found');
     }
   }
   if (!aci) {
@@ -118,9 +102,9 @@ function migratePni(
     legacyPni = JSON.parse(pniJson ?? '').value;
   } catch (error) {
     if (pniJson) {
-      logger.warn('updateToSchemaVersion960: failed to parse pni item', error);
+      logger.warn('failed to parse pni item', error);
     } else {
-      logger.info('updateToSchemaVersion960: Our PNI not found');
+      logger.info('Our PNI not found');
     }
   }
   if (!legacyPni) {
@@ -164,10 +148,7 @@ function migratePni(
 
       updateStmt.run({ id, json: JSON.stringify(data) });
     } catch (error) {
-      logger.warn(
-        `updateToSchemaVersion960: failed to parse ${id} item`,
-        error
-      );
+      logger.warn(`failed to parse ${id} item`, error);
     }
   }
   return {
@@ -200,11 +181,11 @@ function migratePreKeys(
     WHERE id = $id
   `);
 
-  logger.info(`updateToSchemaVersion960: updating ${preKeys.length} ${table}`);
+  logger.info(`updating ${preKeys.length} ${table}`);
   for (const { id, json } of preKeys) {
     const match = id.match(/^(.*):(.*)$/);
     if (!match) {
-      logger.warn(`updateToSchemaVersion960: invalid ${table} id ${id}`);
+      logger.warn(`invalid ${table} id ${id}`);
       continue;
     }
 
@@ -212,20 +193,13 @@ function migratePreKeys(
     try {
       legacyData = JSON.parse(json);
     } catch (error) {
-      logger.warn(
-        `updateToSchemaVersion960: failed to parse ${table} ${id}`,
-        error
-      );
+      logger.warn(`failed to parse ${table} ${id}`, error);
       continue;
     }
 
     const [, ourServiceId, keyId] = match;
     if (ourServiceId !== legacyPni) {
-      logger.warn(
-        'updateToSchemaVersion960: unexpected ourServiceId',
-        ourServiceId,
-        legacyPni
-      );
+      logger.warn('unexpected ourServiceId', ourServiceId, legacyPni);
       continue;
     }
 
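For context on migratePreKeys above: the ids it handles have the form '<ourServiceId>:<keyId>', and because the regex's first group is greedy the split lands on the last colon. A small illustrative helper, not part of the codebase:

function splitPreKeyId(
  id: string
): { ourServiceId: string; keyId: string } | undefined {
  const match = id.match(/^(.*):(.*)$/);
  if (!match) {
    return undefined;
  }
  const [, ourServiceId, keyId] = match;
  return { ourServiceId, keyId };
}

// splitPreKeyId('aaaa-bbbb-cccc:42') -> { ourServiceId: 'aaaa-bbbb-cccc', keyId: '42' }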
@@ -2,29 +2,9 @@
 // SPDX-License-Identifier: AGPL-3.0-only
 
 import type { Database } from '@signalapp/sqlcipher';
-import type { LoggerType } from '../../types/Logging';
-
-export const version = 970;
-
-export function updateToSchemaVersion970(
-  currentVersion: number,
-  db: Database,
-  logger: LoggerType
-): void {
-  if (currentVersion >= 970) {
-    return;
-  }
-
-  const start = Date.now();
-  db.transaction(() => {
-    db.exec(`
-      INSERT INTO messages_fts(messages_fts) VALUES ('optimize');
-    `);
-    db.pragma('user_version = 970');
-  })();
-
-  const duration = Date.now() - start;
-  logger.info(
-    `updateToSchemaVersion970: success! fts optimize took ${duration}ms`
-  );
+export default function updateToSchemaVersion970(db: Database): void {
+  db.exec(`
+    INSERT INTO messages_fts(messages_fts) VALUES ('optimize');
+  `);
 }
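The removed version also timed the FTS optimize. If that timing is still wanted, it can live at the call site instead — a sketch, with the wrapper name invented here:

import type { Database } from '@signalapp/sqlcipher';
import type { LoggerType } from '../../types/Logging';

function runTimed(
  db: Database,
  logger: LoggerType,
  migration: (db: Database) => void
): void {
  const start = Date.now();
  migration(db);
  logger.info(`migration took ${Date.now() - start}ms`);
}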