Key cleanup before migration 88 and attempt vacuum only once in 920

Scott Nonnenberg 2023-09-11 18:19:38 -07:00 committed by GitHub
parent 6ae0edcc1e
commit be6331d75f
4 changed files with 615 additions and 146 deletions

ts/sql/migrations/87-cleanup.ts

@@ -0,0 +1,54 @@
// Copyright 2023 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import type { Database } from '@signalapp/better-sqlite3';
import type { LoggerType } from '../../types/Logging';
import { cleanKeys } from './920-clean-more-keys';
import { sqlFragment } from '../util';
// Note: for many users, this is not what ran for them as migration 87. You can see that
// migration here: https://github.com/signalapp/Signal-Desktop/commit/671e16ae1f869627f355113d6397ccb62d5461d2
// The goal of this migration is to ensure that key cleanup happens before migration 88.
export default function updateToSchemaVersion87(
currentVersion: number,
db: Database,
logger: LoggerType
): void {
// We're checking for the version of the next migration here, not this version. We want
// this to run if the user hasn't yet successfully run migration 88.
if (currentVersion >= 88) {
return;
}
db.transaction(() => {
cleanKeys(
db,
logger,
'updateToSchemaVersion87(cleanup)/kyberPreKeys',
sqlFragment`kyberPreKeys`,
sqlFragment`createdAt`,
sqlFragment`ourUuid`
);
cleanKeys(
db,
logger,
'updateToSchemaVersion87(cleanup)/preKeys',
sqlFragment`preKeys`,
sqlFragment`createdAt`,
sqlFragment`ourUuid`
);
cleanKeys(
db,
logger,
'updateToSchemaVersion87(cleanup)/signedPreKeys',
sqlFragment`signedPreKeys`,
sqlFragment`created_at`,
sqlFragment`ourUuid`
);
})();
logger.info('updateToSchemaVersion87(cleanup): success!');
}
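For orientation, here is a minimal sketch of the version-gated runner loop these migration modules assume; the function name and loop are illustrative, not Signal-Desktop's actual runner. Each migration receives the current user_version and decides for itself whether to run, which is what lets this module guard on version 88 rather than on its own number:

import type { Database } from '@signalapp/better-sqlite3';
import type { LoggerType } from '../../types/Logging';

type Migration = (currentVersion: number, db: Database, logger: LoggerType) => void;

// Hypothetical runner, for illustration only. SCHEMA_VERSIONS is the ordered
// list of migration functions exported by the migrations index below.
function runSchemaMigrations(
  db: Database,
  logger: LoggerType,
  schemaVersions: ReadonlyArray<Migration>
): void {
  for (const migration of schemaVersions) {
    // user_version is SQLite's built-in schema counter; each migration
    // checks it and returns early if its work is already done.
    const currentVersion = db.pragma('user_version', { simple: true }) as number;
    migration(currentVersion, db, logger);
  }
}

Because the guard is currentVersion >= 88, users who already completed migration 88 (including those who ran the original, different migration 87 linked above) skip this cleanup entirely, while users still below 88 get their key tables trimmed before the service-ids migration runs.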

ts/sql/migrations/920-clean-more-keys.ts

@@ -23,156 +23,156 @@ export function updateToSchemaVersion920(
}
db.transaction(() => {
cleanKeys(
db,
logger,
'updateToSchemaVersion920/kyberPreKeys',
sqlFragment`kyberPreKeys`,
sqlFragment`createdAt`,
sqlFragment`ourServiceId`
);
cleanKeys(
db,
logger,
'updateToSchemaVersion920/signedPreKeys',
sqlFragment`signedPreKeys`,
sqlFragment`created_at`,
sqlFragment`ourServiceId`
);
logger.info('updateToSchemaVersion920: Done with deletions');
})();
db.pragma('user_version = 920');
logger.info(
'updateToSchemaVersion920: user_version set to 920. Starting vacuum...'
);
db.exec('VACUUM;');
logger.info('updateToSchemaVersion920: Vacuum complete.');
logger.info('updateToSchemaVersion920: success!');
}
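Note the ordering just above: user_version is set to 920 before the VACUUM rather than after it. VACUUM rewrites the entire database file and cannot run inside a transaction, so it only starts once the deletion transaction has committed; and because the schema version is already recorded at that point, an interrupted vacuum (a crash or force-quit mid-rewrite) will not cause this expensive migration to be attempted again on the next launch. That is the "attempt vacuum only once" half of this commit.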
export function cleanKeys(
db: Database,
logger: LoggerType,
logId: string,
tableName: QueryFragment,
columnName: QueryFragment,
idField: QueryFragment
): void {
// Grab our PNI
let pni: PniString;
const pniJson = db
.prepare("SELECT json FROM items WHERE id IS 'pni'")
.pluck()
.get();
try {
const pniData = JSON.parse(pniJson);
pni = normalizePni(pniData.value, logId);
} catch (error) {
if (pniJson) {
logger.warn(
`${logId}: PNI found but did not parse`,
Errors.toLogFormat(error)
);
} else {
logger.info(`${logId}: Our PNI not found`);
}
return;
}
// Do overall count - if it's less than 1000, move on
const totalKeys = db
.prepare(sql`SELECT count(*) FROM ${tableName};`[0])
.pluck(true)
.get();
logger.info(`${logId}: Found ${totalKeys} total keys`);
if (totalKeys < 1000) {
return;
}
// Grab PNI-specific count
const [
beforeQuery,
beforeParams,
] = sql`SELECT count(*) from ${tableName} WHERE ${idField} = ${pni}`;
const beforeKeys = db.prepare(beforeQuery).pluck(true).get(beforeParams);
logger.info(`${logId}: Found ${beforeKeys} keys for PNI`);
// Create index to help us with all these queries
db.exec(
sql`
ALTER TABLE ${tableName}
ADD COLUMN createdAt NUMBER
GENERATED ALWAYS AS (json_extract(json, '$.${columnName}'));
CREATE INDEX ${tableName}_date
ON ${tableName} (${idField}, createdAt);
`[0]
);
logger.info(`${logId}: Temporary index created`);
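// Note: with no storage clause given, SQLite makes this a VIRTUAL generated
// column, computed from json_extract on read; the index materializes those
// values, so the createdAt lookups below do not re-parse JSON per row.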
// Fetch 500th-oldest timestamp for PNI
const [oldQuery, oldParams] = sql`
SELECT createdAt
FROM ${tableName}
WHERE
createdAt IS NOT NULL AND
${idField} = ${pni}
ORDER BY createdAt ASC
LIMIT 1
OFFSET 499
`;
const oldBoundary = db.prepare(oldQuery).pluck(true).get(oldParams);
logger.info(`${logId}: Found 500th-oldest timestamp: ${oldBoundary}`);
// Fetch 500th-newest timestamp for PNI
const [newQuery, newParams] = sql`
SELECT createdAt
FROM ${tableName}
WHERE
createdAt IS NOT NULL AND
${idField} = ${pni}
ORDER BY createdAt DESC
LIMIT 1
OFFSET 499
`;
const newBoundary = db.prepare(newQuery).pluck(true).get(newParams);
logger.info(`${logId}: Found 500th-newest timestamp: ${newBoundary}`);
// Delete everything in between for PNI
let result: RunResult;
const [deleteQuery, deleteParams] = sql`
DELETE FROM ${tableName}
WHERE
createdAt IS NOT NULL AND
createdAt > ${oldBoundary} AND
createdAt < ${newBoundary} AND
${idField} = ${pni}
LIMIT 10000;
`;
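// Note: DELETE with a LIMIT clause requires SQLite compiled with
// SQLITE_ENABLE_UPDATE_DELETE_LIMIT; the bundled @signalapp/better-sqlite3
// evidently enables it, since this loop depends on it. Deleting in batches
// of 10,000 bounds the work per statement and allows progress logging.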
const preparedQuery = db.prepare(deleteQuery);
do {
result = preparedQuery.run(deleteParams);
logger.info(`${logId}: Deleted ${result.changes} keys`);
} while (result.changes > 0);
logger.info(`${logId}: Delete is complete!`);
// Get updated count for PNI
const [afterQuery, afterParams] = sql`
SELECT count(*)
FROM ${tableName}
WHERE ${idField} = ${pni};
`;
const afterCount = db.prepare(afterQuery).pluck(true).get(afterParams);
logger.info(`${logId}: Found ${afterCount} keys for PNI after delete`);
db.exec(
sql`
DROP INDEX ${tableName}_date;
ALTER TABLE ${tableName} DROP COLUMN createdAt;
`[0]
);
}
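Throughout this file, the sql and sqlFragment helpers from ../util do the query assembly: interpolating a sqlFragment splices trusted SQL text (table and column names) directly into the statement, while interpolating a plain value such as pni becomes a ? placeholder with a bound parameter. Here is a minimal sketch of how such a tag pair could work; it is inferred from the usage above, not taken from Signal-Desktop's actual ts/sql/util implementation:

type QueryFragment = { fragment: string };

// Illustrative only. Fragments carry raw SQL text to be spliced verbatim.
function sqlFragment(
  strings: TemplateStringsArray,
  ...values: ReadonlyArray<string>
): QueryFragment {
  let fragment = strings[0];
  for (let i = 0; i < values.length; i += 1) {
    fragment += values[i] + strings[i + 1];
  }
  return { fragment };
}

// Illustrative only. Returns [query, params] - which is why callers index
// [0] for parameterless DDL and destructure both halves everywhere else.
function sql(
  strings: TemplateStringsArray,
  ...values: ReadonlyArray<QueryFragment | string | number | null>
): [string, Array<unknown>] {
  let query = strings[0];
  const params: Array<unknown> = [];
  for (let i = 0; i < values.length; i += 1) {
    const value = values[i];
    if (value !== null && typeof value === 'object') {
      // A fragment: splice its SQL text (e.g. a table name) into the query.
      query += value.fragment;
    } else {
      // A plain value: bind it as a parameter instead of inlining it.
      query += '?';
      params.push(value);
    }
    query += strings[i + 1];
  }
  return [query, params];
}

Identifiers like table names cannot be bound as parameters in SQLite, which is why they travel as fragments; only the data values (the PNI, the timestamp boundaries) go through parameter binding.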

ts/sql/migrations/index.ts

@@ -62,6 +62,7 @@ import updateToSchemaVersion83 from './83-mentions';
import updateToSchemaVersion84 from './84-all-mentions';
import updateToSchemaVersion85 from './85-add-kyber-keys';
import updateToSchemaVersion86 from './86-story-replies-index';
import updateToSchemaVersion87 from './87-cleanup';
import updateToSchemaVersion88 from './88-service-ids';
import updateToSchemaVersion89 from './89-call-history';
import updateToSchemaVersion90 from './90-delete-story-reply-screenshot';
@@ -775,13 +776,7 @@ function updateToSchemaVersion17(
);
}
db.exec('DROP INDEX IF EXISTS messages_view_once;');
db.exec(`
CREATE INDEX messages_view_once ON messages (
@@ -2004,7 +1999,7 @@ export const SCHEMA_VERSIONS = [
updateToSchemaVersion84,
updateToSchemaVersion85,
updateToSchemaVersion86,
updateToSchemaVersion87,
updateToSchemaVersion88,
updateToSchemaVersion89,