Resilience to failed PNI key upload, clean more key tables

Scott Nonnenberg 2023-08-28 17:26:39 -07:00 committed by GitHub
parent e05de1aa6f
commit cbd16b90bb
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
5 changed files with 521 additions and 27 deletions
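
This change does two things. First, it adds schema migration 920, which shrinks oversized key tables: when kyberPreKeys or signedPreKeys holds 1,000 or more rows, the migration deletes every key belonging to our PNI that falls strictly between the 500th-oldest and 500th-newest createdAt timestamps, using a temporary generated column and index, then vacuums the database. Second, it makes PNI key upload failures survivable: AccountManager drops a log-and-rethrow wrapper around registerKeys, and UpdateKeysListener now unlinks the device on a 403 or 422 from the PNI key upload and logs (without retrying) any other PNI upload error, instead of letting every failure fall through to the generic five-minute retry.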

@@ -0,0 +1,178 @@
// Copyright 2023 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

import type { Database, RunResult } from '@signalapp/better-sqlite3';

import type { LoggerType } from '../../types/Logging';
import type { QueryFragment } from '../util';
import type { PniString } from '../../types/ServiceId';
import { sql, sqlFragment } from '../util';
import { normalizePni } from '../../types/ServiceId';
import * as Errors from '../../types/errors';

export const version = 920;

export function updateToSchemaVersion920(
  currentVersion: number,
  db: Database,
  logger: LoggerType
): void {
  if (currentVersion >= 920) {
    return;
  }

  db.transaction(() => {
    // Grab our PNI
    let pni: PniString;
    const pniJson = db
      .prepare("SELECT json FROM items WHERE id IS 'pni'")
      .pluck()
      .get();
    try {
      const pniData = JSON.parse(pniJson);
      pni = normalizePni(pniData.value, 'updateToSchemaVersion920');
    } catch (error) {
      db.pragma('user_version = 920');
      if (pniJson) {
        logger.warn(
          'updateToSchemaVersion920: PNI found but did not parse',
          Errors.toLogFormat(error)
        );
      } else {
        logger.info('updateToSchemaVersion920: Our PNI not found');
      }
      return;
    }

    const cleanKeys = (
      forLogging: string,
      tableName: QueryFragment,
      columnName: QueryFragment
    ) => {
      const logId = `updateToSchemaVersion920(${forLogging})`;

      // Do overall count - if it's less than 1000, move on
      const totalKeys = db
        .prepare(sql`SELECT count(*) FROM ${tableName};`[0])
        .pluck(true)
        .get();
      logger.info(`${logId}: Found ${totalKeys} total keys`);
      if (totalKeys < 1000) {
        return;
      }

      // Grab PNI-specific count
      const [
        beforeQuery,
        beforeParams,
      ] = sql`SELECT count(*) from ${tableName} WHERE ourServiceId = ${pni}`;
      const beforeKeys = db.prepare(beforeQuery).pluck(true).get(beforeParams);
      logger.info(`${logId}: Found ${beforeKeys} keys for PNI`);

      // Create index to help us with all these queries
      db.exec(
        sql`
          ALTER TABLE ${tableName}
            ADD COLUMN createdAt NUMBER
            GENERATED ALWAYS AS (json_extract(json, '$.${columnName}'));

          CREATE INDEX ${tableName}_date
            ON ${tableName} (ourServiceId, createdAt);
        `[0]
      );
      logger.info(`${logId}: Temporary index created`);

      // Fetch 500th-oldest timestamp for PNI
      const [oldQuery, oldParams] = sql`
        SELECT createdAt
        FROM ${tableName}
        WHERE
          createdAt IS NOT NULL AND
          ourServiceId = ${pni}
        ORDER BY createdAt ASC
        LIMIT 1
        OFFSET 499
      `;
      const oldBoundary = db.prepare(oldQuery).pluck(true).get(oldParams);
      logger.info(`${logId}: Found 500th-oldest timestamp: ${oldBoundary}`);

      // Fetch 500th-newest timestamp for PNI
      const [newQuery, newParams] = sql`
        SELECT createdAt
        FROM ${tableName}
        WHERE
          createdAt IS NOT NULL AND
          ourServiceId = ${pni}
        ORDER BY createdAt DESC
        LIMIT 1
        OFFSET 499
      `;
      const newBoundary = db.prepare(newQuery).pluck(true).get(newParams);
      logger.info(`${logId}: Found 500th-newest timestamp: ${newBoundary}`);

      // Delete everything in between for PNI
      let result: RunResult;
      const [deleteQuery, deleteParams] = sql`
        DELETE FROM ${tableName}
        WHERE
          createdAt IS NOT NULL AND
          createdAt > ${oldBoundary} AND
          createdAt < ${newBoundary} AND
          ourServiceId = ${pni}
        LIMIT 10000;
      `;
      const preparedQuery = db.prepare(deleteQuery);
      do {
        result = preparedQuery.run(deleteParams);
        logger.info(`${logId}: Deleted ${result.changes} keys`);
      } while (result.changes > 0);
      logger.info(`${logId}: Delete is complete!`);

      // Get updated count for PNI
      const [afterQuery, afterParams] = sql`
        SELECT count(*)
        FROM ${tableName}
        WHERE ourServiceId = ${pni};
      `;
      const afterCount = db.prepare(afterQuery).pluck(true).get(afterParams);
      logger.info(`${logId}: Found ${afterCount} keys for PNI after delete`);

      db.exec(
        sql`
          DROP INDEX ${tableName}_date;
          ALTER TABLE ${tableName} DROP COLUMN createdAt;
        `[0]
      );
    };

    cleanKeys(
      'kyberPreKeys',
      sqlFragment`kyberPreKeys`,
      sqlFragment`createdAt`
    );
    cleanKeys(
      'signedPreKeys',
      sqlFragment`signedPreKeys`,
      sqlFragment`created_at`
    );

    logger.info('updateToSchemaVersion920: Done with deletions');
  })();

  logger.info('updateToSchemaVersion920: Starting vacuum...');
  db.exec('VACUUM;');
  logger.info('updateToSchemaVersion920: Vacuum complete.');

  db.pragma('user_version = 920');

  logger.info('updateToSchemaVersion920: success!');
}
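
To make the boundary arithmetic concrete: with 3,000 PNI keys whose createdAt values run 0 through 2,999, the 500th-oldest timestamp is 499 and the 500th-newest is 2,500. The DELETE removes only rows strictly between those boundaries (createdAt 500 through 2,499), leaving the oldest 500 and the newest 500: the 1,000 survivors the tests below assert.

The `sql` and `sqlFragment` tags come from the app's own `../util` module. As a rough model of their behavior, inferred only from how they are used above (this is not the real implementation): a `sqlFragment` is spliced into the query text verbatim, any other interpolated value becomes a bound parameter, and `sql` returns a `[query, params]` pair, which is why the code above indexes `[0]` when a statement has no parameters.

// Simplified model of the tagged-template helpers (illustration only;
// the real helpers in '../util' are more careful).
type SketchFragment = { readonly fragment: string };

function sqlFragment(
  strings: TemplateStringsArray,
  ...values: ReadonlyArray<SketchFragment>
): SketchFragment {
  let text = strings[0];
  values.forEach((value, i) => {
    text += value.fragment + strings[i + 1];
  });
  return { fragment: text };
}

function sql(
  strings: TemplateStringsArray,
  ...values: ReadonlyArray<unknown>
): [string, Array<unknown>] {
  let query = strings[0];
  const params: Array<unknown> = [];
  values.forEach((value, i) => {
    if (value !== null && typeof value === 'object' && 'fragment' in value) {
      // A fragment becomes part of the SQL text itself
      query += (value as SketchFragment).fragment;
    } else {
      // Anything else becomes a '?' placeholder plus a bound parameter
      query += '?';
      params.push(value);
    }
    query += strings[i + 1];
  });
  return [query, params];
}

// Usage mirroring the migration above:
const tableName = sqlFragment`kyberPreKeys`;
const [query, params] = sql`SELECT count(*) FROM ${tableName} WHERE ourServiceId = ${'our-pni'}`;
// query  -> 'SELECT count(*) FROM kyberPreKeys WHERE ourServiceId = ?'
// params -> ['our-pni']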

@@ -66,6 +66,10 @@ import updateToSchemaVersion88 from './88-service-ids';
 import updateToSchemaVersion89 from './89-call-history';
 import updateToSchemaVersion90 from './90-delete-story-reply-screenshot';
 import updateToSchemaVersion91 from './91-clean-keys';
+import {
+  version as MAX_VERSION,
+  updateToSchemaVersion920,
+} from './920-clean-more-keys';
 
 function updateToSchemaVersion1(
   currentVersion: number,
@@ -1908,7 +1912,7 @@ export const SCHEMA_VERSIONS = [
   updateToSchemaVersion2,
   updateToSchemaVersion3,
   updateToSchemaVersion4,
-  (_v: number, _i: Database, _l: LoggerType): void => undefined, // version 5 was dropped
+  // version 5 was dropped
   updateToSchemaVersion6,
   updateToSchemaVersion7,
   updateToSchemaVersion8,
@@ -1998,44 +2002,45 @@ export const SCHEMA_VERSIONS = [
   updateToSchemaVersion84,
   updateToSchemaVersion85,
   updateToSchemaVersion86,
-  (_v: number, _i: Database, _l: LoggerType): void => undefined, // version 87 was dropped
+  // version 87 was dropped
   updateToSchemaVersion88,
   updateToSchemaVersion89,
   updateToSchemaVersion90,
   updateToSchemaVersion91,
+  // From here forward, all migrations should be multiples of 10
+  updateToSchemaVersion920,
 ];
 
 export function updateSchema(db: Database, logger: LoggerType): void {
   const sqliteVersion = getSQLiteVersion(db);
   const sqlcipherVersion = getSQLCipherVersion(db);
-  const userVersion = getUserVersion(db);
-  const maxUserVersion = SCHEMA_VERSIONS.length;
+  const startingVersion = getUserVersion(db);
   const schemaVersion = getSchemaVersion(db);
 
   logger.info(
     'updateSchema:\n',
-    ` Current user_version: ${userVersion};\n`,
-    ` Most recent db schema: ${maxUserVersion};\n`,
+    ` Current user_version: ${startingVersion};\n`,
+    ` Most recent db schema: ${MAX_VERSION};\n`,
     ` SQLite version: ${sqliteVersion};\n`,
     ` SQLCipher version: ${sqlcipherVersion};\n`,
     ` (deprecated) schema_version: ${schemaVersion};\n`
   );
 
-  if (userVersion > maxUserVersion) {
+  if (startingVersion > MAX_VERSION) {
     throw new Error(
-      `SQL: User version is ${userVersion} but the expected maximum version ` +
-        `is ${maxUserVersion}. Did you try to start an old version of Signal?`
+      `SQL: User version is ${startingVersion} but the expected maximum version ` +
+        `is ${MAX_VERSION}. Did you try to start an old version of Signal?`
     );
   }
 
-  for (let index = 0; index < maxUserVersion; index += 1) {
+  for (let index = 0, max = SCHEMA_VERSIONS.length; index < max; index += 1) {
     const runSchemaUpdate = SCHEMA_VERSIONS[index];
-    runSchemaUpdate(userVersion, db, logger);
+    runSchemaUpdate(startingVersion, db, logger);
   }
 
-  if (userVersion !== maxUserVersion) {
+  if (startingVersion !== MAX_VERSION) {
     const start = Date.now();
     db.pragma('optimize');
     const duration = Date.now() - start;
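
The jump from migration 91 straight to 920 is deliberate: user_version values are now decoupled from the array index, so the maximum version is imported from the newest migration module as MAX_VERSION rather than derived from SCHEMA_VERSIONS.length, and each migration guards itself with its own currentVersion check. Under that scheme a follow-up migration would look roughly like the following hypothetical sketch (the 930 file and function names are invented for illustration):

// Hypothetical 930-some-change.ts, following the new convention.
import type { Database } from '@signalapp/better-sqlite3';
import type { LoggerType } from '../../types/Logging';

// Exported so the migrations index can re-import it as MAX_VERSION.
export const version = 930;

export function updateToSchemaVersion930(
  currentVersion: number,
  db: Database,
  logger: LoggerType
): void {
  // Each migration is self-guarding, so running the whole list is idempotent.
  if (currentVersion >= 930) {
    return;
  }

  db.transaction(() => {
    // ...schema changes go here...
    db.pragma('user_version = 930');
  })();

  logger.info('updateToSchemaVersion930: success!');
}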

@@ -0,0 +1,297 @@
// Copyright 2023 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

import { assert } from 'chai';
import type { Database } from '@signalapp/better-sqlite3';
import SQL from '@signalapp/better-sqlite3';
import { v4 as generateGuid } from 'uuid';
import { range } from 'lodash';

import { insertData, updateToVersion } from './helpers';
import type { ServiceIdString } from '../../types/ServiceId';
import { normalizeAci, normalizePni } from '../../types/ServiceId';
import type { KyberPreKeyType, SignedPreKeyType } from '../../sql/Interface';

type TestingKyberKey = Omit<
  KyberPreKeyType,
  'data' | 'isLastResort' | 'isConfirmed' | 'createdAt'
> & {
  createdAt: number | undefined;
};
type TestingSignedKey = Omit<
  SignedPreKeyType,
  'privateKey' | 'publicKey' | 'confirmed' | 'created_at'
> & {
  created_at: number | undefined;
};
describe('SQL/updateToSchemaVersion920', () => {
  let db: Database;
  const OUR_ACI = normalizeAci(generateGuid(), 'updateToSchemaVersion920 test');
  const OUR_PNI = normalizePni(
    `PNI:${generateGuid()}`,
    'updateToSchemaVersion920 test'
  );
  let idCount = 0;

  beforeEach(() => {
    db = new SQL(':memory:');
    updateToVersion(db, 91);
  });

  afterEach(() => {
    db.close();
  });

  function addPni() {
    insertData(db, 'items', [
      {
        id: 'pni',
        json: {
          id: 'pni',
          value: OUR_PNI,
        },
      },
    ]);
  }

  function getCountOfKyberKeys(): number {
    return db.prepare('SELECT count(*) FROM kyberPreKeys;').pluck(true).get();
  }
  function getCountOfSignedKeys(): number {
    return db.prepare('SELECT count(*) FROM signedPreKeys;').pluck(true).get();
  }
  function getPragma(): number {
    return db.prepare('PRAGMA user_version;').pluck(true).get();
  }

  function generateKyberKey(
    createdAt: number | undefined,
    ourServiceId: ServiceIdString
  ): TestingKyberKey {
    idCount += 1;

    return {
      createdAt,
      id: `${ourServiceId}:${idCount}`,
      keyId: idCount,
      ourServiceId,
    };
  }
  function generateSignedKey(
    createdAt: number | undefined,
    ourServiceId: ServiceIdString
  ): TestingSignedKey {
    idCount += 1;

    return {
      created_at: createdAt,
      id: `${ourServiceId}:${idCount}`,
      keyId: idCount,
      ourServiceId,
    };
  }

  function getRangeOfKyberKeysForInsert(
    start: number,
    end: number,
    ourServiceId: ServiceIdString,
    options?: {
      clearCreatedAt?: boolean;
    }
  ): Array<{ id: string; json: TestingKyberKey }> {
    return range(start, end).map(createdAt => {
      const key = generateKyberKey(
        options?.clearCreatedAt ? undefined : createdAt,
        ourServiceId
      );

      return {
        id: key.id,
        json: key,
      };
    });
  }

  it('handles missing PNI', () => {
    assert.strictEqual(0, getCountOfKyberKeys());
    insertData(
      db,
      'kyberPreKeys',
      getRangeOfKyberKeysForInsert(0, 3000, OUR_ACI)
    );
    assert.strictEqual(3000, getCountOfKyberKeys());
    assert.strictEqual(91, getPragma());

    updateToVersion(db, 920);

    assert.strictEqual(920, getPragma());
    assert.strictEqual(3000, getCountOfKyberKeys());
  });

  describe('kyberPreKeys', () => {
    it('deletes 2000 extra keys', () => {
      assert.strictEqual(0, getCountOfKyberKeys());
      addPni();
      insertData(
        db,
        'kyberPreKeys',
        getRangeOfKyberKeysForInsert(0, 3000, OUR_PNI)
      );
      assert.strictEqual(3000, getCountOfKyberKeys());
      assert.strictEqual(91, getPragma());

      updateToVersion(db, 920);

      assert.strictEqual(920, getPragma());
      assert.strictEqual(1000, getCountOfKyberKeys());
    });

    it('leaves 1000 existing keys alone', () => {
      assert.strictEqual(0, getCountOfKyberKeys());
      addPni();
      insertData(
        db,
        'kyberPreKeys',
        getRangeOfKyberKeysForInsert(0, 1000, OUR_PNI)
      );
      assert.strictEqual(1000, getCountOfKyberKeys());
      assert.strictEqual(91, getPragma());

      updateToVersion(db, 920);

      assert.strictEqual(920, getPragma());
      assert.strictEqual(1000, getCountOfKyberKeys());
    });

    it('leaves keys with missing createdAt alone', () => {
      assert.strictEqual(0, getCountOfKyberKeys());
      addPni();
      insertData(
        db,
        'kyberPreKeys',
        getRangeOfKyberKeysForInsert(0, 3000, OUR_PNI, { clearCreatedAt: true })
      );
      assert.strictEqual(3000, getCountOfKyberKeys());
      assert.strictEqual(91, getPragma());

      updateToVersion(db, 920);

      assert.strictEqual(920, getPragma());
      assert.strictEqual(3000, getCountOfKyberKeys());
    });

    it('leaves extra ACI keys alone, even if above 1000', () => {
      assert.strictEqual(0, getCountOfKyberKeys());
      addPni();
      insertData(
        db,
        'kyberPreKeys',
        getRangeOfKyberKeysForInsert(0, 3000, OUR_ACI)
      );
      assert.strictEqual(3000, getCountOfKyberKeys());
      assert.strictEqual(91, getPragma());

      updateToVersion(db, 920);

      assert.strictEqual(920, getPragma());
      assert.strictEqual(3000, getCountOfKyberKeys());
    });
  });

  describe('signedPreKeys', () => {
    function getRangeOfSignedKeysForInsert(
      start: number,
      end: number,
      ourServiceId: ServiceIdString,
      options?: {
        clearCreatedAt?: boolean;
      }
    ): Array<{ id: string; json: TestingSignedKey }> {
      return range(start, end).map(createdAt => {
        const key = generateSignedKey(
          options?.clearCreatedAt ? undefined : createdAt,
          ourServiceId
        );

        return {
          id: key.id,
          json: key,
        };
      });
    }

    it('deletes 2000 extra keys', () => {
      assert.strictEqual(0, getCountOfSignedKeys());
      addPni();
      insertData(
        db,
        'signedPreKeys',
        getRangeOfSignedKeysForInsert(0, 3000, OUR_PNI)
      );
      assert.strictEqual(3000, getCountOfSignedKeys());
      assert.strictEqual(91, getPragma());

      updateToVersion(db, 920);

      assert.strictEqual(920, getPragma());
      assert.strictEqual(1000, getCountOfSignedKeys());
    });

    it('leaves 1000 existing keys alone', () => {
      assert.strictEqual(0, getCountOfSignedKeys());
      addPni();
      insertData(
        db,
        'signedPreKeys',
        getRangeOfSignedKeysForInsert(0, 1000, OUR_PNI)
      );
      assert.strictEqual(1000, getCountOfSignedKeys());
      assert.strictEqual(91, getPragma());

      updateToVersion(db, 920);

      assert.strictEqual(920, getPragma());
      assert.strictEqual(1000, getCountOfSignedKeys());
    });

    it('leaves keys with missing createdAt alone', () => {
      assert.strictEqual(0, getCountOfSignedKeys());
      addPni();
      insertData(
        db,
        'signedPreKeys',
        getRangeOfSignedKeysForInsert(0, 3000, OUR_PNI, {
          clearCreatedAt: true,
        })
      );
      assert.strictEqual(3000, getCountOfSignedKeys());
      assert.strictEqual(91, getPragma());

      updateToVersion(db, 920);

      assert.strictEqual(920, getPragma());
      assert.strictEqual(3000, getCountOfSignedKeys());
    });

    it('leaves extra ACI keys alone, even if above 1000', () => {
      assert.strictEqual(0, getCountOfSignedKeys());
      addPni();
      insertData(
        db,
        'signedPreKeys',
        getRangeOfSignedKeysForInsert(0, 3000, OUR_ACI)
      );
      assert.strictEqual(3000, getCountOfSignedKeys());
      assert.strictEqual(91, getPragma());

      updateToVersion(db, 920);

      assert.strictEqual(920, getPragma());
      assert.strictEqual(3000, getCountOfSignedKeys());
    });
  });
});
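
These tests lean on two helpers from `./helpers`: `updateToVersion(db, n)`, which runs migrations until user_version reaches n, and `insertData(db, table, rows)`, which writes each row into the table. As a rough sketch of what insertData might do under the simplest assumption (an id plus a serialized json column; the real helper is more general), something like this would be enough for the rows these tests build:

// Illustrative sketch only, not the real ./helpers module. It assumes each
// row is stored as an id plus a JSON-serialized `json` column, which is what
// migration 920's json_extract(json, '$.createdAt') generated column reads.
import type { Database } from '@signalapp/better-sqlite3';

function insertDataSketch(
  db: Database,
  table: string,
  rows: Array<{ id: string; json: unknown }>
): void {
  const insert = db.prepare(
    `INSERT INTO ${table} (id, json) VALUES ($id, $json);`
  );
  for (const row of rows) {
    // Serialize the object so json_extract() can see its fields later
    insert.run({ id: row.id, json: JSON.stringify(row.json) });
  }
}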

@@ -570,14 +570,7 @@ export default class AccountManager extends EventTarget {
       signedPreKey,
     };
 
-    try {
-      await this.server.registerKeys(toUpload, serviceIdKind);
-    } catch (error) {
-      log.error(`${logId} upload error:`, Errors.toLogFormat(error));
-      throw error;
-    }
+    await this.server.registerKeys(toUpload, serviceIdKind);
 
     await this._confirmKeys(toUpload, serviceIdKind);
 
     const { count: updatedPreKeyCount, pqCount: updatedKyberPreKeyCount } =

@@ -1,14 +1,13 @@
 // Copyright 2017 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
 
-import { isNumber } from 'lodash';
-
 import * as durations from '../util/durations';
 import { clearTimeoutIfNecessary } from '../util/clearTimeoutIfNecessary';
 import * as Registration from '../util/registration';
 import { ServiceIdKind } from '../types/ServiceId';
 import * as log from '../logging/log';
 import * as Errors from '../types/errors';
+import { HTTPError } from './Errors';
 
 const UPDATE_INTERVAL = 2 * durations.DAY;
 const UPDATE_TIME_STORAGE_KEY = 'nextScheduledUpdateKeyTime';
@@ -57,14 +56,36 @@ export class UpdateKeysListener {
       const accountManager = window.getAccountManager();
 
       await accountManager.maybeUpdateKeys(ServiceIdKind.ACI);
-      await accountManager.maybeUpdateKeys(ServiceIdKind.PNI);
+      try {
+        await accountManager.maybeUpdateKeys(ServiceIdKind.PNI);
+      } catch (error) {
+        if (
+          error instanceof HTTPError &&
+          (error.code === 422 || error.code === 403)
+        ) {
+          log.error(
+            `UpdateKeysListener.run: Got a ${error.code} uploading PNI keys; unlinking`
+          );
+          window.Whisper.events.trigger('unlinkAndDisconnect');
+        } else {
+          const errorString =
+            error instanceof HTTPError
+              ? error.code.toString()
+              : Errors.toLogFormat(error);
+          log.error(
+            `UpdateKeysListener.run: Failure uploading PNI keys. Not trying again. ${errorString}`
+          );
+        }
+      }
 
       this.scheduleNextUpdate();
       this.setTimeoutForNextRun();
     } catch (error) {
-      const errorString = isNumber(error.code)
-        ? error.code.toString()
-        : Errors.toLogFormat(error);
+      const errorString =
+        error instanceof HTTPError
+          ? error.code.toString()
+          : Errors.toLogFormat(error);
       log.error(
         `UpdateKeysListener.run failure - trying again in five minutes ${errorString}`
       );