Key cleanup before migration 88 and attempt vacuum only once in 920

This commit is contained in:
parent 6ae0edcc1e
commit be6331d75f

4 changed files with 615 additions and 146 deletions

ts/sql/migrations/87-cleanup.ts (new file, 54 lines)

@@ -0,0 +1,54 @@
// Copyright 2023 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

import type { Database } from '@signalapp/better-sqlite3';

import type { LoggerType } from '../../types/Logging';
import { cleanKeys } from './920-clean-more-keys';
import { sqlFragment } from '../util';

// Note: for many users, this is not what ran for them as migration 87. You can see that
// migration here: https://github.com/signalapp/Signal-Desktop/commit/671e16ae1f869627f355113d6397ccb62d5461d2

// The goal of this migration is to ensure that key cleanup happens before migration 88.

export default function updateToSchemaVersion87(
  currentVersion: number,
  db: Database,
  logger: LoggerType
): void {
  // We're checking for the version of the next migration here, not this version. We want
  // this to run if the user hasn't yet successfully run migration 88.
  if (currentVersion >= 88) {
    return;
  }

  db.transaction(() => {
    cleanKeys(
      db,
      logger,
      'updateToSchemaVersion87(cleanup)/kyberPreKeys',
      sqlFragment`kyberPreKeys`,
      sqlFragment`createdAt`,
      sqlFragment`ourUuid`
    );
    cleanKeys(
      db,
      logger,
      'updateToSchemaVersion87(cleanup)/preKeys',
      sqlFragment`preKeys`,
      sqlFragment`createdAt`,
      sqlFragment`ourUuid`
    );
    cleanKeys(
      db,
      logger,
      'updateToSchemaVersion87(cleanup)/signedPreKeys',
      sqlFragment`signedPreKeys`,
      sqlFragment`created_at`,
      sqlFragment`ourUuid`
    );
  })();

  logger.info('updateToSchemaVersion87(cleanup): success!');
}
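
[Editor's note] The guard above checks the next migration's version: the migration runner hands each updater a current user_version and lets the updater decide whether its work is still needed, so anyone who has not yet completed migration 88 gets this cleanup even if their schema already reads 87. A minimal sketch of that runner shape, with illustrative names (runMigrations is not the real entry point):

// Sketch only: the shape of runner the guard above assumes.
import type { Database } from '@signalapp/better-sqlite3';
import type { LoggerType } from '../../types/Logging';

type Migration = (currentVersion: number, db: Database, logger: LoggerType) => void;

function runMigrations(
  db: Database,
  logger: LoggerType,
  migrations: ReadonlyArray<Migration>
): void {
  for (const migration of migrations) {
    // Each updater sees the current user_version and returns early if its
    // work is already done.
    const currentVersion = db.pragma('user_version', { simple: true }) as number;
    migration(currentVersion, db, logger);
  }
}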

ts/sql/migrations/920-clean-more-keys.ts

@@ -23,156 +23,156 @@ export function updateToSchemaVersion920(
   }
 
   db.transaction(() => {
-    // Grab our PNI
-
-    let pni: PniString;
-    const pniJson = db
-      .prepare("SELECT json FROM items WHERE id IS 'pni'")
-      .pluck()
-      .get();
-    try {
-      const pniData = JSON.parse(pniJson);
-      pni = normalizePni(pniData.value, 'updateToSchemaVersion920');
-    } catch (error) {
-      db.pragma('user_version = 920');
-      if (pniJson) {
-        logger.warn(
-          'updateToSchemaVersion920: PNI found but did not parse',
-          Errors.toLogFormat(error)
-        );
-      } else {
-        logger.info('updateToSchemaVersion920: Our PNI not found');
-      }
-      return;
-    }
-
-    const cleanKeys = (
-      forLogging: string,
-      tableName: QueryFragment,
-      columnName: QueryFragment
-    ) => {
-      const logId = `updateToSchemaVersion920(${forLogging})`;
-
-      // Do overall count - if it's less than 1000, move on
-
-      const totalKeys = db
-        .prepare(sql`SELECT count(*) FROM ${tableName};`[0])
-        .pluck(true)
-        .get();
-      logger.info(`${logId}: Found ${totalKeys} total keys`);
-      if (totalKeys < 1000) {
-        return;
-      }
-
-      // Grab PNI-specific count
-
-      const [
-        beforeQuery,
-        beforeParams,
-      ] = sql`SELECT count(*) from ${tableName} WHERE ourServiceId = ${pni}`;
-      const beforeKeys = db.prepare(beforeQuery).pluck(true).get(beforeParams);
-      logger.info(`${logId}: Found ${beforeKeys} keys for PNI`);
-
-      // Create index to help us with all these queries
-
-      db.exec(
-        sql`
-          ALTER TABLE ${tableName}
-          ADD COLUMN createdAt NUMBER
-          GENERATED ALWAYS AS (json_extract(json, '$.${columnName}'));
-
-          CREATE INDEX ${tableName}_date
-          ON ${tableName} (ourServiceId, createdAt);
-        `[0]
-      );
-      logger.info(`${logId}: Temporary index created`);
-
-      // Fetch 500th-oldest timestamp for PNI
-
-      const [oldQuery, oldParams] = sql`
-        SELECT createdAt
-        FROM ${tableName}
-        WHERE
-          createdAt IS NOT NULL AND
-          ourServiceId = ${pni}
-        ORDER BY createdAt ASC
-        LIMIT 1
-        OFFSET 499
-      `;
-      const oldBoundary = db.prepare(oldQuery).pluck(true).get(oldParams);
-      logger.info(`${logId}: Found 500th-oldest timestamp: ${oldBoundary}`);
-
-      // Fetch 500th-newest timestamp for PNI
-
-      const [newQuery, newParams] = sql`
-        SELECT createdAt
-        FROM ${tableName}
-        WHERE
-          createdAt IS NOT NULL AND
-          ourServiceId = ${pni}
-        ORDER BY createdAt DESC
-        LIMIT 1
-        OFFSET 499
-      `;
-      const newBoundary = db.prepare(newQuery).pluck(true).get(newParams);
-      logger.info(`${logId}: Found 500th-newest timestamp: ${newBoundary}`);
-
-      // Delete everything in between for PNI
-
-      let result: RunResult;
-      const [deleteQuery, deleteParams] = sql`
-        DELETE FROM ${tableName}
-        WHERE
-          createdAt IS NOT NULL AND
-          createdAt > ${oldBoundary} AND
-          createdAt < ${newBoundary} AND
-          ourServiceId = ${pni}
-        LIMIT 10000;
-      `;
-      const preparedQuery = db.prepare(deleteQuery);
-      do {
-        result = preparedQuery.run(deleteParams);
-        logger.info(`${logId}: Deleted ${result.changes} keys`);
-      } while (result.changes > 0);
-      logger.info(`${logId}: Delete is complete!`);
-
-      // Get updated count for PNI
-
-      const [afterQuery, afterParams] = sql`
-        SELECT count(*)
-        FROM ${tableName}
-        WHERE ourServiceId = ${pni};
-      `;
-      const afterCount = db.prepare(afterQuery).pluck(true).get(afterParams);
-      logger.info(`${logId}: Found ${afterCount} keys for PNI after delete`);
-
-      db.exec(
-        sql`
-          DROP INDEX ${tableName}_date;
-          ALTER TABLE ${tableName} DROP COLUMN createdAt;
-        `[0]
-      );
-    };
-
     cleanKeys(
-      'kyberPreKeys',
+      db,
+      logger,
+      'updateToSchemaVersion920/kyberPreKeys',
       sqlFragment`kyberPreKeys`,
-      sqlFragment`createdAt`
+      sqlFragment`createdAt`,
+      sqlFragment`ourServiceId`
     );
     cleanKeys(
-      'signedPreKeys',
+      db,
+      logger,
+      'updateToSchemaVersion920/signedPreKeys',
       sqlFragment`signedPreKeys`,
-      sqlFragment`created_at`
+      sqlFragment`created_at`,
+      sqlFragment`ourServiceId`
     );
 
     logger.info('updateToSchemaVersion920: Done with deletions');
   })();
 
-  logger.info('updateToSchemaVersion920: Starting vacuum...');
+  db.pragma('user_version = 920');
+
+  logger.info(
+    'updateToSchemaVersion920: user_version set to 920. Starting vacuum...'
+  );
   db.exec('VACUUM;');
   logger.info('updateToSchemaVersion920: Vacuum complete.');
 
-  db.pragma('user_version = 920');
-
   logger.info('updateToSchemaVersion920: success!');
 }
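
[Editor's note] The reordering above is the "attempt vacuum only once" half of the commit: user_version is bumped to 920 as soon as the transactional work commits, and only then is VACUUM attempted. If the vacuum crashes or is killed partway (it can be slow and disk-hungry on large databases), the next launch sees user_version = 920 and skips the migration, so the vacuum is not retried on every start. The pattern in isolation, as a sketch:

// Sketch: persist the version bump before a risky one-shot maintenance step.
import SQL from '@signalapp/better-sqlite3';

const db = new SQL('example.db');

const version = db.pragma('user_version', { simple: true }) as number;
if (version < 920) {
  // ... schema/data changes committed inside a transaction ...

  // Mark the migration done first.
  db.pragma('user_version = 920');

  // If this VACUUM dies, the next launch sees 920 and skips straight past
  // the guard above instead of vacuuming again.
  db.exec('VACUUM;');
}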
+
+export function cleanKeys(
+  db: Database,
+  logger: LoggerType,
+  logId: string,
+  tableName: QueryFragment,
+  columnName: QueryFragment,
+  idField: QueryFragment
+): void {
+  // Grab our PNI
+  let pni: PniString;
+  const pniJson = db
+    .prepare("SELECT json FROM items WHERE id IS 'pni'")
+    .pluck()
+    .get();
+  try {
+    const pniData = JSON.parse(pniJson);
+    pni = normalizePni(pniData.value, logId);
+  } catch (error) {
+    if (pniJson) {
+      logger.warn(
+        `${logId}: PNI found but did not parse`,
+        Errors.toLogFormat(error)
+      );
+    } else {
+      logger.info(`${logId}: Our PNI not found`);
+    }
+    return;
+  }
+
+  // Do overall count - if it's less than 1000, move on
+  const totalKeys = db
+    .prepare(sql`SELECT count(*) FROM ${tableName};`[0])
+    .pluck(true)
+    .get();
+  logger.info(`${logId}: Found ${totalKeys} total keys`);
+  if (totalKeys < 1000) {
+    return;
+  }
+
+  // Grab PNI-specific count
+  const [
+    beforeQuery,
+    beforeParams,
+  ] = sql`SELECT count(*) from ${tableName} WHERE ${idField} = ${pni}`;
+  const beforeKeys = db.prepare(beforeQuery).pluck(true).get(beforeParams);
+  logger.info(`${logId}: Found ${beforeKeys} keys for PNI`);
+
+  // Create index to help us with all these queries
+  db.exec(
+    sql`
+      ALTER TABLE ${tableName}
+      ADD COLUMN createdAt NUMBER
+      GENERATED ALWAYS AS (json_extract(json, '$.${columnName}'));
+
+      CREATE INDEX ${tableName}_date
+      ON ${tableName} (${idField}, createdAt);
+    `[0]
+  );
+  logger.info(`${logId}: Temporary index created`);
+
+  // Fetch 500th-oldest timestamp for PNI
+  const [oldQuery, oldParams] = sql`
+    SELECT createdAt
+    FROM ${tableName}
+    WHERE
+      createdAt IS NOT NULL AND
+      ${idField} = ${pni}
+    ORDER BY createdAt ASC
+    LIMIT 1
+    OFFSET 499
+  `;
+  const oldBoundary = db.prepare(oldQuery).pluck(true).get(oldParams);
+  logger.info(`${logId}: Found 500th-oldest timestamp: ${oldBoundary}`);
+
+  // Fetch 500th-newest timestamp for PNI
+  const [newQuery, newParams] = sql`
+    SELECT createdAt
+    FROM ${tableName}
+    WHERE
+      createdAt IS NOT NULL AND
+      ${idField} = ${pni}
+    ORDER BY createdAt DESC
+    LIMIT 1
+    OFFSET 499
+  `;
+  const newBoundary = db.prepare(newQuery).pluck(true).get(newParams);
+  logger.info(`${logId}: Found 500th-newest timestamp: ${newBoundary}`);
+
+  // Delete everything in between for PNI
+  let result: RunResult;
+  const [deleteQuery, deleteParams] = sql`
+    DELETE FROM ${tableName}
+    WHERE
+      createdAt IS NOT NULL AND
+      createdAt > ${oldBoundary} AND
+      createdAt < ${newBoundary} AND
+      ${idField} = ${pni}
+    LIMIT 10000;
+  `;
+  const preparedQuery = db.prepare(deleteQuery);
+  do {
+    result = preparedQuery.run(deleteParams);
+    logger.info(`${logId}: Deleted ${result.changes} keys`);
+  } while (result.changes > 0);
+  logger.info(`${logId}: Delete is complete!`);
+
+  // Get updated count for PNI
+  const [afterQuery, afterParams] = sql`
+    SELECT count(*)
+    FROM ${tableName}
+    WHERE ${idField} = ${pni};
+  `;
+  const afterCount = db.prepare(afterQuery).pluck(true).get(afterParams);
+  logger.info(`${logId}: Found ${afterCount} keys for PNI after delete`);
+
+  db.exec(
+    sql`
+      DROP INDEX ${tableName}_date;
+      ALTER TABLE ${tableName} DROP COLUMN createdAt;
+    `[0]
+  );
+}
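
[Editor's note] On the deletion strategy in cleanKeys: the two OFFSET 499 boundary queries keep the 500 oldest and 500 newest keys for the PNI, and everything strictly between those timestamps is deleted in batches of 10,000 so no single statement holds the write lock for long. DELETE ... LIMIT requires a SQLite built with SQLITE_ENABLE_UPDATE_DELETE_LIMIT, which the @signalapp/better-sqlite3 build used here evidently enables. The batching loop in isolation (table and predicate are illustrative):

// Sketch: batched deletes with better-sqlite3; `expired` is a hypothetical column.
import type { Database, RunResult } from '@signalapp/better-sqlite3';

function deleteInBatches(db: Database, table: string, batchSize = 10000): number {
  // Requires a SQLite build with SQLITE_ENABLE_UPDATE_DELETE_LIMIT.
  const stmt = db.prepare(`DELETE FROM ${table} WHERE expired = 1 LIMIT ${batchSize}`);
  let total = 0;
  let result: RunResult;
  do {
    result = stmt.run();
    total += result.changes;
  } while (result.changes > 0);
  return total;
}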

ts/sql/migrations/index.ts

@@ -62,6 +62,7 @@ import updateToSchemaVersion83 from './83-mentions';
 import updateToSchemaVersion84 from './84-all-mentions';
 import updateToSchemaVersion85 from './85-add-kyber-keys';
 import updateToSchemaVersion86 from './86-story-replies-index';
+import updateToSchemaVersion87 from './87-cleanup';
 import updateToSchemaVersion88 from './88-service-ids';
 import updateToSchemaVersion89 from './89-call-history';
 import updateToSchemaVersion90 from './90-delete-story-reply-screenshot';

@@ -775,13 +776,7 @@ function updateToSchemaVersion17(
     );
   }
 
-  try {
-    db.exec('DROP INDEX messages_view_once;');
-  } catch (error) {
-    logger.info(
-      'updateToSchemaVersion17: Index messages_view_once did not already exist'
-    );
-  }
+  db.exec('DROP INDEX IF EXISTS messages_view_once;');
 
   db.exec(`
     CREATE INDEX messages_view_once ON messages (

@@ -2004,7 +1999,7 @@ export const SCHEMA_VERSIONS = [
   updateToSchemaVersion84,
   updateToSchemaVersion85,
   updateToSchemaVersion86,
-  // version 87 was dropped
+  updateToSchemaVersion87,
   updateToSchemaVersion88,
   updateToSchemaVersion89,
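
[Editor's note] The updateToSchemaVersion17 change swaps a try/catch around DROP INDEX for SQLite's idempotent IF EXISTS form: same behavior, without using an exception for control flow. The pattern in isolation (table and column names are illustrative):

// Sketch: idempotent drop-and-recreate of an index with better-sqlite3.
import SQL from '@signalapp/better-sqlite3';

const db = new SQL(':memory:');
db.exec('CREATE TABLE messages (id TEXT PRIMARY KEY, isViewOnce INTEGER);');

// Succeeds whether or not the index exists yet; no try/catch needed.
db.exec('DROP INDEX IF EXISTS messages_view_once;');
db.exec('CREATE INDEX messages_view_once ON messages (isViewOnce);');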

ts/test-node/sql/migration_87_test.ts (new file, 420 lines)

@@ -0,0 +1,420 @@
// Copyright 2023 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

import { assert } from 'chai';
import type { Database } from '@signalapp/better-sqlite3';
import SQL from '@signalapp/better-sqlite3';
import { v4 as generateGuid } from 'uuid';
import { range } from 'lodash';

import { insertData, updateToVersion } from './helpers';
import type {
  AciString,
  PniString,
  ServiceIdString,
} from '../../types/ServiceId';
import { normalizeAci, normalizePni } from '../../types/ServiceId';
import type {
  KyberPreKeyType,
  PreKeyType,
  SignedPreKeyType,
} from '../../sql/Interface';

type TestingKyberKey = Omit<
  KyberPreKeyType,
  'data' | 'isLastResort' | 'isConfirmed' | 'createdAt' | 'ourServiceId'
> & {
  createdAt: number | undefined;
  ourUuid: PniString | AciString;
};
type TestingPreKey = Omit<
  PreKeyType,
  | 'privateKey'
  | 'publicKey'
  | 'isLastResort'
  | 'isConfirmed'
  | 'createdAt'
  | 'ourServiceId'
> & {
  createdAt: number | undefined;
  ourUuid: PniString | AciString;
};
type TestingSignedKey = Omit<
  SignedPreKeyType,
  'privateKey' | 'publicKey' | 'confirmed' | 'created_at' | 'ourServiceId'
> & {
  created_at: number | undefined;
  ourUuid: PniString | AciString;
};

describe('SQL/updateToSchemaVersion87(cleanup)', () => {
  let db: Database;

  const OUR_ACI = normalizeAci(
    generateGuid(),
    'updateToSchemaVersion87(cleanup) test'
  );
  const OUR_PNI = normalizePni(
    `PNI:${generateGuid()}`,
    'updateToSchemaVersion87(cleanup) test'
  );
  let idCount = 0;

  beforeEach(() => {
    db = new SQL(':memory:');
    updateToVersion(db, 86);
  });

  afterEach(() => {
    db.close();
  });

  function addPni() {
    insertData(db, 'items', [
      {
        id: 'pni',
        json: {
          id: 'pni',
          value: OUR_PNI,
        },
      },
    ]);
  }

  function getCountOfKyberKeys(): number {
    return db.prepare('SELECT count(*) FROM kyberPreKeys;').pluck(true).get();
  }
  function getCountOfPreKeys(): number {
    return db.prepare('SELECT count(*) FROM preKeys;').pluck(true).get();
  }
  function getCountOfSignedKeys(): number {
    return db.prepare('SELECT count(*) FROM signedPreKeys;').pluck(true).get();
  }

  function getPragma(): number {
    return db.prepare('PRAGMA user_version;').pluck(true).get();
  }

  function generateKyberKey(
    createdAt: number | undefined,
    ourServiceId: ServiceIdString
  ): TestingKyberKey {
    idCount += 1;

    return {
      createdAt,
      id: `${ourServiceId}:${idCount}`,
      keyId: idCount,
      ourUuid: ourServiceId,
    };
  }

  function generatePreKey(
    createdAt: number | undefined,
    ourServiceId: ServiceIdString
  ): TestingPreKey {
    idCount += 1;

    return {
      createdAt,
      id: `${ourServiceId}:${idCount}`,
      keyId: idCount,
      ourUuid: ourServiceId,
    };
  }

  function generateSignedKey(
    createdAt: number | undefined,
    ourServiceId: ServiceIdString
  ): TestingSignedKey {
    idCount += 1;

    return {
      created_at: createdAt,
      id: `${ourServiceId}:${idCount}`,
      keyId: idCount,
      ourUuid: ourServiceId,
    };
  }

  function getRangeOfKyberKeysForInsert(
    start: number,
    end: number,
    ourServiceId: ServiceIdString,
    options?: {
      clearCreatedAt?: boolean;
    }
  ): Array<{ id: string; json: TestingKyberKey }> {
    return range(start, end).map(createdAt => {
      const key = generateKyberKey(
        options?.clearCreatedAt ? undefined : createdAt,
        ourServiceId
      );

      return {
        id: key.id,
        json: key,
      };
    });
  }

  it('handles missing PNI', () => {
    assert.strictEqual(0, getCountOfKyberKeys());
    insertData(
      db,
      'kyberPreKeys',
      getRangeOfKyberKeysForInsert(0, 3000, OUR_ACI)
    );
    assert.strictEqual(3000, getCountOfKyberKeys());
    assert.strictEqual(86, getPragma());

    updateToVersion(db, 88);

    assert.strictEqual(88, getPragma());
    assert.strictEqual(3000, getCountOfKyberKeys());
  });

  describe('kyberPreKeys', () => {
    it('deletes 2000 extra keys', () => {
      assert.strictEqual(0, getCountOfKyberKeys());
      addPni();
      insertData(
        db,
        'kyberPreKeys',
        getRangeOfKyberKeysForInsert(0, 3000, OUR_PNI)
      );
      assert.strictEqual(3000, getCountOfKyberKeys());
      assert.strictEqual(86, getPragma());

      updateToVersion(db, 88);

      assert.strictEqual(88, getPragma());
      assert.strictEqual(1000, getCountOfKyberKeys());
    });

    it('leaves 1000 existing keys alone', () => {
      assert.strictEqual(0, getCountOfKyberKeys());
      addPni();
      insertData(
        db,
        'kyberPreKeys',
        getRangeOfKyberKeysForInsert(0, 1000, OUR_PNI)
      );
      assert.strictEqual(1000, getCountOfKyberKeys());
      assert.strictEqual(86, getPragma());

      updateToVersion(db, 88);

      assert.strictEqual(88, getPragma());
      assert.strictEqual(1000, getCountOfKyberKeys());
    });

    it('leaves keys with missing createdAt alone', () => {
      assert.strictEqual(0, getCountOfKyberKeys());
      addPni();
      insertData(
        db,
        'kyberPreKeys',
        getRangeOfKyberKeysForInsert(0, 3000, OUR_PNI, { clearCreatedAt: true })
      );
      assert.strictEqual(3000, getCountOfKyberKeys());
      assert.strictEqual(86, getPragma());

      updateToVersion(db, 88);

      assert.strictEqual(88, getPragma());
      assert.strictEqual(3000, getCountOfKyberKeys());
    });

    it('leaves extra ACI keys alone, even if above 1000', () => {
      assert.strictEqual(0, getCountOfKyberKeys());
      addPni();
      insertData(
        db,
        'kyberPreKeys',
        getRangeOfKyberKeysForInsert(0, 3000, OUR_ACI)
      );
      assert.strictEqual(3000, getCountOfKyberKeys());
      assert.strictEqual(86, getPragma());

      updateToVersion(db, 88);

      assert.strictEqual(88, getPragma());
      assert.strictEqual(3000, getCountOfKyberKeys());
    });
  });

  describe('preKeys', () => {
    function getRangeOfPreKeysForInsert(
      start: number,
      end: number,
      ourServiceId: ServiceIdString,
      options?: {
        clearCreatedAt?: boolean;
      }
    ): Array<{ id: string; json: TestingPreKey }> {
      return range(start, end).map(createdAt => {
        const key = generatePreKey(
          options?.clearCreatedAt ? undefined : createdAt,
          ourServiceId
        );

        return {
          id: key.id,
          json: key,
        };
      });
    }

    it('deletes 2000 extra keys', () => {
      assert.strictEqual(0, getCountOfPreKeys());
      addPni();
      insertData(db, 'preKeys', getRangeOfPreKeysForInsert(0, 3000, OUR_PNI));
      assert.strictEqual(3000, getCountOfPreKeys());
      assert.strictEqual(86, getPragma());

      updateToVersion(db, 88);

      assert.strictEqual(88, getPragma());
      assert.strictEqual(1000, getCountOfPreKeys());
    });

    it('leaves 1000 existing keys alone', () => {
      assert.strictEqual(0, getCountOfPreKeys());
      addPni();
      insertData(db, 'preKeys', getRangeOfPreKeysForInsert(0, 1000, OUR_PNI));
      assert.strictEqual(1000, getCountOfPreKeys());
      assert.strictEqual(86, getPragma());

      updateToVersion(db, 88);

      assert.strictEqual(88, getPragma());
      assert.strictEqual(1000, getCountOfPreKeys());
    });

    it('leaves keys with missing createdAt alone', () => {
      assert.strictEqual(0, getCountOfPreKeys());
      addPni();
      insertData(
        db,
        'preKeys',
        getRangeOfPreKeysForInsert(0, 3000, OUR_PNI, {
          clearCreatedAt: true,
        })
      );
      assert.strictEqual(3000, getCountOfPreKeys());
      assert.strictEqual(86, getPragma());

      updateToVersion(db, 88);

      assert.strictEqual(88, getPragma());
      assert.strictEqual(3000, getCountOfPreKeys());
    });

    it('leaves extra ACI keys alone, even if above 1000', () => {
      assert.strictEqual(0, getCountOfPreKeys());
      addPni();
      insertData(db, 'preKeys', getRangeOfPreKeysForInsert(0, 3000, OUR_ACI));
      assert.strictEqual(3000, getCountOfPreKeys());
      assert.strictEqual(86, getPragma());

      updateToVersion(db, 88);

      assert.strictEqual(88, getPragma());
      assert.strictEqual(3000, getCountOfPreKeys());
    });
  });

  describe('signedPreKeys', () => {
    function getRangeOfSignedKeysForInsert(
      start: number,
      end: number,
      ourServiceId: ServiceIdString,
      options?: {
        clearCreatedAt?: boolean;
      }
    ): Array<{ id: string; json: TestingSignedKey }> {
      return range(start, end).map(createdAt => {
        const key = generateSignedKey(
          options?.clearCreatedAt ? undefined : createdAt,
          ourServiceId
        );

        return {
          id: key.id,
          json: key,
        };
      });
    }

    it('deletes 2000 extra keys', () => {
      assert.strictEqual(0, getCountOfSignedKeys());
      addPni();
      insertData(
        db,
        'signedPreKeys',
        getRangeOfSignedKeysForInsert(0, 3000, OUR_PNI)
      );
      assert.strictEqual(3000, getCountOfSignedKeys());
      assert.strictEqual(86, getPragma());

      updateToVersion(db, 88);

      assert.strictEqual(88, getPragma());
      assert.strictEqual(1000, getCountOfSignedKeys());
    });

    it('leaves 1000 existing keys alone', () => {
      assert.strictEqual(0, getCountOfSignedKeys());
      addPni();
      insertData(
        db,
        'signedPreKeys',
        getRangeOfSignedKeysForInsert(0, 1000, OUR_PNI)
      );
      assert.strictEqual(1000, getCountOfSignedKeys());
      assert.strictEqual(86, getPragma());

      updateToVersion(db, 88);

      assert.strictEqual(88, getPragma());
      assert.strictEqual(1000, getCountOfSignedKeys());
    });

    it('leaves keys with missing createdAt alone', () => {
      assert.strictEqual(0, getCountOfSignedKeys());
      addPni();
      insertData(
        db,
        'signedPreKeys',
        getRangeOfSignedKeysForInsert(0, 3000, OUR_PNI, {
          clearCreatedAt: true,
        })
      );
      assert.strictEqual(3000, getCountOfSignedKeys());
      assert.strictEqual(86, getPragma());

      updateToVersion(db, 88);

      assert.strictEqual(88, getPragma());
      assert.strictEqual(3000, getCountOfSignedKeys());
    });

    it('leaves extra ACI keys alone, even if above 1000', () => {
      assert.strictEqual(0, getCountOfSignedKeys());
      addPni();
      insertData(
        db,
        'signedPreKeys',
        getRangeOfSignedKeysForInsert(0, 3000, OUR_ACI)
      );
      assert.strictEqual(3000, getCountOfSignedKeys());
      assert.strictEqual(86, getPragma());

      updateToVersion(db, 88);

      assert.strictEqual(88, getPragma());
      assert.strictEqual(3000, getCountOfSignedKeys());
    });
  });
});
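
[Editor's note] The tests exercise the real migration chain rather than calling the updater directly: each case opens an in-memory database, migrates to version 86, seeds rows with insertData, runs updateToVersion(db, 88), and asserts on both the key counts and PRAGMA user_version. insertData comes from ./helpers; a plausible sketch of its shape (an assumption, not the real helper):

// Hypothetical sketch of the ./helpers insertData used above; the real helper
// may differ. Rows land as (id, json) with the JSON serialized, matching the
// json_extract(json, ...) reads in the migrations.
import type { Database } from '@signalapp/better-sqlite3';

export function insertData(
  db: Database,
  table: string,
  rows: ReadonlyArray<{ id: string; json: unknown }>
): void {
  const insert = db.prepare(
    `INSERT INTO ${table} (id, json) VALUES ($id, $json)`
  );
  db.transaction(() => {
    for (const row of rows) {
      insert.run({ id: row.id, json: JSON.stringify(row.json) });
    }
  })();
}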