Remove all IndexedDB migration code

Scott Nonnenberg 2020-04-28 14:18:41 -07:00
parent 752cd75c54
commit 464c814a95
22 changed files with 69 additions and 1673 deletions

js/modules/data.d.ts

@@ -1,29 +0,0 @@
export function searchMessages(query: string): Promise<Array<any>>;
export function searchConversations(query: string): Promise<Array<any>>;
export function searchMessagesInConversation(
query: string,
conversationId: string
): Promise<Array<any>>;
export function updateStickerLastUsed(
packId: string,
stickerId: number,
time: number
): Promise<void>;
export function updateStickerPackStatus(
packId: string,
status: 'known' | 'downloaded' | 'installed' | 'error' | 'pending',
options?: { timestamp: number }
): Promise<void>;
export function getRecentStickers(): Promise<
Array<{
id: number;
packId: string;
}>
>;
export function updateEmojiUsage(shortName: string): Promise<void>;
export function getRecentEmojis(
limit: number
): Promise<Array<{ shortName: string; lastUsage: string }>>;

js/modules/database.js

@@ -1,68 +0,0 @@
/* global indexedDB */
// Module for interacting with IndexedDB without Backbone IndexedDB adapter
// and using promises. Revisit use of `idb` dependency as it might cover
// this functionality.
const { isObject, isNumber } = require('lodash');
exports.open = (name, version, { onUpgradeNeeded } = {}) => {
const request = indexedDB.open(name, version);
return new Promise((resolve, reject) => {
request.onblocked = () => reject(new Error('Database blocked'));
request.onupgradeneeded = event => {
const hasRequestedSpecificVersion = isNumber(version);
if (!hasRequestedSpecificVersion) {
return;
}
const { newVersion, oldVersion } = event;
if (onUpgradeNeeded) {
const { transaction } = event.target;
onUpgradeNeeded({ oldVersion, transaction });
return;
}
reject(
new Error(
'Database upgrade required:' +
` oldVersion: ${oldVersion}, newVersion: ${newVersion}`
)
);
};
request.onerror = event => reject(event.target.error);
request.onsuccess = event => {
const connection = event.target.result;
resolve(connection);
};
});
};
exports.completeTransaction = transaction =>
new Promise((resolve, reject) => {
transaction.addEventListener('abort', event => reject(event.target.error));
transaction.addEventListener('error', event => reject(event.target.error));
transaction.addEventListener('complete', () => resolve());
});
exports.getVersion = async name => {
const connection = await exports.open(name);
const { version } = connection;
connection.close();
return version;
};
exports.getCount = async ({ store } = {}) => {
if (!isObject(store)) {
throw new TypeError("'store' is required");
}
const request = store.count();
return new Promise((resolve, reject) => {
request.onerror = event => reject(event.target.error);
request.onsuccess = event => resolve(event.target.result);
});
};

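The wrappers above convert IndexedDB's callback-based API into promises. A minimal usage sketch (illustrative only: logDatabaseStatus is not part of this codebase, and 'signal' is the database id used by the migrations module later in this commit):

const database = require('./database');

async function logDatabaseStatus() {
  // Opening without a version can never trigger an upgrade.
  const version = await database.getVersion('signal');
  console.log(`current IndexedDB version: ${version}`);

  // Opening with an explicit numeric version requires an onUpgradeNeeded
  // handler; otherwise `open` rejects if an upgrade is pending.
  const connection = await database.open('signal', version, {
    onUpgradeNeeded: ({ oldVersion, transaction }) => {
      console.log(`upgrading from version ${oldVersion}`);
      // create object stores and indexes via the upgrade transaction here
    },
  });
  connection.close();
}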
js/modules/deferred_to_promise.d.ts

@@ -1,3 +0,0 @@
export function deferredToPromise<T>(
deferred: JQuery.Deferred<any, any, any>
): Promise<T>;

js/modules/deferred_to_promise.js

@@ -1,3 +0,0 @@
exports.deferredToPromise = deferred =>
// eslint-disable-next-line more/no-then
new Promise((resolve, reject) => deferred.then(resolve, reject));

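Backbone's sync returns a jQuery Deferred, which is thenable but not a real Promise; the shim above bridges that gap. A short illustration (closeAllConnections is hypothetical here, though run_migrations.js below uses the same underlying call):

const { deferredToPromise } = require('./deferred_to_promise');

async function closeAllConnections(Backbone) {
  // Wrapping the Deferred lets callers use async/await directly.
  await deferredToPromise(Backbone.sync('closeall'));
}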
js/modules/indexeddb.js

@@ -1,146 +1,13 @@
-/* global window, Whisper, textsecure, setTimeout */
-const { isFunction } = require('lodash');
-const MessageDataMigrator = require('./messages_data_migrator');
-const {
-  run,
-  getLatestVersion,
-  getDatabase,
-} = require('./migrations/migrations');
+/* global window, Whisper, setTimeout */
const MESSAGE_MINIMUM_VERSION = 7;
module.exports = {
doesDatabaseExist,
mandatoryMessageUpgrade,
MESSAGE_MINIMUM_VERSION,
migrateAllToSQLCipher,
removeDatabase,
runMigrations,
};
async function runMigrations() {
window.log.info('Run migrations on database with attachment data');
await run({
Backbone: window.Backbone,
logger: window.log,
});
Whisper.Database.migrations[0].version = getLatestVersion();
}
async function mandatoryMessageUpgrade({ upgradeMessageSchema } = {}) {
if (!isFunction(upgradeMessageSchema)) {
throw new Error(
'mandatoryMessageUpgrade: upgradeMessageSchema must be a function!'
);
}
const NUM_MESSAGES_PER_BATCH = 10;
window.log.info(
'upgradeMessages: Mandatory message schema upgrade started.',
`Target version: ${MESSAGE_MINIMUM_VERSION}`
);
let isMigrationWithoutIndexComplete = false;
while (!isMigrationWithoutIndexComplete) {
const database = getDatabase();
// eslint-disable-next-line no-await-in-loop
const batchWithoutIndex = await MessageDataMigrator.processNextBatchWithoutIndex(
{
databaseName: database.name,
minDatabaseVersion: database.version,
numMessagesPerBatch: NUM_MESSAGES_PER_BATCH,
upgradeMessageSchema,
maxVersion: MESSAGE_MINIMUM_VERSION,
BackboneMessage: Whisper.Message,
saveMessage: window.Signal.Data.saveLegacyMessage,
}
);
window.log.info(
'upgradeMessages: upgrade without index',
batchWithoutIndex
);
isMigrationWithoutIndexComplete = batchWithoutIndex.done;
}
window.log.info('upgradeMessages: upgrade without index complete!');
let isMigrationWithIndexComplete = false;
while (!isMigrationWithIndexComplete) {
// eslint-disable-next-line no-await-in-loop
const batchWithIndex = await MessageDataMigrator.processNext({
BackboneMessage: Whisper.Message,
BackboneMessageCollection: Whisper.MessageCollection,
numMessagesPerBatch: NUM_MESSAGES_PER_BATCH,
upgradeMessageSchema,
getMessagesNeedingUpgrade:
window.Signal.Data.getLegacyMessagesNeedingUpgrade,
saveMessage: window.Signal.Data.saveLegacyMessage,
maxVersion: MESSAGE_MINIMUM_VERSION,
});
window.log.info('upgradeMessages: upgrade with index', batchWithIndex);
isMigrationWithIndexComplete = batchWithIndex.done;
}
window.log.info('upgradeMessages: upgrade with index complete!');
window.log.info('upgradeMessages: Message schema upgrade complete');
}
async function migrateAllToSQLCipher({ writeNewAttachmentData, Views } = {}) {
if (!isFunction(writeNewAttachmentData)) {
throw new Error(
'migrateAllToSQLCipher: writeNewAttachmentData must be a function'
);
}
if (!Views) {
throw new Error('migrateAllToSQLCipher: Views must be provided!');
}
let totalMessages;
const db = await Whisper.Database.open();
function showMigrationStatus(current) {
const status = `${current}/${totalMessages}`;
Views.Initialization.setMessage(
window.i18n('migratingToSQLCipher', [status])
);
}
try {
totalMessages = await MessageDataMigrator.getNumMessages({
connection: db,
});
} catch (error) {
window.log.error(
'background.getNumMessages error:',
error && error.stack ? error.stack : error
);
totalMessages = 0;
}
if (totalMessages) {
window.log.info(`About to migrate ${totalMessages} messages`);
showMigrationStatus(0);
} else {
window.log.info('About to migrate non-messages');
}
await window.Signal.migrateToSQL({
db,
clearStores: Whisper.Database.clearStores,
handleDOMException: Whisper.Database.handleDOMException,
arrayBufferToString: textsecure.MessageReceiver.arrayBufferToStringBase64,
countCallback: count => {
window.log.info(`Migration: ${count} messages complete`);
showMigrationStatus(count);
},
writeNewAttachmentData,
});
db.close();
}
async function doesDatabaseExist() {
window.log.info('Checking for the existence of IndexedDB data...');
return new Promise((resolve, reject) => {

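The body of doesDatabaseExist is truncated above. The usual shape of such a check, sketched here as an assumption rather than this file's exact code: open with no version, and treat an onupgradeneeded event with oldVersion === 0 as proof that no database previously existed (a real implementation then deletes the empty database the open just created).

function sketchDoesDatabaseExist(name) {
  return new Promise((resolve, reject) => {
    const request = window.indexedDB.open(name);
    let existed = true;
    request.onerror = () => reject(request.error);
    // A brand-new database fires onupgradeneeded with oldVersion === 0.
    request.onupgradeneeded = event => {
      if (event.oldVersion === 0) {
        existed = false;
      }
    };
    request.onsuccess = () => {
      request.result.close();
      resolve(existed);
    };
  });
}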
js/modules/messages_data_migrator.js

@@ -1,18 +1,10 @@
// Module to upgrade the schema of messages, e.g. migrate attachments to disk.
-// `dangerouslyProcessAllWithoutIndex` purposely doesn't rely on our Backbone
-// IndexedDB adapter to prevent automatic migrations. Rather, it uses direct
-// IndexedDB access. This includes avoiding usage of `storage` module which uses
-// Backbone under the hood.
+// Ensures that messages in database are at the right schema.
-/* global IDBKeyRange, window */
+/* global window */
-const { isFunction, isNumber, isObject, isString, last } = require('lodash');
+const { isFunction, isNumber } = require('lodash');
-const database = require('./database');
const Message = require('./types/message');
-const settings = require('./settings');
-const MESSAGES_STORE_NAME = 'messages';
exports.processNext = async ({
BackboneMessage,
@@ -96,310 +88,3 @@ exports.processNext = async ({
totalDuration,
};
};
exports.dangerouslyProcessAllWithoutIndex = async ({
databaseName,
minDatabaseVersion,
numMessagesPerBatch,
upgradeMessageSchema,
logger,
maxVersion = Message.CURRENT_SCHEMA_VERSION,
saveMessage,
BackboneMessage,
} = {}) => {
if (!isString(databaseName)) {
throw new TypeError("'databaseName' must be a string");
}
if (!isNumber(minDatabaseVersion)) {
throw new TypeError("'minDatabaseVersion' must be a number");
}
if (!isNumber(numMessagesPerBatch)) {
throw new TypeError("'numMessagesPerBatch' must be a number");
}
if (!isFunction(upgradeMessageSchema)) {
throw new TypeError("'upgradeMessageSchema' is required");
}
if (!isFunction(BackboneMessage)) {
throw new TypeError("'upgradeMessageSchema' is required");
}
if (!isFunction(saveMessage)) {
throw new TypeError("'upgradeMessageSchema' is required");
}
const connection = await database.open(databaseName);
const databaseVersion = connection.version;
const isValidDatabaseVersion = databaseVersion >= minDatabaseVersion;
logger.info('Database status', {
databaseVersion,
isValidDatabaseVersion,
minDatabaseVersion,
});
if (!isValidDatabaseVersion) {
throw new Error(
`Expected database version (${databaseVersion})` +
` to be at least ${minDatabaseVersion}`
);
}
// NOTE: Even if we make this async using `then`, requesting `count` on an
// IndexedDB store blocks all subsequent transactions, so we might as well
// explicitly wait for it here:
const numTotalMessages = await exports.getNumMessages({ connection });
const migrationStartTime = Date.now();
let numCumulativeMessagesProcessed = 0;
// eslint-disable-next-line no-constant-condition
while (true) {
// eslint-disable-next-line no-await-in-loop
const status = await _processBatch({
connection,
numMessagesPerBatch,
upgradeMessageSchema,
maxVersion,
saveMessage,
BackboneMessage,
});
if (status.done) {
break;
}
numCumulativeMessagesProcessed += status.numMessagesProcessed;
logger.info(
'Upgrade message schema:',
Object.assign({}, status, {
numTotalMessages,
numCumulativeMessagesProcessed,
})
);
}
logger.info('Close database connection');
connection.close();
const totalDuration = Date.now() - migrationStartTime;
logger.info('Attachment migration complete:', {
totalDuration,
totalMessagesProcessed: numCumulativeMessagesProcessed,
});
};
exports.processNextBatchWithoutIndex = async ({
databaseName,
minDatabaseVersion,
numMessagesPerBatch,
upgradeMessageSchema,
maxVersion,
BackboneMessage,
saveMessage,
} = {}) => {
if (!isFunction(upgradeMessageSchema)) {
throw new TypeError("'upgradeMessageSchema' is required");
}
const connection = await _getConnection({ databaseName, minDatabaseVersion });
const batch = await _processBatch({
connection,
numMessagesPerBatch,
upgradeMessageSchema,
maxVersion,
BackboneMessage,
saveMessage,
});
return batch;
};
// Private API
const _getConnection = async ({ databaseName, minDatabaseVersion }) => {
if (!isString(databaseName)) {
throw new TypeError("'databaseName' must be a string");
}
if (!isNumber(minDatabaseVersion)) {
throw new TypeError("'minDatabaseVersion' must be a number");
}
const connection = await database.open(databaseName);
const databaseVersion = connection.version;
const isValidDatabaseVersion = databaseVersion >= minDatabaseVersion;
if (!isValidDatabaseVersion) {
throw new Error(
`Expected database version (${databaseVersion})` +
` to be at least ${minDatabaseVersion}`
);
}
return connection;
};
const _processBatch = async ({
connection,
numMessagesPerBatch,
upgradeMessageSchema,
maxVersion,
BackboneMessage,
saveMessage,
} = {}) => {
if (!isObject(connection)) {
throw new TypeError('_processBatch: connection must be an object');
}
if (!isFunction(upgradeMessageSchema)) {
throw new TypeError('_processBatch: upgradeMessageSchema is required');
}
if (!isNumber(numMessagesPerBatch)) {
throw new TypeError('_processBatch: numMessagesPerBatch is required');
}
if (!isNumber(maxVersion)) {
throw new TypeError('_processBatch: maxVersion is required');
}
if (!isFunction(BackboneMessage)) {
throw new TypeError('_processBatch: BackboneMessage is required');
}
if (!isFunction(saveMessage)) {
throw new TypeError('_processBatch: saveMessage is required');
}
const isAttachmentMigrationComplete = await settings.isAttachmentMigrationComplete(
connection
);
if (isAttachmentMigrationComplete) {
return {
done: true,
};
}
const lastProcessedIndex = await settings.getAttachmentMigrationLastProcessedIndex(
connection
);
const fetchUnprocessedMessagesStartTime = Date.now();
let unprocessedMessages;
try {
unprocessedMessages = await _dangerouslyFetchMessagesRequiringSchemaUpgradeWithoutIndex(
{
connection,
count: numMessagesPerBatch,
lastIndex: lastProcessedIndex,
}
);
} catch (error) {
window.log.error(
'_processBatch error:',
error && error.stack ? error.stack : error
);
await settings.markAttachmentMigrationComplete(connection);
await settings.deleteAttachmentMigrationLastProcessedIndex(connection);
return {
done: true,
};
}
const fetchDuration = Date.now() - fetchUnprocessedMessagesStartTime;
const upgradeStartTime = Date.now();
const upgradedMessages = await Promise.all(
unprocessedMessages.map(message =>
upgradeMessageSchema(message, { maxVersion })
)
);
const upgradeDuration = Date.now() - upgradeStartTime;
const saveMessagesStartTime = Date.now();
const transaction = connection.transaction(MESSAGES_STORE_NAME, 'readwrite');
const transactionCompletion = database.completeTransaction(transaction);
await Promise.all(
upgradedMessages.map(message =>
saveMessage(message, { Message: BackboneMessage })
)
);
await transactionCompletion;
const saveDuration = Date.now() - saveMessagesStartTime;
const numMessagesProcessed = upgradedMessages.length;
const done = numMessagesProcessed < numMessagesPerBatch;
const lastMessage = last(upgradedMessages);
const newLastProcessedIndex = lastMessage ? lastMessage.id : null;
if (!done) {
await settings.setAttachmentMigrationLastProcessedIndex(
connection,
newLastProcessedIndex
);
} else {
await settings.markAttachmentMigrationComplete(connection);
await settings.deleteAttachmentMigrationLastProcessedIndex(connection);
}
const batchTotalDuration = Date.now() - fetchUnprocessedMessagesStartTime;
return {
batchTotalDuration,
done,
fetchDuration,
lastProcessedIndex,
newLastProcessedIndex,
numMessagesProcessed,
saveDuration,
targetSchemaVersion: Message.CURRENT_SCHEMA_VERSION,
upgradeDuration,
};
};
// NOTE: Named dangerous because it is not as efficient as using our
// `messages` `schemaVersion` index:
const _dangerouslyFetchMessagesRequiringSchemaUpgradeWithoutIndex = ({
connection,
count,
lastIndex,
} = {}) => {
if (!isObject(connection)) {
throw new TypeError("'connection' is required");
}
if (!isNumber(count)) {
throw new TypeError("'count' is required");
}
if (lastIndex && !isString(lastIndex)) {
throw new TypeError("'lastIndex' must be a string");
}
const hasLastIndex = Boolean(lastIndex);
const transaction = connection.transaction(MESSAGES_STORE_NAME, 'readonly');
const messagesStore = transaction.objectStore(MESSAGES_STORE_NAME);
const excludeLowerBound = true;
const range = hasLastIndex
? IDBKeyRange.lowerBound(lastIndex, excludeLowerBound)
: undefined;
return new Promise((resolve, reject) => {
const items = [];
const request = messagesStore.openCursor(range);
request.onsuccess = event => {
const cursor = event.target.result;
const hasMoreData = Boolean(cursor);
if (!hasMoreData || items.length === count) {
resolve(items);
return;
}
const item = cursor.value;
items.push(item);
cursor.continue();
};
request.onerror = event => reject(event.target.error);
});
};
exports.getNumMessages = async ({ connection } = {}) => {
if (!isObject(connection)) {
throw new TypeError("'connection' is required");
}
const transaction = connection.transaction(MESSAGES_STORE_NAME, 'readonly');
const messagesStore = transaction.objectStore(MESSAGES_STORE_NAME);
const numTotalMessages = await database.getCount({ store: messagesStore });
await database.completeTransaction(transaction);
return numTotalMessages;
};

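The settings calls in _processBatch implement crash-safe resumption: the last processed primary key is persisted after every batch, so an interrupted migration restarts where it stopped rather than from the top. Reduced to a sketch (processBatch stands in for _processBatch; the settings helpers are the ones this file requires):

async function migrateInBatches({ connection, processBatch, settings }) {
  for (;;) {
    // eslint-disable-next-line no-await-in-loop
    const lastIndex = await settings.getAttachmentMigrationLastProcessedIndex(
      connection
    );
    // eslint-disable-next-line no-await-in-loop
    const { done, newLastProcessedIndex } = await processBatch({ lastIndex });
    if (done) {
      return;
    }
    // eslint-disable-next-line no-await-in-loop
    await settings.setAttachmentMigrationLastProcessedIndex(
      connection,
      newLastProcessedIndex
    );
  }
}

Note that the completion markers (markAttachmentMigrationComplete and the checkpoint deletion) live inside the batch processor above, so the final checkpoint is written together with the decision to stop.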
js/modules/migrate_to_sql.js

@@ -1,409 +0,0 @@
/* global window, IDBKeyRange */
const { includes, isFunction, isString, last, map } = require('lodash');
const {
bulkAddSessions,
bulkAddIdentityKeys,
bulkAddPreKeys,
bulkAddSignedPreKeys,
bulkAddItems,
removeSessionById,
removeIdentityKeyById,
removePreKeyById,
removeSignedPreKeyById,
removeItemById,
saveMessages,
_removeMessages,
saveUnprocesseds,
removeUnprocessed,
saveConversations,
_removeConversations,
} = require('../../ts/sql/Client').default;
const {
getMessageExportLastIndex,
setMessageExportLastIndex,
getMessageExportCount,
setMessageExportCount,
getUnprocessedExportLastIndex,
setUnprocessedExportLastIndex,
} = require('./settings');
const { migrateConversation } = require('./types/conversation');
module.exports = {
migrateToSQL,
};
async function migrateToSQL({
db,
clearStores,
handleDOMException,
countCallback,
arrayBufferToString,
writeNewAttachmentData,
}) {
if (!db) {
throw new Error('Need db for IndexedDB connection!');
}
if (!isFunction(clearStores)) {
throw new Error('Need clearStores function!');
}
if (!isFunction(arrayBufferToString)) {
throw new Error('Need arrayBufferToString function!');
}
if (!isFunction(handleDOMException)) {
throw new Error('Need handleDOMException function!');
}
window.log.info('migrateToSQL: start');
let [lastIndex, doneSoFar] = await Promise.all([
getMessageExportLastIndex(db),
getMessageExportCount(db),
]);
let complete = false;
while (!complete) {
// eslint-disable-next-line no-await-in-loop
const status = await migrateStoreToSQLite({
db,
save: saveMessages,
remove: _removeMessages,
storeName: 'messages',
handleDOMException,
lastIndex,
});
({ complete, lastIndex } = status);
// eslint-disable-next-line no-await-in-loop
await Promise.all([
setMessageExportCount(db, doneSoFar),
setMessageExportLastIndex(db, lastIndex),
]);
const { count } = status;
doneSoFar += count;
if (countCallback) {
countCallback(doneSoFar);
}
}
window.log.info('migrateToSQL: migrate of messages complete');
try {
await clearStores(['messages']);
} catch (error) {
window.log.warn('Failed to clear messages store');
}
lastIndex = await getUnprocessedExportLastIndex(db);
complete = false;
while (!complete) {
// eslint-disable-next-line no-await-in-loop
const status = await migrateStoreToSQLite({
db,
save: async array => {
await Promise.all(
map(array, async item => {
// In the new database, we can't store ArrayBuffers, so we turn these two
// fields into strings like MessageReceiver now does before save.
// Need to set it to version two, since we're using Base64 strings now
// eslint-disable-next-line no-param-reassign
item.version = 2;
if (item.envelope) {
// eslint-disable-next-line no-param-reassign
item.envelope = arrayBufferToString(item.envelope);
}
if (item.decrypted) {
// eslint-disable-next-line no-param-reassign
item.decrypted = arrayBufferToString(item.decrypted);
}
})
);
await saveUnprocesseds(array);
},
remove: removeUnprocessed,
storeName: 'unprocessed',
handleDOMException,
lastIndex,
});
({ complete, lastIndex } = status);
// eslint-disable-next-line no-await-in-loop
await setUnprocessedExportLastIndex(db, lastIndex);
}
window.log.info('migrateToSQL: migrate of unprocessed complete');
try {
await clearStores(['unprocessed']);
} catch (error) {
window.log.warn('Failed to clear unprocessed store');
}
complete = false;
lastIndex = null;
while (!complete) {
// eslint-disable-next-line no-await-in-loop
const status = await migrateStoreToSQLite({
db,
// eslint-disable-next-line no-loop-func
save: async array => {
const conversations = await Promise.all(
map(array, async conversation =>
migrateConversation(conversation, { writeNewAttachmentData })
)
);
await saveConversations(conversations);
},
remove: _removeConversations,
storeName: 'conversations',
handleDOMException,
lastIndex,
// Because we're doing real-time moves to the filesystem, minimize parallelism
batchSize: 5,
});
({ complete, lastIndex } = status);
}
window.log.info('migrateToSQL: migrate of conversations complete');
try {
await clearStores(['conversations']);
} catch (error) {
window.log.warn('Failed to clear conversations store');
}
complete = false;
lastIndex = null;
while (!complete) {
// eslint-disable-next-line no-await-in-loop
const status = await migrateStoreToSQLite({
db,
// eslint-disable-next-line no-loop-func
save: bulkAddSessions,
remove: removeSessionById,
storeName: 'sessions',
handleDOMException,
lastIndex,
batchSize: 10,
});
({ complete, lastIndex } = status);
}
window.log.info('migrateToSQL: migrate of sessions complete');
try {
await clearStores(['sessions']);
} catch (error) {
window.log.warn('Failed to clear sessions store');
}
complete = false;
lastIndex = null;
while (!complete) {
// eslint-disable-next-line no-await-in-loop
const status = await migrateStoreToSQLite({
db,
// eslint-disable-next-line no-loop-func
save: bulkAddIdentityKeys,
remove: removeIdentityKeyById,
storeName: 'identityKeys',
handleDOMException,
lastIndex,
batchSize: 10,
});
({ complete, lastIndex } = status);
}
window.log.info('migrateToSQL: migrate of identityKeys complete');
try {
await clearStores(['identityKeys']);
} catch (error) {
window.log.warn('Failed to clear identityKeys store');
}
complete = false;
lastIndex = null;
while (!complete) {
// eslint-disable-next-line no-await-in-loop
const status = await migrateStoreToSQLite({
db,
// eslint-disable-next-line no-loop-func
save: bulkAddPreKeys,
remove: removePreKeyById,
storeName: 'preKeys',
handleDOMException,
lastIndex,
batchSize: 10,
});
({ complete, lastIndex } = status);
}
window.log.info('migrateToSQL: migrate of preKeys complete');
try {
await clearStores(['preKeys']);
} catch (error) {
window.log.warn('Failed to clear preKeys store');
}
complete = false;
lastIndex = null;
while (!complete) {
// eslint-disable-next-line no-await-in-loop
const status = await migrateStoreToSQLite({
db,
// eslint-disable-next-line no-loop-func
save: bulkAddSignedPreKeys,
remove: removeSignedPreKeyById,
storeName: 'signedPreKeys',
handleDOMException,
lastIndex,
batchSize: 10,
});
({ complete, lastIndex } = status);
}
window.log.info('migrateToSQL: migrate of signedPreKeys complete');
try {
await clearStores(['signedPreKeys']);
} catch (error) {
window.log.warn('Failed to clear signedPreKeys store');
}
complete = false;
lastIndex = null;
while (!complete) {
// eslint-disable-next-line no-await-in-loop
const status = await migrateStoreToSQLite({
db,
// eslint-disable-next-line no-loop-func
save: bulkAddItems,
remove: removeItemById,
storeName: 'items',
handleDOMException,
lastIndex,
batchSize: 10,
});
({ complete, lastIndex } = status);
}
window.log.info('migrateToSQL: migrate of items complete');
// Note: we don't clear the items store because it contains important metadata which,
// if this process fails, will be crucial to going through this process again.
window.log.info('migrateToSQL: complete');
}
async function migrateStoreToSQLite({
db,
save,
remove,
storeName,
handleDOMException,
lastIndex = null,
batchSize = 50,
}) {
if (!db) {
throw new Error('Need db for IndexedDB connection!');
}
if (!isFunction(save)) {
throw new Error('Need save function!');
}
if (!isFunction(remove)) {
throw new Error('Need remove function!');
}
if (!isString(storeName)) {
throw new Error('Need storeName!');
}
if (!isFunction(handleDOMException)) {
throw new Error('Need handleDOMException for error handling!');
}
if (!includes(db.objectStoreNames, storeName)) {
return {
complete: true,
count: 0,
};
}
const queryPromise = new Promise((resolve, reject) => {
const items = [];
const transaction = db.transaction(storeName, 'readonly');
transaction.onerror = () => {
handleDOMException(
'migrateToSQLite transaction error',
transaction.error,
reject
);
};
transaction.oncomplete = () => {};
const store = transaction.objectStore(storeName);
const excludeLowerBound = true;
const range = lastIndex
? IDBKeyRange.lowerBound(lastIndex, excludeLowerBound)
: undefined;
const request = store.openCursor(range);
request.onerror = () => {
handleDOMException(
'migrateToSQLite: request error',
request.error,
reject
);
};
request.onsuccess = event => {
const cursor = event.target.result;
if (!cursor || !cursor.value) {
return resolve({
complete: true,
items,
});
}
const item = cursor.value;
items.push(item);
if (items.length >= batchSize) {
return resolve({
complete: false,
items,
});
}
return cursor.continue();
};
});
const { items, complete } = await queryPromise;
if (items.length) {
// Because of the force save and some failed imports, we're going to delete before
// we attempt to insert.
const ids = items.map(item => item.id);
await remove(ids);
// We need to pass forceSave parameter, because these items already have an
// id key. Normally, this call would be interpreted as an update request.
await save(items, { forceSave: true });
}
const lastItem = last(items);
const id = lastItem ? lastItem.id : null;
return {
complete,
count: items.length,
lastIndex: id,
};
}

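Each of the eight stores above runs the same resumable loop around migrateStoreToSQLite; only the save/remove pair, the batch size, and whether the checkpoint is persisted differ. Condensed into one driver (a sketch of the pattern, not code from this file):

async function drainStoreToSQL({
  db,
  storeName,
  save,
  remove,
  handleDOMException,
  batchSize,
}) {
  let lastIndex = null;
  let complete = false;
  while (!complete) {
    // eslint-disable-next-line no-await-in-loop
    const status = await migrateStoreToSQLite({
      db,
      save,
      remove,
      storeName,
      handleDOMException,
      lastIndex,
      batchSize,
    });
    ({ complete, lastIndex } = status);
  }
}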
js/modules/migrations/18.js

@@ -1,17 +0,0 @@
exports.run = ({ transaction, logger }) => {
const messagesStore = transaction.objectStore('messages');
logger.info("Create message attachment metadata index: 'hasAttachments'");
messagesStore.createIndex(
'hasAttachments',
['conversationId', 'hasAttachments', 'received_at'],
{ unique: false }
);
['hasVisualMediaAttachments', 'hasFileAttachments'].forEach(name => {
logger.info(`Create message attachment metadata index: '${name}'`);
messagesStore.createIndex(name, ['conversationId', 'received_at', name], {
unique: false,
});
});
};

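These compound keyPaths exist so media-gallery queries can range-scan one conversation in received_at order. A hypothetical lookup against one of the indexes created above (the range constrains only the first two key parts, so the attachment flag in the third position still has to be checked on each cursor value):

function openVisualMediaCursor(messagesStore, conversationId) {
  const range = IDBKeyRange.bound(
    [conversationId, 0],
    [conversationId, Number.MAX_SAFE_INTEGER]
  );
  // 'prev' walks the conversation newest-first.
  return messagesStore
    .index('hasVisualMediaAttachments')
    .openCursor(range, 'prev');
}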
js/modules/migrations/get_placeholder_migrations.js

@@ -1,35 +0,0 @@
/* global window, Whisper */
const Migrations = require('./migrations');
exports.getPlaceholderMigrations = () => {
const version = Migrations.getLatestVersion();
return [
{
version,
migrate() {
throw new Error(
'Unexpected invocation of placeholder migration!' +
'\n\nMigrations must explicitly be run upon application startup instead' +
' of implicitly via Backbone IndexedDB adapter at any time.'
);
},
},
];
};
exports.getCurrentVersion = () =>
new Promise((resolve, reject) => {
const request = window.indexedDB.open(Whisper.Database.id);
request.onerror = reject;
request.onupgradeneeded = reject;
request.onsuccess = () => {
const db = request.result;
const { version } = db;
return resolve(version);
};
});

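getCurrentVersion works because indexedDB.open with no version argument connects at whatever version already exists, so it can never fire an upgrade against the placeholder migration. A hypothetical guard built on it (checkLegacyDatabase is not part of this codebase):

async function checkLegacyDatabase() {
  const version = await exports.getCurrentVersion();
  if (version >= Migrations.getLatestVersion()) {
    window.log.info('Legacy IndexedDB is already at version', version);
  }
}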
js/modules/migrations/migrations.js

@@ -1,221 +0,0 @@
/* global window */
const { isString, last } = require('lodash');
const { runMigrations } = require('./run_migrations');
const Migration18 = require('./18');
// IMPORTANT: The migrations below are run on a database that may be very large
// due to attachments being directly stored inside the database. Please avoid
// any expensive operations, e.g. modifying all messages / attachments, etc., as
// it may cause out-of-memory errors for users with long histories:
// https://github.com/signalapp/Signal-Desktop/issues/2163
const migrations = [
{
version: '12.0',
migrate(transaction, next) {
window.log.info('Migration 12');
window.log.info('creating object stores');
const messages = transaction.db.createObjectStore('messages');
messages.createIndex('conversation', ['conversationId', 'received_at'], {
unique: false,
});
messages.createIndex('receipt', 'sent_at', { unique: false });
messages.createIndex('unread', ['conversationId', 'unread'], {
unique: false,
});
messages.createIndex('expires_at', 'expires_at', { unique: false });
const conversations = transaction.db.createObjectStore('conversations');
conversations.createIndex('inbox', 'active_at', { unique: false });
conversations.createIndex('group', 'members', {
unique: false,
multiEntry: true,
});
conversations.createIndex('type', 'type', {
unique: false,
});
conversations.createIndex('search', 'tokens', {
unique: false,
multiEntry: true,
});
transaction.db.createObjectStore('groups');
transaction.db.createObjectStore('sessions');
transaction.db.createObjectStore('identityKeys');
transaction.db.createObjectStore('preKeys');
transaction.db.createObjectStore('signedPreKeys');
transaction.db.createObjectStore('items');
window.log.info('creating debug log');
transaction.db.createObjectStore('debug');
next();
},
},
{
version: '13.0',
migrate(transaction, next) {
window.log.info('Migration 13');
window.log.info('Adding fields to identity keys');
const identityKeys = transaction.objectStore('identityKeys');
const request = identityKeys.openCursor();
const promises = [];
request.onsuccess = event => {
const cursor = event.target.result;
if (cursor) {
const attributes = cursor.value;
attributes.timestamp = 0;
attributes.firstUse = false;
attributes.nonblockingApproval = false;
attributes.verified = 0;
promises.push(
new Promise((resolve, reject) => {
const putRequest = identityKeys.put(attributes, attributes.id);
putRequest.onsuccess = resolve;
putRequest.onerror = error => {
window.log.error(error && error.stack ? error.stack : error);
reject(error);
};
})
);
cursor.continue();
} else {
// no more results
// eslint-disable-next-line more/no-then
Promise.all(promises).then(() => {
next();
});
}
};
request.onerror = event => {
window.log.error(event);
};
},
},
{
version: '14.0',
migrate(transaction, next) {
window.log.info('Migration 14');
window.log.info('Adding unprocessed message store');
const unprocessed = transaction.db.createObjectStore('unprocessed');
unprocessed.createIndex('received', 'timestamp', { unique: false });
next();
},
},
{
version: '15.0',
migrate(transaction, next) {
window.log.info('Migration 15');
window.log.info('Adding messages index for de-duplication');
const messages = transaction.objectStore('messages');
messages.createIndex('unique', ['source', 'sourceDevice', 'sent_at'], {
unique: true,
});
next();
},
},
{
version: '16.0',
migrate(transaction, next) {
window.log.info('Migration 16');
window.log.info('Dropping log table, since we now log to disk');
transaction.db.deleteObjectStore('debug');
next();
},
},
{
version: 17,
async migrate(transaction, next) {
window.log.info('Migration 17');
const start = Date.now();
const messagesStore = transaction.objectStore('messages');
window.log.info(
'Create index from attachment schema version to attachment'
);
messagesStore.createIndex('schemaVersion', 'schemaVersion', {
unique: false,
});
const duration = Date.now() - start;
window.log.info(
'Complete migration to database version 17',
`Duration: ${duration}ms`
);
next();
},
},
{
version: 18,
migrate(transaction, next) {
window.log.info('Migration 18');
const start = Date.now();
Migration18.run({ transaction, logger: window.log });
const duration = Date.now() - start;
window.log.info(
'Complete migration to database version 18',
`Duration: ${duration}ms`
);
next();
},
},
{
version: 19,
migrate(transaction, next) {
window.log.info('Migration 19');
// Empty because we don't want to cause incompatibility with beta users who have
// already run migration 19 when it was object store removal.
next();
},
},
{
version: 20,
migrate(transaction, next) {
window.log.info('Migration 20');
// Empty because we don't want to cause incompatibility with users who have already
// run migration 20 when it was object store removal.
next();
},
},
];
const database = {
id: 'signal',
nolog: true,
migrations,
};
exports.run = ({ Backbone, databaseName, logger } = {}) =>
runMigrations({
Backbone,
logger,
database: Object.assign(
{},
database,
isString(databaseName) ? { id: databaseName } : {}
),
});
exports.getDatabase = () => ({
name: database.id,
version: exports.getLatestVersion(),
});
exports.getLatestVersion = () => {
const lastMigration = last(migrations);
if (!lastMigration) {
return null;
}
return lastMigration.version;
};

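Every entry above follows the same adapter contract: a version plus a migrate(transaction, next) callback that does its schema work through the upgrade transaction and must call next() exactly once. A hypothetical new entry, shown only to make the shape explicit (version 21 never existed):

{
  version: 21,
  migrate(transaction, next) {
    window.log.info('Migration 21');
    // Schema-only changes, per the out-of-memory warning at the top of this
    // file: create stores and indexes, never iterate over message data.
    transaction.db.createObjectStore('example');
    next();
  },
},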
js/modules/migrations/run_migrations.js

@@ -1,79 +0,0 @@
/* eslint-env browser */
const { head, isFunction, isObject, isString, last } = require('lodash');
const db = require('../database');
const { deferredToPromise } = require('../deferred_to_promise');
const closeDatabaseConnection = ({ Backbone } = {}) =>
deferredToPromise(Backbone.sync('closeall'));
exports.runMigrations = async ({ Backbone, database, logger } = {}) => {
if (
!isObject(Backbone) ||
!isObject(Backbone.Collection) ||
!isFunction(Backbone.Collection.extend)
) {
throw new TypeError('runMigrations: Backbone is required');
}
if (
!isObject(database) ||
!isString(database.id) ||
!Array.isArray(database.migrations)
) {
throw new TypeError('runMigrations: database is required');
}
if (!isObject(logger)) {
throw new TypeError('runMigrations: logger is required');
}
const {
firstVersion: firstMigrationVersion,
lastVersion: lastMigrationVersion,
} = getMigrationVersions(database);
const databaseVersion = await db.getVersion(database.id);
const isAlreadyUpgraded = databaseVersion >= lastMigrationVersion;
logger.info('Database status', {
firstMigrationVersion,
lastMigrationVersion,
databaseVersion,
isAlreadyUpgraded,
});
if (isAlreadyUpgraded) {
return;
}
const migrationCollection = new (Backbone.Collection.extend({
database,
storeName: 'items',
}))();
// Note: this legacy migration technique is required to bring old clients with
// data in IndexedDB forward into the new world of SQLCipher only.
await deferredToPromise(migrationCollection.fetch({ limit: 1 }));
logger.info('Close database connection');
await closeDatabaseConnection({ Backbone });
};
const getMigrationVersions = database => {
if (!isObject(database) || !Array.isArray(database.migrations)) {
throw new TypeError("'database' is required");
}
const firstMigration = head(database.migrations);
const lastMigration = last(database.migrations);
const firstVersion = firstMigration
? parseInt(firstMigration.version, 10)
: null;
const lastVersion = lastMigration
? parseInt(lastMigration.version, 10)
: null;
return { firstVersion, lastVersion };
};

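The fetch({ limit: 1 }) above is the whole trick: any read through the Backbone IndexedDB adapter opens the database at the latest migration version, and that open is what replays pending migrations. In raw IndexedDB terms the mechanism looks roughly like this (a sketch, not the adapter's actual code; database and lastMigrationVersion are the values in runMigrations above):

const openRequest = window.indexedDB.open(database.id, lastMigrationVersion);
openRequest.onupgradeneeded = event => {
  // The adapter walks database.migrations from event.oldVersion forward,
  // running each migrate(transaction, next) inside this upgrade transaction
  // before the open request completes.
};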
js/modules/signal.js

@@ -4,7 +4,6 @@ const { bindActionCreators } = require('redux');
const Backbone = require('../../ts/backbone');
const Crypto = require('../../ts/Crypto');
const Data = require('../../ts/sql/Client').default;
-const Database = require('./database');
const Emojis = require('./emojis');
const EmojiLib = require('../../ts/components/emoji/lib');
const IndexedDB = require('./indexeddb');
@@ -13,7 +12,6 @@ const OS = require('../../ts/OS');
const Stickers = require('./stickers');
const Settings = require('./settings');
const Util = require('../../ts/util');
-const { migrateToSQL } = require('./migrate_to_sql');
const Metadata = require('./metadata/SecretSessionCipher');
const RefreshSenderCertificate = require('./refresh_sender_certificate');
const LinkPreviews = require('./link_previews');
@@ -75,13 +73,6 @@ const userDuck = require('../../ts/state/ducks/user');
const conversationsSelectors = require('../../ts/state/selectors/conversations');
const searchSelectors = require('../../ts/state/selectors/search');
-// Migrations
-const {
-  getPlaceholderMigrations,
-  getCurrentVersion,
-} = require('./migrations/get_placeholder_migrations');
-const { run } = require('./migrations/migrations');
// Types
const AttachmentType = require('./types/attachment');
const VisualAttachment = require('./types/visual_attachment');
@@ -193,8 +184,6 @@ function initializeMigrations({
getAbsoluteDraftPath,
getAbsoluteStickerPath,
getAbsoluteTempPath,
-getPlaceholderMigrations,
-getCurrentVersion,
loadAttachmentData,
loadMessage: MessageType.createAttachmentLoader(loadAttachmentData),
loadPreviewData,
@@ -205,7 +194,6 @@
readDraftData,
readStickerData,
readTempData,
-run,
saveAttachmentToDisk,
processNewAttachment: attachment =>
MessageType.processNewAttachment(attachment, {
@@ -353,13 +341,11 @@ exports.setup = (options = {}) => {
Components,
Crypto,
Data,
-Database,
Emojis,
EmojiLib,
IndexedDB,
LinkPreviews,
Metadata,
-migrateToSQL,
Migrations,
Notifications,
OS,