Remove all IndexedDB migration code
parent 752cd75c54
commit 464c814a95
22 changed files with 69 additions and 1673 deletions
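For orientation before the per-file hunks: the core behavioral change is in js/background.js. Instead of migrating legacy IndexedDB data to SQLCipher at startup, the app now detects a leftover IndexedDB database, asks the user to confirm, and deletes it. Below is a minimal sketch condensed from the hunks that follow, not the verbatim patch; the wrapper name deleteLegacyIndexedDBIfPresent is illustrative only, and the sketch assumes the renderer globals used in the patch (window.Signal, Whisper, window.i18n) plus the doesDatabaseExist / removeIndexedDB helpers imported from Signal.IndexedDB.

// Sketch only: condensed from the js/background.js changes in this commit.
async function deleteLegacyIndexedDBIfPresent() {
  const isIndexedDBPresent = await doesDatabaseExist();
  if (!isIndexedDBPresent) {
    return;
  }

  try {
    // Ask the user; the only alternative offered to deletion is quitting.
    await new Promise((resolve, reject) => {
      const dialog = new Whisper.ConfirmationDialogView({
        message: window.i18n('deleteOldIndexedDBData'),
        okText: window.i18n('deleteOldData'),
        cancelText: window.i18n('quit'),
        resolve,
        reject,
      });
      document.body.append(dialog.el);
      dialog.focusCancel();
    });
  } catch (error) {
    // User chose to quit rather than delete old data.
    window.shutdown();
    return;
  }

  // Remove previously migrated SQL rows and the IndexedDB files themselves.
  await Promise.all([
    removeIndexedDB(),
    window.Signal.Data.removeAll(),
    window.Signal.Data.removeIndexedDBFiles(),
  ]);

  // Flag a retry on next startup in case deletion did not complete.
  await window.Signal.Data.createOrUpdateItem({
    id: 'indexeddb-delete-needed',
    value: true,
  });
}

The 'indexeddb-delete-needed' item is written with direct data calls because the storage module is not ready this early in startup; the real patch also logs each step and tolerates deletion failures rather than blocking launch.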
@@ -1120,6 +1120,14 @@
"message": "Disconnecting and deleting all data",
"description": "Message shown to user when app is disconnected and data deleted"
},
"deleteOldIndexedDBData": {
"message": "You have obsolete data from a prior installation of Signal Desktop. If you choose to continue, it will be deleted and you will start from scratch.",
"description": "Shown if user last ran Signal Desktop before October 2018"
},
"deleteOldData": {
"message": "Delete Old Data",
"description": "Button to make the delete happen"
},
"notifications": {
"message": "Notifications",
"description": "Header for notification settings"
@@ -344,7 +344,6 @@
<script type='text/javascript' src='js/reliable_trigger.js'></script>
<script type='text/javascript' src='js/database.js'></script>
<script type='text/javascript' src='js/storage.js'></script>
<script type='text/javascript' src='js/legacy_storage.js'></script>
<script type='text/javascript' src='js/signal_protocol_store.js'></script>
<script type='text/javascript' src='js/libtextsecure.js'></script>
@@ -190,10 +190,7 @@
const { IdleDetector, MessageDataMigrator } = Signal.Workflow;
const {
mandatoryMessageUpgrade,
migrateAllToSQLCipher,
removeDatabase,
runMigrations,
removeDatabase: removeIndexedDB,
doesDatabaseExist,
} = Signal.IndexedDB;
const { Errors, Message } = window.Signal.Types;
@@ -205,11 +202,6 @@
} = window.Signal.Migrations;
const { Views } = window.Signal;

// Implicitly used in `indexeddb-backbonejs-adapter`:
// https://github.com/signalapp/Signal-Desktop/blob/4033a9f8137e62ed286170ed5d4941982b1d3a64/components/indexeddb-backbonejs-adapter/backbone-indexeddb.js#L569
window.onInvalidStateError = error =>
window.log.error(error && error.stack ? error.stack : error);

window.log.info('background page reloaded');
window.log.info('environment:', window.getEnvironment());
@@ -267,13 +259,56 @@
const cancelInitializationMessage = Views.Initialization.setMessage();

const version = await window.Signal.Data.getItemById('version');
let isIndexedDBPresent = false;
if (!version) {
isIndexedDBPresent = await doesDatabaseExist();
const isIndexedDBPresent = await doesDatabaseExist();
if (isIndexedDBPresent) {
window.installStorage(window.legacyStorage);
window.log.info('Start IndexedDB migrations');
await runMigrations();
window.log.info('Found IndexedDB database.');
try {
window.log.info('Confirming deletion of old data with user...');

try {
await new Promise((resolve, reject) => {
const dialog = new Whisper.ConfirmationDialogView({
message: window.i18n('deleteOldIndexedDBData'),
okText: window.i18n('deleteOldData'),
cancelText: window.i18n('quit'),
resolve,
reject,
});
document.body.append(dialog.el);
dialog.focusCancel();
});
} catch (error) {
window.log.info(
'User chose not to delete old data. Shutting down.',
error && error.stack ? error.stack : error
);
window.shutdown();
return;
}

window.log.info('Deleting all previously-migrated data in SQL...');
window.log.info('Deleting IndexedDB file...');

await Promise.all([
removeIndexedDB(),
window.Signal.Data.removeAll(),
window.Signal.Data.removeIndexedDBFiles(),
]);
window.log.info('Done with SQL deletion and IndexedDB file deletion.');
} catch (error) {
window.log.error(
'Failed to remove IndexedDB file or remove SQL data:',
error && error.stack ? error.stack : error
);
}

// Set a flag to delete IndexedDB on next startup if it wasn't deleted just now.
// We need to use direct data calls, since storage isn't ready yet.
await window.Signal.Data.createOrUpdateItem({
id: 'indexeddb-delete-needed',
value: true,
});
}
}
@@ -424,24 +459,6 @@
},
};

if (isIndexedDBPresent) {
await mandatoryMessageUpgrade({ upgradeMessageSchema });
await migrateAllToSQLCipher({ writeNewAttachmentData, Views });
await removeDatabase();
try {
await window.Signal.Data.removeIndexedDBFiles();
} catch (error) {
window.log.error(
'Failed to remove IndexedDB files:',
error && error.stack ? error.stack : error
);
}

window.installStorage(window.newStorage);
await window.storage.fetch();
await storage.put('indexeddb-delete-needed', true);
}

// How long since we were last running?
const now = Date.now();
const lastHeartbeat = storage.get('lastHeartbeat');
js/database.js (109 changed lines)

@@ -1,14 +1,9 @@
/* global _: false */
/* global Backbone: false */

/* global Whisper: false */

// eslint-disable-next-line func-names
(function() {
'use strict';

const { getPlaceholderMigrations } = window.Signal.Migrations;

window.Whisper = window.Whisper || {};
window.Whisper.Database = window.Whisper.Database || {};
window.Whisper.Database.id = window.Whisper.Database.id || 'signal';
@@ -23,108 +18,4 @@
);
reject(error || new Error(prefix));
};

function clearStores(db, names) {
return new Promise((resolve, reject) => {
const storeNames = names || db.objectStoreNames;
window.log.info('Clearing these indexeddb stores:', storeNames);
const transaction = db.transaction(storeNames, 'readwrite');

let finished = false;
const finish = via => {
window.log.info('clearing all stores done via', via);
if (finished) {
resolve();
}
finished = true;
};

transaction.oncomplete = finish.bind(null, 'transaction complete');
transaction.onerror = () => {
Whisper.Database.handleDOMException(
'clearStores transaction error',
transaction.error,
reject
);
};

let count = 0;

// can't use built-in .forEach because db.objectStoreNames is not a plain array
_.forEach(storeNames, storeName => {
const store = transaction.objectStore(storeName);
const request = store.clear();

request.onsuccess = () => {
count += 1;
window.log.info('Done clearing store', storeName);

if (count >= storeNames.length) {
window.log.info('Done clearing indexeddb stores');
finish('clears complete');
}
};

request.onerror = () => {
Whisper.Database.handleDOMException(
'clearStores request error',
request.error,
reject
);
};
});
});
}

Whisper.Database.open = () => {
const { migrations } = Whisper.Database;
const { version } = migrations[migrations.length - 1];
const DBOpenRequest = window.indexedDB.open(Whisper.Database.id, version);

return new Promise((resolve, reject) => {
// these two event handlers act on the IDBDatabase object,
// when the database is opened successfully, or not
DBOpenRequest.onerror = reject;
DBOpenRequest.onsuccess = () => resolve(DBOpenRequest.result);

// This event handles the event whereby a new version of
// the database needs to be created Either one has not
// been created before, or a new version number has been
// submitted via the window.indexedDB.open line above
DBOpenRequest.onupgradeneeded = reject;
});
};

Whisper.Database.clear = async () => {
const db = await Whisper.Database.open();
await clearStores(db);
db.close();
};

Whisper.Database.clearStores = async storeNames => {
const db = await Whisper.Database.open();
await clearStores(db, storeNames);
db.close();
};

Whisper.Database.close = () => window.wrapDeferred(Backbone.sync('closeall'));

Whisper.Database.drop = () =>
new Promise((resolve, reject) => {
const request = window.indexedDB.deleteDatabase(Whisper.Database.id);

request.onblocked = () => {
reject(new Error('Error deleting database: Blocked.'));
};
request.onupgradeneeded = () => {
reject(new Error('Error deleting database: Upgrade needed.'));
};
request.onerror = () => {
reject(new Error('Error deleting database.'));
};

request.onsuccess = resolve;
});

Whisper.Database.migrations = getPlaceholderMigrations();
})();
@@ -1,92 +0,0 @@
/* global Backbone, Whisper */

/* eslint-disable more/no-then */

// eslint-disable-next-line func-names
(function() {
'use strict';

window.Whisper = window.Whisper || {};
const Item = Backbone.Model.extend({
database: Whisper.Database,
storeName: 'items',
});
const ItemCollection = Backbone.Collection.extend({
model: Item,
storeName: 'items',
database: Whisper.Database,
});

let ready = false;
const items = new ItemCollection();
items.on('reset', () => {
ready = true;
});
window.legacyStorage = {
/** ***************************
*** Base Storage Routines ***
**************************** */
put(key, value) {
if (value === undefined) {
throw new Error('Tried to store undefined');
}
if (!ready) {
window.log.warn(
'Called storage.put before storage is ready. key:',
key
);
}
const item = items.add({ id: key, value }, { merge: true });
return new Promise((resolve, reject) => {
item.save().then(resolve, reject);
});
},

get(key, defaultValue) {
const item = items.get(`${key}`);
if (!item) {
return defaultValue;
}
return item.get('value');
},

remove(key) {
const item = items.get(`${key}`);
if (item) {
items.remove(item);
return new Promise((resolve, reject) => {
item.destroy().then(resolve, reject);
});
}
return Promise.resolve();
},

onready(callback) {
if (ready) {
callback();
} else {
items.on('reset', callback);
}
},

fetch() {
return new Promise((resolve, reject) => {
items
.fetch({ reset: true })
.fail(() =>
reject(
new Error(
'Failed to fetch from storage.' +
' This may be due to an unexpected database version.'
)
)
)
.always(resolve);
});
},

reset() {
items.reset();
},
};
})();
js/modules/data.d.ts (vendored, 29 changed lines)

@@ -1,29 +0,0 @@
export function searchMessages(query: string): Promise<Array<any>>;
export function searchConversations(query: string): Promise<Array<any>>;
export function searchMessagesInConversation(
query: string,
conversationId: string
): Promise<Array<any>>;

export function updateStickerLastUsed(
packId: string,
stickerId: number,
time: number
): Promise<void>;
export function updateStickerPackStatus(
packId: string,
status: 'known' | 'downloaded' | 'installed' | 'error' | 'pending',
options?: { timestamp: number }
): Promise<void>;

export function getRecentStickers(): Promise<
Array<{
id: number;
packId: string;
}>
>;

export function updateEmojiUsage(shortName: string): Promise<void>;
export function getRecentEmojis(
limit: number
): Promise<Array<{ shortName: string; lastUsage: string }>>;
@@ -1,68 +0,0 @@
/* global indexedDB */

// Module for interacting with IndexedDB without Backbone IndexedDB adapter
// and using promises. Revisit use of `idb` dependency as it might cover
// this functionality.

const { isObject, isNumber } = require('lodash');

exports.open = (name, version, { onUpgradeNeeded } = {}) => {
const request = indexedDB.open(name, version);
return new Promise((resolve, reject) => {
request.onblocked = () => reject(new Error('Database blocked'));

request.onupgradeneeded = event => {
const hasRequestedSpecificVersion = isNumber(version);
if (!hasRequestedSpecificVersion) {
return;
}

const { newVersion, oldVersion } = event;
if (onUpgradeNeeded) {
const { transaction } = event.target;
onUpgradeNeeded({ oldVersion, transaction });
return;
}

reject(
new Error(
'Database upgrade required:' +
` oldVersion: ${oldVersion}, newVersion: ${newVersion}`
)
);
};

request.onerror = event => reject(event.target.error);

request.onsuccess = event => {
const connection = event.target.result;
resolve(connection);
};
});
};

exports.completeTransaction = transaction =>
new Promise((resolve, reject) => {
transaction.addEventListener('abort', event => reject(event.target.error));
transaction.addEventListener('error', event => reject(event.target.error));
transaction.addEventListener('complete', () => resolve());
});

exports.getVersion = async name => {
const connection = await exports.open(name);
const { version } = connection;
connection.close();
return version;
};

exports.getCount = async ({ store } = {}) => {
if (!isObject(store)) {
throw new TypeError("'store' is required");
}

const request = store.count();
return new Promise((resolve, reject) => {
request.onerror = event => reject(event.target.error);
request.onsuccess = event => resolve(event.target.result);
});
};
js/modules/deferred_to_promise.d.ts (vendored, 3 changed lines)

@@ -1,3 +0,0 @@
export function deferredToPromise<T>(
deferred: JQuery.Deferred<any, any, any>
): Promise<T>;
@@ -1,3 +0,0 @@
exports.deferredToPromise = deferred =>
// eslint-disable-next-line more/no-then
new Promise((resolve, reject) => deferred.then(resolve, reject));
@@ -1,146 +1,13 @@
/* global window, Whisper, textsecure, setTimeout */

const { isFunction } = require('lodash');

const MessageDataMigrator = require('./messages_data_migrator');
const {
run,
getLatestVersion,
getDatabase,
} = require('./migrations/migrations');
/* global window, Whisper, setTimeout */

const MESSAGE_MINIMUM_VERSION = 7;

module.exports = {
doesDatabaseExist,
mandatoryMessageUpgrade,
MESSAGE_MINIMUM_VERSION,
migrateAllToSQLCipher,
removeDatabase,
runMigrations,
};

async function runMigrations() {
window.log.info('Run migrations on database with attachment data');
await run({
Backbone: window.Backbone,
logger: window.log,
});

Whisper.Database.migrations[0].version = getLatestVersion();
}

async function mandatoryMessageUpgrade({ upgradeMessageSchema } = {}) {
if (!isFunction(upgradeMessageSchema)) {
throw new Error(
'mandatoryMessageUpgrade: upgradeMessageSchema must be a function!'
);
}

const NUM_MESSAGES_PER_BATCH = 10;
window.log.info(
'upgradeMessages: Mandatory message schema upgrade started.',
`Target version: ${MESSAGE_MINIMUM_VERSION}`
);

let isMigrationWithoutIndexComplete = false;
while (!isMigrationWithoutIndexComplete) {
const database = getDatabase();
// eslint-disable-next-line no-await-in-loop
const batchWithoutIndex = await MessageDataMigrator.processNextBatchWithoutIndex(
{
databaseName: database.name,
minDatabaseVersion: database.version,
numMessagesPerBatch: NUM_MESSAGES_PER_BATCH,
upgradeMessageSchema,
maxVersion: MESSAGE_MINIMUM_VERSION,
BackboneMessage: Whisper.Message,
saveMessage: window.Signal.Data.saveLegacyMessage,
}
);
window.log.info(
'upgradeMessages: upgrade without index',
batchWithoutIndex
);
isMigrationWithoutIndexComplete = batchWithoutIndex.done;
}
window.log.info('upgradeMessages: upgrade without index complete!');

let isMigrationWithIndexComplete = false;
while (!isMigrationWithIndexComplete) {
// eslint-disable-next-line no-await-in-loop
const batchWithIndex = await MessageDataMigrator.processNext({
BackboneMessage: Whisper.Message,
BackboneMessageCollection: Whisper.MessageCollection,
numMessagesPerBatch: NUM_MESSAGES_PER_BATCH,
upgradeMessageSchema,
getMessagesNeedingUpgrade:
window.Signal.Data.getLegacyMessagesNeedingUpgrade,
saveMessage: window.Signal.Data.saveLegacyMessage,
maxVersion: MESSAGE_MINIMUM_VERSION,
});
window.log.info('upgradeMessages: upgrade with index', batchWithIndex);
isMigrationWithIndexComplete = batchWithIndex.done;
}
window.log.info('upgradeMessages: upgrade with index complete!');

window.log.info('upgradeMessages: Message schema upgrade complete');
}

async function migrateAllToSQLCipher({ writeNewAttachmentData, Views } = {}) {
if (!isFunction(writeNewAttachmentData)) {
throw new Error(
'migrateAllToSQLCipher: writeNewAttachmentData must be a function'
);
}
if (!Views) {
throw new Error('migrateAllToSQLCipher: Views must be provided!');
}

let totalMessages;
const db = await Whisper.Database.open();

function showMigrationStatus(current) {
const status = `${current}/${totalMessages}`;
Views.Initialization.setMessage(
window.i18n('migratingToSQLCipher', [status])
);
}

try {
totalMessages = await MessageDataMigrator.getNumMessages({
connection: db,
});
} catch (error) {
window.log.error(
'background.getNumMessages error:',
error && error.stack ? error.stack : error
);
totalMessages = 0;
}

if (totalMessages) {
window.log.info(`About to migrate ${totalMessages} messages`);
showMigrationStatus(0);
} else {
window.log.info('About to migrate non-messages');
}

await window.Signal.migrateToSQL({
db,
clearStores: Whisper.Database.clearStores,
handleDOMException: Whisper.Database.handleDOMException,
arrayBufferToString: textsecure.MessageReceiver.arrayBufferToStringBase64,
countCallback: count => {
window.log.info(`Migration: ${count} messages complete`);
showMigrationStatus(count);
},
writeNewAttachmentData,
});

db.close();
}

async function doesDatabaseExist() {
window.log.info('Checking for the existence of IndexedDB data...');
return new Promise((resolve, reject) => {
@@ -1,18 +1,10 @@
// Module to upgrade the schema of messages, e.g. migrate attachments to disk.
// `dangerouslyProcessAllWithoutIndex` purposely doesn’t rely on our Backbone
// IndexedDB adapter to prevent automatic migrations. Rather, it uses direct
// IndexedDB access. This includes avoiding usage of `storage` module which uses
// Backbone under the hood.
// Ensures that messages in database are at the right schema.

/* global IDBKeyRange, window */
/* global window */

const { isFunction, isNumber, isObject, isString, last } = require('lodash');
const { isFunction, isNumber } = require('lodash');

const database = require('./database');
const Message = require('./types/message');
const settings = require('./settings');

const MESSAGES_STORE_NAME = 'messages';

exports.processNext = async ({
BackboneMessage,
@@ -96,310 +88,3 @@ exports.processNext = async ({
totalDuration,
};
};

exports.dangerouslyProcessAllWithoutIndex = async ({
databaseName,
minDatabaseVersion,
numMessagesPerBatch,
upgradeMessageSchema,
logger,
maxVersion = Message.CURRENT_SCHEMA_VERSION,
saveMessage,
BackboneMessage,
} = {}) => {
if (!isString(databaseName)) {
throw new TypeError("'databaseName' must be a string");
}

if (!isNumber(minDatabaseVersion)) {
throw new TypeError("'minDatabaseVersion' must be a number");
}

if (!isNumber(numMessagesPerBatch)) {
throw new TypeError("'numMessagesPerBatch' must be a number");
}
if (!isFunction(upgradeMessageSchema)) {
throw new TypeError("'upgradeMessageSchema' is required");
}
if (!isFunction(BackboneMessage)) {
throw new TypeError("'upgradeMessageSchema' is required");
}
if (!isFunction(saveMessage)) {
throw new TypeError("'upgradeMessageSchema' is required");
}

const connection = await database.open(databaseName);
const databaseVersion = connection.version;
const isValidDatabaseVersion = databaseVersion >= minDatabaseVersion;
logger.info('Database status', {
databaseVersion,
isValidDatabaseVersion,
minDatabaseVersion,
});
if (!isValidDatabaseVersion) {
throw new Error(
`Expected database version (${databaseVersion})` +
` to be at least ${minDatabaseVersion}`
);
}

// NOTE: Even if we make this async using `then`, requesting `count` on an
// IndexedDB store blocks all subsequent transactions, so we might as well
// explicitly wait for it here:
const numTotalMessages = await exports.getNumMessages({ connection });

const migrationStartTime = Date.now();
let numCumulativeMessagesProcessed = 0;
// eslint-disable-next-line no-constant-condition
while (true) {
// eslint-disable-next-line no-await-in-loop
const status = await _processBatch({
connection,
numMessagesPerBatch,
upgradeMessageSchema,
maxVersion,
saveMessage,
BackboneMessage,
});
if (status.done) {
break;
}
numCumulativeMessagesProcessed += status.numMessagesProcessed;
logger.info(
'Upgrade message schema:',
Object.assign({}, status, {
numTotalMessages,
numCumulativeMessagesProcessed,
})
);
}

logger.info('Close database connection');
connection.close();

const totalDuration = Date.now() - migrationStartTime;
logger.info('Attachment migration complete:', {
totalDuration,
totalMessagesProcessed: numCumulativeMessagesProcessed,
});
};

exports.processNextBatchWithoutIndex = async ({
databaseName,
minDatabaseVersion,
numMessagesPerBatch,
upgradeMessageSchema,
maxVersion,
BackboneMessage,
saveMessage,
} = {}) => {
if (!isFunction(upgradeMessageSchema)) {
throw new TypeError("'upgradeMessageSchema' is required");
}

const connection = await _getConnection({ databaseName, minDatabaseVersion });
const batch = await _processBatch({
connection,
numMessagesPerBatch,
upgradeMessageSchema,
maxVersion,
BackboneMessage,
saveMessage,
});
return batch;
};

// Private API
const _getConnection = async ({ databaseName, minDatabaseVersion }) => {
if (!isString(databaseName)) {
throw new TypeError("'databaseName' must be a string");
}

if (!isNumber(minDatabaseVersion)) {
throw new TypeError("'minDatabaseVersion' must be a number");
}

const connection = await database.open(databaseName);
const databaseVersion = connection.version;
const isValidDatabaseVersion = databaseVersion >= minDatabaseVersion;
if (!isValidDatabaseVersion) {
throw new Error(
`Expected database version (${databaseVersion})` +
` to be at least ${minDatabaseVersion}`
);
}

return connection;
};

const _processBatch = async ({
connection,
numMessagesPerBatch,
upgradeMessageSchema,
maxVersion,
BackboneMessage,
saveMessage,
} = {}) => {
if (!isObject(connection)) {
throw new TypeError('_processBatch: connection must be a string');
}

if (!isFunction(upgradeMessageSchema)) {
throw new TypeError('_processBatch: upgradeMessageSchema is required');
}

if (!isNumber(numMessagesPerBatch)) {
throw new TypeError('_processBatch: numMessagesPerBatch is required');
}
if (!isNumber(maxVersion)) {
throw new TypeError('_processBatch: maxVersion is required');
}
if (!isFunction(BackboneMessage)) {
throw new TypeError('_processBatch: BackboneMessage is required');
}
if (!isFunction(saveMessage)) {
throw new TypeError('_processBatch: saveMessage is required');
}

const isAttachmentMigrationComplete = await settings.isAttachmentMigrationComplete(
connection
);
if (isAttachmentMigrationComplete) {
return {
done: true,
};
}

const lastProcessedIndex = await settings.getAttachmentMigrationLastProcessedIndex(
connection
);

const fetchUnprocessedMessagesStartTime = Date.now();
let unprocessedMessages;
try {
unprocessedMessages = await _dangerouslyFetchMessagesRequiringSchemaUpgradeWithoutIndex(
{
connection,
count: numMessagesPerBatch,
lastIndex: lastProcessedIndex,
}
);
} catch (error) {
window.log.error(
'_processBatch error:',
error && error.stack ? error.stack : error
);
await settings.markAttachmentMigrationComplete(connection);
await settings.deleteAttachmentMigrationLastProcessedIndex(connection);
return {
done: true,
};
}
const fetchDuration = Date.now() - fetchUnprocessedMessagesStartTime;

const upgradeStartTime = Date.now();
const upgradedMessages = await Promise.all(
unprocessedMessages.map(message =>
upgradeMessageSchema(message, { maxVersion })
)
);
const upgradeDuration = Date.now() - upgradeStartTime;

const saveMessagesStartTime = Date.now();
const transaction = connection.transaction(MESSAGES_STORE_NAME, 'readwrite');
const transactionCompletion = database.completeTransaction(transaction);
await Promise.all(
upgradedMessages.map(message =>
saveMessage(message, { Message: BackboneMessage })
)
);
await transactionCompletion;
const saveDuration = Date.now() - saveMessagesStartTime;

const numMessagesProcessed = upgradedMessages.length;
const done = numMessagesProcessed < numMessagesPerBatch;
const lastMessage = last(upgradedMessages);
const newLastProcessedIndex = lastMessage ? lastMessage.id : null;
if (!done) {
await settings.setAttachmentMigrationLastProcessedIndex(
connection,
newLastProcessedIndex
);
} else {
await settings.markAttachmentMigrationComplete(connection);
await settings.deleteAttachmentMigrationLastProcessedIndex(connection);
}

const batchTotalDuration = Date.now() - fetchUnprocessedMessagesStartTime;

return {
batchTotalDuration,
done,
fetchDuration,
lastProcessedIndex,
newLastProcessedIndex,
numMessagesProcessed,
saveDuration,
targetSchemaVersion: Message.CURRENT_SCHEMA_VERSION,
upgradeDuration,
};
};

// NOTE: Named ‘dangerous’ because it is not as efficient as using our
// `messages` `schemaVersion` index:
const _dangerouslyFetchMessagesRequiringSchemaUpgradeWithoutIndex = ({
connection,
count,
lastIndex,
} = {}) => {
if (!isObject(connection)) {
throw new TypeError("'connection' is required");
}

if (!isNumber(count)) {
throw new TypeError("'count' is required");
}

if (lastIndex && !isString(lastIndex)) {
throw new TypeError("'lastIndex' must be a string");
}

const hasLastIndex = Boolean(lastIndex);

const transaction = connection.transaction(MESSAGES_STORE_NAME, 'readonly');
const messagesStore = transaction.objectStore(MESSAGES_STORE_NAME);

const excludeLowerBound = true;
const range = hasLastIndex
? IDBKeyRange.lowerBound(lastIndex, excludeLowerBound)
: undefined;
return new Promise((resolve, reject) => {
const items = [];
const request = messagesStore.openCursor(range);
request.onsuccess = event => {
const cursor = event.target.result;
const hasMoreData = Boolean(cursor);
if (!hasMoreData || items.length === count) {
resolve(items);
return;
}
const item = cursor.value;
items.push(item);
cursor.continue();
};
request.onerror = event => reject(event.target.error);
});
};

exports.getNumMessages = async ({ connection } = {}) => {
if (!isObject(connection)) {
throw new TypeError("'connection' is required");
}

const transaction = connection.transaction(MESSAGES_STORE_NAME, 'readonly');
const messagesStore = transaction.objectStore(MESSAGES_STORE_NAME);
const numTotalMessages = await database.getCount({ store: messagesStore });
await database.completeTransaction(transaction);

return numTotalMessages;
};
@@ -1,409 +0,0 @@
/* global window, IDBKeyRange */

const { includes, isFunction, isString, last, map } = require('lodash');
const {
bulkAddSessions,
bulkAddIdentityKeys,
bulkAddPreKeys,
bulkAddSignedPreKeys,
bulkAddItems,

removeSessionById,
removeIdentityKeyById,
removePreKeyById,
removeSignedPreKeyById,
removeItemById,

saveMessages,
_removeMessages,

saveUnprocesseds,
removeUnprocessed,

saveConversations,
_removeConversations,
} = require('../../ts/sql/Client').default;
const {
getMessageExportLastIndex,
setMessageExportLastIndex,
getMessageExportCount,
setMessageExportCount,
getUnprocessedExportLastIndex,
setUnprocessedExportLastIndex,
} = require('./settings');
const { migrateConversation } = require('./types/conversation');

module.exports = {
migrateToSQL,
};

async function migrateToSQL({
db,
clearStores,
handleDOMException,
countCallback,
arrayBufferToString,
writeNewAttachmentData,
}) {
if (!db) {
throw new Error('Need db for IndexedDB connection!');
}
if (!isFunction(clearStores)) {
throw new Error('Need clearStores function!');
}
if (!isFunction(arrayBufferToString)) {
throw new Error('Need arrayBufferToString function!');
}
if (!isFunction(handleDOMException)) {
throw new Error('Need handleDOMException function!');
}

window.log.info('migrateToSQL: start');

let [lastIndex, doneSoFar] = await Promise.all([
getMessageExportLastIndex(db),
getMessageExportCount(db),
]);
let complete = false;

while (!complete) {
// eslint-disable-next-line no-await-in-loop
const status = await migrateStoreToSQLite({
db,
save: saveMessages,
remove: _removeMessages,
storeName: 'messages',
handleDOMException,
lastIndex,
});

({ complete, lastIndex } = status);

// eslint-disable-next-line no-await-in-loop
await Promise.all([
setMessageExportCount(db, doneSoFar),
setMessageExportLastIndex(db, lastIndex),
]);

const { count } = status;
doneSoFar += count;
if (countCallback) {
countCallback(doneSoFar);
}
}
window.log.info('migrateToSQL: migrate of messages complete');
try {
await clearStores(['messages']);
} catch (error) {
window.log.warn('Failed to clear messages store');
}

lastIndex = await getUnprocessedExportLastIndex(db);
complete = false;

while (!complete) {
// eslint-disable-next-line no-await-in-loop
const status = await migrateStoreToSQLite({
db,
save: async array => {
await Promise.all(
map(array, async item => {
// In the new database, we can't store ArrayBuffers, so we turn these two
// fields into strings like MessageReceiver now does before save.

// Need to set it to version two, since we're using Base64 strings now
// eslint-disable-next-line no-param-reassign
item.version = 2;

if (item.envelope) {
// eslint-disable-next-line no-param-reassign
item.envelope = arrayBufferToString(item.envelope);
}
if (item.decrypted) {
// eslint-disable-next-line no-param-reassign
item.decrypted = arrayBufferToString(item.decrypted);
}
})
);
await saveUnprocesseds(array);
},
remove: removeUnprocessed,
storeName: 'unprocessed',
handleDOMException,
lastIndex,
});

({ complete, lastIndex } = status);

// eslint-disable-next-line no-await-in-loop
await setUnprocessedExportLastIndex(db, lastIndex);
}
window.log.info('migrateToSQL: migrate of unprocessed complete');
try {
await clearStores(['unprocessed']);
} catch (error) {
window.log.warn('Failed to clear unprocessed store');
}

complete = false;
lastIndex = null;

while (!complete) {
// eslint-disable-next-line no-await-in-loop
const status = await migrateStoreToSQLite({
db,
// eslint-disable-next-line no-loop-func
save: async array => {
const conversations = await Promise.all(
map(array, async conversation =>
migrateConversation(conversation, { writeNewAttachmentData })
)
);

saveConversations(conversations);
},
remove: _removeConversations,
storeName: 'conversations',
handleDOMException,
lastIndex,
// Because we're doing real-time moves to the filesystem, minimize parallelism
batchSize: 5,
});

({ complete, lastIndex } = status);
}
window.log.info('migrateToSQL: migrate of conversations complete');
try {
await clearStores(['conversations']);
} catch (error) {
window.log.warn('Failed to clear conversations store');
}

complete = false;
lastIndex = null;

while (!complete) {
// eslint-disable-next-line no-await-in-loop
const status = await migrateStoreToSQLite({
db,
// eslint-disable-next-line no-loop-func
save: bulkAddSessions,
remove: removeSessionById,
storeName: 'sessions',
handleDOMException,
lastIndex,
batchSize: 10,
});

({ complete, lastIndex } = status);
}
window.log.info('migrateToSQL: migrate of sessions complete');
try {
await clearStores(['sessions']);
} catch (error) {
window.log.warn('Failed to clear sessions store');
}

complete = false;
lastIndex = null;

while (!complete) {
// eslint-disable-next-line no-await-in-loop
const status = await migrateStoreToSQLite({
db,
// eslint-disable-next-line no-loop-func
save: bulkAddIdentityKeys,
remove: removeIdentityKeyById,
storeName: 'identityKeys',
handleDOMException,
lastIndex,
batchSize: 10,
});

({ complete, lastIndex } = status);
}
window.log.info('migrateToSQL: migrate of identityKeys complete');
try {
await clearStores(['identityKeys']);
} catch (error) {
window.log.warn('Failed to clear identityKeys store');
}

complete = false;
lastIndex = null;

while (!complete) {
// eslint-disable-next-line no-await-in-loop
const status = await migrateStoreToSQLite({
db,
// eslint-disable-next-line no-loop-func
save: bulkAddPreKeys,
remove: removePreKeyById,
storeName: 'preKeys',
handleDOMException,
lastIndex,
batchSize: 10,
});

({ complete, lastIndex } = status);
}
window.log.info('migrateToSQL: migrate of preKeys complete');
try {
await clearStores(['preKeys']);
} catch (error) {
window.log.warn('Failed to clear preKeys store');
}

complete = false;
lastIndex = null;

while (!complete) {
// eslint-disable-next-line no-await-in-loop
const status = await migrateStoreToSQLite({
db,
// eslint-disable-next-line no-loop-func
save: bulkAddSignedPreKeys,
remove: removeSignedPreKeyById,
storeName: 'signedPreKeys',
handleDOMException,
lastIndex,
batchSize: 10,
});

({ complete, lastIndex } = status);
}
window.log.info('migrateToSQL: migrate of signedPreKeys complete');
try {
await clearStores(['signedPreKeys']);
} catch (error) {
window.log.warn('Failed to clear signedPreKeys store');
}

complete = false;
lastIndex = null;

while (!complete) {
// eslint-disable-next-line no-await-in-loop
const status = await migrateStoreToSQLite({
db,
// eslint-disable-next-line no-loop-func
save: bulkAddItems,
remove: removeItemById,
storeName: 'items',
handleDOMException,
lastIndex,
batchSize: 10,
});

({ complete, lastIndex } = status);
}
window.log.info('migrateToSQL: migrate of items complete');
// Note: we don't clear the items store because it contains important metadata which,
// if this process fails, will be crucial to going through this process again.

window.log.info('migrateToSQL: complete');
}

async function migrateStoreToSQLite({
db,
save,
remove,
storeName,
handleDOMException,
lastIndex = null,
batchSize = 50,
}) {
if (!db) {
throw new Error('Need db for IndexedDB connection!');
}
if (!isFunction(save)) {
throw new Error('Need save function!');
}
if (!isFunction(remove)) {
throw new Error('Need remove function!');
}
if (!isString(storeName)) {
throw new Error('Need storeName!');
}
if (!isFunction(handleDOMException)) {
throw new Error('Need handleDOMException for error handling!');
}

if (!includes(db.objectStoreNames, storeName)) {
return {
complete: true,
count: 0,
};
}

const queryPromise = new Promise((resolve, reject) => {
const items = [];
const transaction = db.transaction(storeName, 'readonly');
transaction.onerror = () => {
handleDOMException(
'migrateToSQLite transaction error',
transaction.error,
reject
);
};
transaction.oncomplete = () => {};

const store = transaction.objectStore(storeName);
const excludeLowerBound = true;
const range = lastIndex
? IDBKeyRange.lowerBound(lastIndex, excludeLowerBound)
: undefined;
const request = store.openCursor(range);
request.onerror = () => {
handleDOMException(
'migrateToSQLite: request error',
request.error,
reject
);
};
request.onsuccess = event => {
const cursor = event.target.result;

if (!cursor || !cursor.value) {
return resolve({
complete: true,
items,
});
}

const item = cursor.value;
items.push(item);

if (items.length >= batchSize) {
return resolve({
complete: false,
items,
});
}

return cursor.continue();
};
});

const { items, complete } = await queryPromise;

if (items.length) {
// Because of the force save and some failed imports, we're going to delete before
// we attempt to insert.
const ids = items.map(item => item.id);
await remove(ids);

// We need to pass forceSave parameter, because these items already have an
// id key. Normally, this call would be interpreted as an update request.
await save(items, { forceSave: true });
}

const lastItem = last(items);
const id = lastItem ? lastItem.id : null;

return {
complete,
count: items.length,
lastIndex: id,
};
}
@@ -1,17 +0,0 @@
exports.run = ({ transaction, logger }) => {
const messagesStore = transaction.objectStore('messages');

logger.info("Create message attachment metadata index: 'hasAttachments'");
messagesStore.createIndex(
'hasAttachments',
['conversationId', 'hasAttachments', 'received_at'],
{ unique: false }
);

['hasVisualMediaAttachments', 'hasFileAttachments'].forEach(name => {
logger.info(`Create message attachment metadata index: '${name}'`);
messagesStore.createIndex(name, ['conversationId', 'received_at', name], {
unique: false,
});
});
};
@@ -1,35 +0,0 @@
/* global window, Whisper */

const Migrations = require('./migrations');

exports.getPlaceholderMigrations = () => {
const version = Migrations.getLatestVersion();

return [
{
version,
migrate() {
throw new Error(
'Unexpected invocation of placeholder migration!' +
'\n\nMigrations must explicitly be run upon application startup instead' +
' of implicitly via Backbone IndexedDB adapter at any time.'
);
},
},
];
};

exports.getCurrentVersion = () =>
new Promise((resolve, reject) => {
const request = window.indexedDB.open(Whisper.Database.id);

request.onerror = reject;
request.onupgradeneeded = reject;

request.onsuccess = () => {
const db = request.result;
const { version } = db;

return resolve(version);
};
});
@@ -1,221 +0,0 @@
/* global window */

const { isString, last } = require('lodash');

const { runMigrations } = require('./run_migrations');
const Migration18 = require('./18');

// IMPORTANT: The migrations below are run on a database that may be very large
// due to attachments being directly stored inside the database. Please avoid
// any expensive operations, e.g. modifying all messages / attachments, etc., as
// it may cause out-of-memory errors for users with long histories:
// https://github.com/signalapp/Signal-Desktop/issues/2163
const migrations = [
{
version: '12.0',
migrate(transaction, next) {
window.log.info('Migration 12');
window.log.info('creating object stores');
const messages = transaction.db.createObjectStore('messages');
messages.createIndex('conversation', ['conversationId', 'received_at'], {
unique: false,
});
messages.createIndex('receipt', 'sent_at', { unique: false });
messages.createIndex('unread', ['conversationId', 'unread'], {
unique: false,
});
messages.createIndex('expires_at', 'expires_at', { unique: false });

const conversations = transaction.db.createObjectStore('conversations');
conversations.createIndex('inbox', 'active_at', { unique: false });
conversations.createIndex('group', 'members', {
unique: false,
multiEntry: true,
});
conversations.createIndex('type', 'type', {
unique: false,
});
conversations.createIndex('search', 'tokens', {
unique: false,
multiEntry: true,
});

transaction.db.createObjectStore('groups');

transaction.db.createObjectStore('sessions');
transaction.db.createObjectStore('identityKeys');
transaction.db.createObjectStore('preKeys');
transaction.db.createObjectStore('signedPreKeys');
transaction.db.createObjectStore('items');

window.log.info('creating debug log');
transaction.db.createObjectStore('debug');

next();
},
},
{
version: '13.0',
migrate(transaction, next) {
window.log.info('Migration 13');
window.log.info('Adding fields to identity keys');
const identityKeys = transaction.objectStore('identityKeys');
const request = identityKeys.openCursor();
const promises = [];
request.onsuccess = event => {
const cursor = event.target.result;
if (cursor) {
const attributes = cursor.value;
attributes.timestamp = 0;
attributes.firstUse = false;
attributes.nonblockingApproval = false;
attributes.verified = 0;
promises.push(
new Promise((resolve, reject) => {
const putRequest = identityKeys.put(attributes, attributes.id);
putRequest.onsuccess = resolve;
putRequest.onerror = error => {
window.log.error(error && error.stack ? error.stack : error);
reject(error);
};
})
);
cursor.continue();
} else {
// no more results
// eslint-disable-next-line more/no-then
Promise.all(promises).then(() => {
next();
});
}
};
request.onerror = event => {
window.log.error(event);
};
},
},
{
version: '14.0',
migrate(transaction, next) {
window.log.info('Migration 14');
window.log.info('Adding unprocessed message store');
const unprocessed = transaction.db.createObjectStore('unprocessed');
unprocessed.createIndex('received', 'timestamp', { unique: false });
next();
},
},
{
version: '15.0',
migrate(transaction, next) {
window.log.info('Migration 15');
window.log.info('Adding messages index for de-duplication');
const messages = transaction.objectStore('messages');
messages.createIndex('unique', ['source', 'sourceDevice', 'sent_at'], {
unique: true,
});
next();
},
},
{
version: '16.0',
migrate(transaction, next) {
window.log.info('Migration 16');
window.log.info('Dropping log table, since we now log to disk');
transaction.db.deleteObjectStore('debug');
next();
},
},
{
version: 17,
async migrate(transaction, next) {
window.log.info('Migration 17');

const start = Date.now();

const messagesStore = transaction.objectStore('messages');
window.log.info(
'Create index from attachment schema version to attachment'
);
messagesStore.createIndex('schemaVersion', 'schemaVersion', {
unique: false,
});

const duration = Date.now() - start;

window.log.info(
'Complete migration to database version 17',
`Duration: ${duration}ms`
);
next();
},
},
{
version: 18,
migrate(transaction, next) {
window.log.info('Migration 18');

const start = Date.now();
Migration18.run({ transaction, logger: window.log });
const duration = Date.now() - start;

window.log.info(
'Complete migration to database version 18',
`Duration: ${duration}ms`
);
next();
},
},
{
version: 19,
migrate(transaction, next) {
window.log.info('Migration 19');

// Empty because we don't want to cause incompatibility with beta users who have
// already run migration 19 when it was object store removal.

next();
},
},
{
version: 20,
migrate(transaction, next) {
window.log.info('Migration 20');

// Empty because we don't want to cause incompatibility with users who have already
// run migration 20 when it was object store removal.

next();
},
},
];

const database = {
id: 'signal',
nolog: true,
migrations,
};

exports.run = ({ Backbone, databaseName, logger } = {}) =>
runMigrations({
Backbone,
logger,
database: Object.assign(
{},
database,
isString(databaseName) ? { id: databaseName } : {}
),
});

exports.getDatabase = () => ({
name: database.id,
version: exports.getLatestVersion(),
});

exports.getLatestVersion = () => {
const lastMigration = last(migrations);
if (!lastMigration) {
return null;
}

return lastMigration.version;
};
@@ -1,79 +0,0 @@
/* eslint-env browser */

const { head, isFunction, isObject, isString, last } = require('lodash');

const db = require('../database');
const { deferredToPromise } = require('../deferred_to_promise');

const closeDatabaseConnection = ({ Backbone } = {}) =>
deferredToPromise(Backbone.sync('closeall'));

exports.runMigrations = async ({ Backbone, database, logger } = {}) => {
if (
!isObject(Backbone) ||
!isObject(Backbone.Collection) ||
!isFunction(Backbone.Collection.extend)
) {
throw new TypeError('runMigrations: Backbone is required');
}

if (
!isObject(database) ||
!isString(database.id) ||
!Array.isArray(database.migrations)
) {
throw new TypeError('runMigrations: database is required');
}
if (!isObject(logger)) {
throw new TypeError('runMigrations: logger is required');
}

const {
firstVersion: firstMigrationVersion,
lastVersion: lastMigrationVersion,
} = getMigrationVersions(database);

const databaseVersion = await db.getVersion(database.id);
const isAlreadyUpgraded = databaseVersion >= lastMigrationVersion;

logger.info('Database status', {
firstMigrationVersion,
lastMigrationVersion,
databaseVersion,
isAlreadyUpgraded,
});

if (isAlreadyUpgraded) {
return;
}

const migrationCollection = new (Backbone.Collection.extend({
database,
storeName: 'items',
}))();

// Note: this legacy migration technique is required to bring old clients with
// data in IndexedDB forward into the new world of SQLCipher only.
await deferredToPromise(migrationCollection.fetch({ limit: 1 }));

logger.info('Close database connection');
await closeDatabaseConnection({ Backbone });
};

const getMigrationVersions = database => {
if (!isObject(database) || !Array.isArray(database.migrations)) {
throw new TypeError("'database' is required");
}

const firstMigration = head(database.migrations);
const lastMigration = last(database.migrations);

const firstVersion = firstMigration
? parseInt(firstMigration.version, 10)
: null;
const lastVersion = lastMigration
? parseInt(lastMigration.version, 10)
: null;

return { firstVersion, lastVersion };
};
@@ -4,7 +4,6 @@ const { bindActionCreators } = require('redux');
const Backbone = require('../../ts/backbone');
const Crypto = require('../../ts/Crypto');
const Data = require('../../ts/sql/Client').default;
const Database = require('./database');
const Emojis = require('./emojis');
const EmojiLib = require('../../ts/components/emoji/lib');
const IndexedDB = require('./indexeddb');
@@ -13,7 +12,6 @@ const OS = require('../../ts/OS');
const Stickers = require('./stickers');
const Settings = require('./settings');
const Util = require('../../ts/util');
const { migrateToSQL } = require('./migrate_to_sql');
const Metadata = require('./metadata/SecretSessionCipher');
const RefreshSenderCertificate = require('./refresh_sender_certificate');
const LinkPreviews = require('./link_previews');
@@ -75,13 +73,6 @@ const userDuck = require('../../ts/state/ducks/user');
const conversationsSelectors = require('../../ts/state/selectors/conversations');
const searchSelectors = require('../../ts/state/selectors/search');

// Migrations
const {
getPlaceholderMigrations,
getCurrentVersion,
} = require('./migrations/get_placeholder_migrations');
const { run } = require('./migrations/migrations');

// Types
const AttachmentType = require('./types/attachment');
const VisualAttachment = require('./types/visual_attachment');
@@ -193,8 +184,6 @@ function initializeMigrations({
getAbsoluteDraftPath,
getAbsoluteStickerPath,
getAbsoluteTempPath,
getPlaceholderMigrations,
getCurrentVersion,
loadAttachmentData,
loadMessage: MessageType.createAttachmentLoader(loadAttachmentData),
loadPreviewData,
@@ -205,7 +194,6 @@ function initializeMigrations({
readDraftData,
readStickerData,
readTempData,
run,
saveAttachmentToDisk,
processNewAttachment: attachment =>
MessageType.processNewAttachment(attachment, {
@@ -353,13 +341,11 @@ exports.setup = (options = {}) => {
Components,
Crypto,
Data,
Database,
Emojis,
EmojiLib,
IndexedDB,
LinkPreviews,
Metadata,
migrateToSQL,
Migrations,
Notifications,
OS,
@@ -58,7 +58,7 @@
cancel() {
this.remove();
if (this.reject) {
this.reject();
this.reject(new Error('User clicked cancel button'));
}
},
onKeydown(event) {
main.js (3 changed lines)

@@ -1095,6 +1095,9 @@ ipc.on('restart', () => {
app.relaunch();
app.quit();
});
ipc.on('shutdown', () => {
app.quit();
});

ipc.on('set-auto-hide-menu-bar', (event, autoHide) => {
if (mainWindow) {
@@ -9,8 +9,6 @@ try {
const _ = require('lodash');
const { installGetter, installSetter } = require('./preload_utils');

const { deferredToPromise } = require('./js/modules/deferred_to_promise');

const { remote } = electron;
const { app } = remote;
const { nativeTheme } = remote.require('electron');
@@ -66,8 +64,6 @@ try {
}
};

window.wrapDeferred = deferredToPromise;

const ipc = electron.ipcRenderer;
const localeMessages = ipc.sendSync('locale-data');
@@ -97,6 +93,10 @@ try {
window.log.info('restart');
ipc.send('restart');
};
window.shutdown = () => {
window.log.info('shutdown');
ipc.send('shutdown');
};

window.closeAbout = () => ipc.send('close-about');
window.readyForUpdates = () => ipc.send('ready-for-updates');
@@ -18,7 +18,6 @@ import {
import { arrayBufferToBase64, base64ToArrayBuffer } from '../Crypto';
import { CURRENT_SCHEMA_VERSION } from '../../js/modules/types/message';
import { createBatcher } from '../util/batcher';
import { v4 as getGuid } from 'uuid';

import {
AttachmentDownloadJobType,
@@ -208,9 +207,6 @@ const dataInterface: ClientInterface = {
cleanupOrphanedAttachments,
ensureFilePermissions,

getLegacyMessagesNeedingUpgrade,
saveLegacyMessage,

// Client-side only, and test-only

_removeConversations,
@@ -1330,97 +1326,3 @@ async function getMessagesWithFileAttachments(
limit,
});
}

// Legacy IndexedDB Support

async function getLegacyMessagesNeedingUpgrade(
limit: number,
{ maxVersion = CURRENT_SCHEMA_VERSION }: { maxVersion: number }
): Promise<MessageType> {
const db = await window.Whisper.Database.open();
try {
return new Promise((resolve, reject) => {
const transaction = db.transaction('messages', 'readonly');
const messages: Array<MessageType> = [];

transaction.onerror = () => {
window.Whisper.Database.handleDOMException(
'getLegacyMessagesNeedingUpgrade transaction error',
transaction.error,
reject
);
};
transaction.oncomplete = () => {
resolve(messages);
};

const store = transaction.objectStore('messages');
const index = store.index('schemaVersion');
const range = IDBKeyRange.upperBound(maxVersion, true);

const request = index.openCursor(range);
let count = 0;

request.onsuccess = event => {
// @ts-ignore
const cursor = event.target.result;

if (cursor) {
count += 1;
messages.push(cursor.value);

if (count >= limit) {
return;
}

cursor.continue();
}
};
request.onerror = () => {
window.Whisper.Database.handleDOMException(
'getLegacyMessagesNeedingUpgrade request error',
request.error,
reject
);
};
});
} finally {
db.close();
}
}

async function saveLegacyMessage(data: MessageType) {
const db = await window.Whisper.Database.open();
try {
await new Promise((resolve, reject) => {
const transaction = db.transaction('messages', 'readwrite');

transaction.onerror = () => {
window.Whisper.Database.handleDOMException(
'saveLegacyMessage transaction error',
transaction.error,
reject
);
};
transaction.oncomplete = resolve;

const store = transaction.objectStore('messages');

if (!data.id) {
data.id = getGuid();
}

const request = store.put(data, data.id);
request.onsuccess = resolve;
request.onerror = () => {
window.Whisper.Database.handleDOMException(
'saveLegacyMessage request error',
request.error,
reject
);
};
});
} finally {
db.close();
}
}
@@ -356,12 +356,6 @@ export type ClientInterface = DataInterface & {
cleanupOrphanedAttachments: () => Promise<void>;
ensureFilePermissions: () => Promise<void>;

getLegacyMessagesNeedingUpgrade: (
limit: number,
options: { maxVersion: number }
) => Promise<Array<MessageType>>;
saveLegacyMessage: (data: MessageType) => Promise<void>;

// Client-side only, and test-only

_removeConversations: (ids: Array<string>) => Promise<void>;