Move all remaining stores to SQLCipher
parent 7aa9031c7f
commit 1755e0adfd
25 changed files with 2464 additions and 2047 deletions

@@ -207,10 +207,10 @@ function exportContactsAndGroups(db, fileWriter) {
   });
 }

-async function importNonMessages(db, parent, options) {
+async function importNonMessages(parent, options) {
   const file = 'db.json';
   const string = await readFileAsText(parent, file);
-  return importFromJsonString(db, string, path.join(parent, file), options);
+  return importFromJsonString(string, path.join(parent, file), options);
 }

 function eliminateClientConfigInBackup(data, targetPath) {

@@ -265,7 +265,7 @@ async function importConversationsFromJSON(conversations, options) {
   );
 }

-async function importFromJsonString(db, jsonString, targetPath, options) {
+async function importFromJsonString(jsonString, targetPath, options) {
   options = options || {};
   _.defaults(options, {
     forceLightImport: false,

@@ -278,136 +278,96 @@ async function importFromJsonString(db, jsonString, targetPath, options) {
     fullImport: true,
   };

-  return new Promise(async (resolve, reject) => {
-    const importObject = JSON.parse(jsonString);
-    delete importObject.debug;
+  const importObject = JSON.parse(jsonString);
+  delete importObject.debug;

-    if (!importObject.sessions || options.forceLightImport) {
-      result.fullImport = false;
+  if (!importObject.sessions || options.forceLightImport) {
+    result.fullImport = false;

-      delete importObject.items;
-      delete importObject.signedPreKeys;
-      delete importObject.preKeys;
-      delete importObject.identityKeys;
-      delete importObject.sessions;
-      delete importObject.unprocessed;

-      window.log.info(
-        'This is a light import; contacts, groups and messages only'
-      );
-    }

-    // We mutate the on-disk backup to prevent the user from importing client
-    //   configuration more than once - that causes lots of encryption errors.
-    //   This of course preserves the true data: conversations and groups.
-    eliminateClientConfigInBackup(importObject, targetPath);

-    const storeNames = _.keys(importObject);
-    window.log.info('Importing to these stores:', storeNames.join(', '));

-    let finished = false;
-    const finish = via => {
-      window.log.info('non-messages import done via', via);
-      if (finished) {
-        resolve(result);
-      }
-      finished = true;
-    };

-    // Special-case conversations key here, going to SQLCipher
-    const { conversations } = importObject;
-    const remainingStoreNames = _.without(
-      storeNames,
-      'conversations',
-      'unprocessed'
-    );
-    try {
-      await importConversationsFromJSON(conversations, options);
-    } catch (error) {
-      reject(error);
-    }

-    // Because the 'are we done?' check below looks at the keys remaining in importObject
-    delete importObject.conversations;
-    delete importObject.items;
-    delete importObject.signedPreKeys;
-    delete importObject.preKeys;
-    delete importObject.identityKeys;
-    delete importObject.sessions;
-    delete importObject.unprocessed;

-    // The rest go to IndexedDB
-    const transaction = db.transaction(remainingStoreNames, 'readwrite');
-    transaction.onerror = () => {
-      Whisper.Database.handleDOMException(
-        'importFromJsonString transaction error',
-        transaction.error,
-        reject
-      );
-    };
-    transaction.oncomplete = finish.bind(null, 'transaction complete');
+    window.log.info(
+      'This is a light import; contacts, groups and messages only'
+    );
+  }

-    _.each(remainingStoreNames, storeName => {
-      const items = importObject[storeName];
+  // We mutate the on-disk backup to prevent the user from importing client
+  //   configuration more than once - that causes lots of encryption errors.
+  //   This of course preserves the true data: conversations and groups.
+  eliminateClientConfigInBackup(importObject, targetPath);

-      window.log.info('Importing items for store', storeName);
+  const storeNames = _.keys(importObject);
+  window.log.info('Importing to these stores:', storeNames.join(', '));

-      let count = 0;
-      let skipCount = 0;
+  // Special-case conversations key here, going to SQLCipher
+  const { conversations } = importObject;
+  const remainingStoreNames = _.without(
+    storeNames,
+    'conversations',
+    'unprocessed'
+  );
+  await importConversationsFromJSON(conversations, options);

-      const finishStore = () => {
-        // added all objects for this store
-        delete importObject[storeName];
-        window.log.info(
-          'Done importing to store',
-          storeName,
-          'Total count:',
-          count,
-          'Skipped:',
-          skipCount
-        );
+  const SAVE_FUNCTIONS = {
+    groups: window.Signal.Data.createOrUpdateGroup,
+    identityKeys: window.Signal.Data.createOrUpdateIdentityKey,
+    items: window.Signal.Data.createOrUpdateItem,
+    preKeys: window.Signal.Data.createOrUpdatePreKey,
+    sessions: window.Signal.Data.createOrUpdateSession,
+    signedPreKeys: window.Signal.Data.createOrUpdateSignedPreKey,
+  };

+  await Promise.all(
+    _.map(remainingStoreNames, async storeName => {
+      const save = SAVE_FUNCTIONS[storeName];
+      if (!_.isFunction(save)) {
+        throw new Error(
+          `importFromJsonString: Didn't have save function for store ${storeName}`
+        );
-        if (_.keys(importObject).length === 0) {
-          // added all object stores
-          window.log.info('DB import complete');
-          finish('puts scheduled');
-        }
-      };
+      }

-      if (!items || !items.length) {
-        finishStore();
+      window.log.info(`Importing items for store ${storeName}`);
+      const toImport = importObject[storeName];

+      if (!toImport || !toImport.length) {
+        window.log.info(`No items in ${storeName} store`);
         return;
       }

-      _.each(items, toAdd => {
-        toAdd = unstringify(toAdd);
+      let skipCount = 0;

+      for (let i = 0, max = toImport.length; i < max; i += 1) {
+        const toAdd = unstringify(toImport[i]);

         const haveGroupAlready =
           storeName === 'groups' && groupLookup[getGroupKey(toAdd)];

         if (haveGroupAlready) {
           skipCount += 1;
-          count += 1;
-          return;
+        } else {
+          // eslint-disable-next-line no-await-in-loop
+          await save(toAdd);
         }

-        const request = transaction.objectStore(storeName).put(toAdd, toAdd.id);
-        request.onsuccess = () => {
-          count += 1;
-          if (count + skipCount >= items.length) {
-            finishStore();
-          }
-        };
-        request.onerror = () => {
-          Whisper.Database.handleDOMException(
-            `importFromJsonString request error (store: ${storeName})`,
-            request.error,
-            reject
-          );
-        };
-      });

-      // We have to check here, because we may have skipped every item, resulting
-      //   in no onsuccess callback at all.
-      if (skipCount === count) {
-        finishStore();
-      }
-    });
-  });

+      window.log.info(
+        'Done importing to store',
+        storeName,
+        'Total count:',
+        toImport.length,
+        'Skipped:',
+        skipCount
+      );
+    })
+  );

+  window.log.info('DB import complete');
+  return result;
 }

 function createDirectory(parent, name) {

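The core of this hunk is easier to see outside the diff: instead of scheduling IndexedDB puts and counting success callbacks, the new importFromJsonString maps each store name to a SQLCipher-backed save function and awaits the writes directly. A minimal sketch of that table-dispatch pattern, assuming an importObject shaped like the backup JSON (store name mapped to an array of records); importStores is a hypothetical wrapper name, not from the commit:

```js
// Illustrative sketch only; mirrors the SAVE_FUNCTIONS dispatch in the new code.
const SAVE_FUNCTIONS = {
  groups: window.Signal.Data.createOrUpdateGroup,
  items: window.Signal.Data.createOrUpdateItem,
  sessions: window.Signal.Data.createOrUpdateSession,
};

async function importStores(importObject) {
  // Stores are imported concurrently; records within a store sequentially.
  await Promise.all(
    Object.keys(importObject).map(async storeName => {
      const save = SAVE_FUNCTIONS[storeName];
      if (typeof save !== 'function') {
        throw new Error(`No save function for store ${storeName}`);
      }

      for (const record of importObject[storeName]) {
        // eslint-disable-next-line no-await-in-loop
        await save(record);
      }
    })
  );
}
```
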
@@ -1043,11 +1003,11 @@ async function loadAttachments(dir, getName, options) {
   // TODO: Handle video screenshots, and image/video thumbnails
 }

-function saveMessage(db, message) {
-  return saveAllMessages(db, [message]);
+function saveMessage(message) {
+  return saveAllMessages([message]);
 }

-async function saveAllMessages(db, rawMessages) {
+async function saveAllMessages(rawMessages) {
   if (rawMessages.length === 0) {
     return;
   }

@@ -1085,7 +1045,7 @@ async function saveAllMessages(rawMessages) {
 //   message, save it, and only then do we move on to the next message. Thus, every
 //   message with attachments needs to be removed from our overall message save with the
 //   filter() call.
-async function importConversation(db, dir, options) {
+async function importConversation(dir, options) {
   options = options || {};
   _.defaults(options, { messageLookup: {} });

@@ -1141,7 +1101,7 @@ async function importConversation(dir, options) {
       message,
       key,
     });
-    return saveMessage(db, message);
+    return saveMessage(message);
   };

   // eslint-disable-next-line more/no-then

@@ -1153,7 +1113,7 @@ async function importConversation(dir, options) {
     return true;
   });

-  await saveAllMessages(db, messages);
+  await saveAllMessages(messages);

   await promiseChain;
   window.log.info(

@@ -1166,7 +1126,7 @@ async function importConversation(dir, options) {
   );
 }

-async function importConversations(db, dir, options) {
+async function importConversations(dir, options) {
   const contents = await getDirContents(dir);
   let promiseChain = Promise.resolve();

@@ -1175,8 +1135,7 @@ async function importConversations(dir, options) {
       return;
     }

-    const loadConversation = () =>
-      importConversation(db, conversationDir, options);
+    const loadConversation = () => importConversation(conversationDir, options);

     // eslint-disable-next-line more/no-then
     promiseChain = promiseChain.then(loadConversation);

@@ -1211,46 +1170,9 @@ async function loadConversationLookup() {
 function getGroupKey(group) {
   return group.id;
 }
-function loadGroupsLookup(db) {
-  return assembleLookup(db, 'groups', getGroupKey);
-}
-
-function assembleLookup(db, storeName, keyFunction) {
-  const lookup = Object.create(null);
-
-  return new Promise((resolve, reject) => {
-    const transaction = db.transaction(storeName, 'readwrite');
-    transaction.onerror = () => {
-      Whisper.Database.handleDOMException(
-        `assembleLookup(${storeName}) transaction error`,
-        transaction.error,
-        reject
-      );
-    };
-    transaction.oncomplete = () => {
-      // not really very useful - fires at unexpected times
-    };
-
-    const store = transaction.objectStore(storeName);
-    const request = store.openCursor();
-    request.onerror = () => {
-      Whisper.Database.handleDOMException(
-        `assembleLookup(${storeName}) request error`,
-        request.error,
-        reject
-      );
-    };
-    request.onsuccess = event => {
-      const cursor = event.target.result;
-      if (cursor && cursor.value) {
-        lookup[keyFunction(cursor.value)] = true;
-        cursor.continue();
-      } else {
-        window.log.info(`Done creating ${storeName} lookup`);
-        resolve(lookup);
-      }
-    };
-  });
-}
+async function loadGroupsLookup() {
+  const array = await window.Signal.Data.getAllGroupIds();
+  return fromPairs(map(array, item => [getGroupKey(item), true]));
+}

 function getDirectoryForExport() {

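The replacement for assembleLookup is worth a note: the old code walked an IndexedDB cursor to build a `{ key: true }` presence map, while the new code fetches all ids over the IPC channel and builds the same map with lodash. A condensed sketch of the shape (buildLookup is a hypothetical name; fromPairs and map are the lodash helpers used above):

```js
const { fromPairs, map } = require('lodash');

// Builds the same { [key]: true } lookup the old IndexedDB cursor produced;
// the import path uses it to skip groups that already exist locally.
function buildLookup(ids) {
  return fromPairs(map(ids, id => [id, true]));
}

// buildLookup(['group-a', 'group-b'])['group-a'] === true
```
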
@@ -1383,11 +1305,10 @@ async function importFromDirectory(directory, options) {
   options = options || {};

   try {
-    const db = await Whisper.Database.open();
     const lookups = await Promise.all([
-      loadMessagesLookup(db),
-      loadConversationLookup(db),
-      loadGroupsLookup(db),
+      loadMessagesLookup(),
+      loadConversationLookup(),
+      loadGroupsLookup(),
     ]);
     const [messageLookup, conversationLookup, groupLookup] = lookups;
     options = Object.assign({}, options, {

@@ -1422,8 +1343,8 @@ async function importFromDirectory(directory, options) {
       options = Object.assign({}, options, {
         attachmentsDir,
       });
-      const result = await importNonMessages(db, stagingDir, options);
-      await importConversations(db, stagingDir, Object.assign({}, options));
+      const result = await importNonMessages(stagingDir, options);
+      await importConversations(stagingDir, Object.assign({}, options));

       window.log.info('Done importing from backup!');
       return result;

@@ -1437,8 +1358,8 @@ async function importFromDirectory(directory, options) {
       }
     }

-    const result = await importNonMessages(db, directory, options);
-    await importConversations(db, directory, options);
+    const result = await importNonMessages(directory, options);
+    await importConversations(directory, options);

     window.log.info('Done importing!');
     return result;

@@ -1,10 +1,19 @@
-/* global window, setTimeout */
+/* global window, setTimeout, IDBKeyRange */

 const electron = require('electron');

-const { forEach, isFunction, isObject, merge } = require('lodash');
+const {
+  cloneDeep,
+  forEach,
+  get,
+  isFunction,
+  isObject,
+  map,
+  merge,
+  set,
+} = require('lodash');

 const { deferredToPromise } = require('./deferred_to_promise');
+const { base64ToArrayBuffer, arrayBufferToBase64 } = require('./crypto');
 const MessageType = require('./types/message');

 const { ipcRenderer } = electron;

@@ -13,11 +22,6 @@ const { ipcRenderer } = electron;
 //   any warnings that might be sent to the console in that case.
 ipcRenderer.setMaxListeners(0);

-// calls to search for when finding functions to convert:
-//   .fetch(
-//   .save(
-//   .destroy(
-
 const DATABASE_UPDATE_TIMEOUT = 2 * 60 * 1000; // two minutes

 const SQL_CHANNEL_KEY = 'sql-channel';

@@ -38,6 +42,47 @@ module.exports = {
   close,
   removeDB,

+  createOrUpdateGroup,
+  getGroupById,
+  getAllGroupIds,
+  bulkAddGroups,
+  removeGroupById,
+  removeAllGroups,
+
+  createOrUpdateIdentityKey,
+  getIdentityKeyById,
+  bulkAddIdentityKeys,
+  removeIdentityKeyById,
+  removeAllIdentityKeys,
+
+  createOrUpdatePreKey,
+  getPreKeyById,
+  bulkAddPreKeys,
+  removePreKeyById,
+  removeAllPreKeys,
+
+  createOrUpdateSignedPreKey,
+  getSignedPreKeyById,
+  getAllSignedPreKeys,
+  bulkAddSignedPreKeys,
+  removeSignedPreKeyById,
+  removeAllSignedPreKeys,
+
+  createOrUpdateItem,
+  getItemById,
+  getAllItems,
+  bulkAddItems,
+  removeItemById,
+  removeAllItems,
+
+  createOrUpdateSession,
+  getSessionById,
+  getSessionsByNumber,
+  bulkAddSessions,
+  removeSessionById,
+  removeSessionsByNumber,
+  removeAllSessions,
+
   getConversationCount,
   saveConversation,
   saveConversations,

@@ -81,6 +126,8 @@ module.exports = {
   removeAllUnprocessed,

   removeAll,
+  removeAllConfiguration,

   removeOtherData,
   cleanupOrphanedAttachments,

@@ -229,6 +276,36 @@ forEach(module.exports, fn => {
   }
 });

+function keysToArrayBuffer(keys, data) {
+  const updated = cloneDeep(data);
+  for (let i = 0, max = keys.length; i < max; i += 1) {
+    const key = keys[i];
+    const value = get(data, key);
+
+    if (value) {
+      set(updated, key, base64ToArrayBuffer(value));
+    }
+  }
+
+  return updated;
+}
+
+function keysFromArrayBuffer(keys, data) {
+  const updated = cloneDeep(data);
+  for (let i = 0, max = keys.length; i < max; i += 1) {
+    const key = keys[i];
+    const value = get(data, key);
+
+    if (value) {
+      set(updated, key, arrayBufferToBase64(value));
+    }
+  }
+
+  return updated;
+}
+
 // Top-level calls

 // Note: will need to restart the app after calling this, to set up afresh
 async function close() {
   await channels.close();

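keysToArrayBuffer and keysFromArrayBuffer exist because everything crossing the sql-channel to the main process has to be JSON-serializable: ArrayBuffer fields are flattened to base64 strings on the way in and restored on the way out. A small usage sketch (the record value is made up for illustration):

```js
// Round-trip for a record with one binary field, as the identity key
// functions below do with IDENTITY_KEY_KEYS = ['publicKey'].
const record = {
  id: '+12025550100',
  publicKey: new Uint8Array([1, 2, 3]).buffer, // ArrayBuffer
};

// Before sending over IPC / storing in SQLCipher: ArrayBuffer -> base64
const serializable = keysFromArrayBuffer(['publicKey'], record);

// After reading back: base64 -> ArrayBuffer
const restored = keysToArrayBuffer(['publicKey'], serializable);
```
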
@@ -239,6 +316,182 @@ async function removeDB() {
   await channels.removeDB();
 }

+// Groups
+
+async function createOrUpdateGroup(data) {
+  await channels.createOrUpdateGroup(data);
+}
+async function getGroupById(id) {
+  const group = await channels.getGroupById(id);
+  return group;
+}
+async function getAllGroupIds() {
+  const ids = await channels.getAllGroupIds();
+  return ids;
+}
+async function bulkAddGroups(array) {
+  await channels.bulkAddGroups(array);
+}
+async function removeGroupById(id) {
+  await channels.removeGroupById(id);
+}
+async function removeAllGroups() {
+  await channels.removeAllGroups();
+}
+
+// Identity Keys
+
+const IDENTITY_KEY_KEYS = ['publicKey'];
+async function createOrUpdateIdentityKey(data) {
+  const updated = keysFromArrayBuffer(IDENTITY_KEY_KEYS, data);
+  await channels.createOrUpdateIdentityKey(updated);
+}
+async function getIdentityKeyById(id) {
+  const data = await channels.getIdentityKeyById(id);
+  return keysToArrayBuffer(IDENTITY_KEY_KEYS, data);
+}
+async function bulkAddIdentityKeys(array) {
+  const updated = map(array, data =>
+    keysFromArrayBuffer(IDENTITY_KEY_KEYS, data)
+  );
+  await channels.bulkAddIdentityKeys(updated);
+}
+async function removeIdentityKeyById(id) {
+  await channels.removeIdentityKeyById(id);
+}
+async function removeAllIdentityKeys() {
+  await channels.removeAllIdentityKeys();
+}
+
+// Pre Keys
+
+async function createOrUpdatePreKey(data) {
+  const updated = keysFromArrayBuffer(PRE_KEY_KEYS, data);
+  await channels.createOrUpdatePreKey(updated);
+}
+async function getPreKeyById(id) {
+  const data = await channels.getPreKeyById(id);
+  return keysToArrayBuffer(PRE_KEY_KEYS, data);
+}
+async function bulkAddPreKeys(array) {
+  const updated = map(array, data => keysFromArrayBuffer(PRE_KEY_KEYS, data));
+  await channels.bulkAddPreKeys(updated);
+}
+async function removePreKeyById(id) {
+  await channels.removePreKeyById(id);
+}
+async function removeAllPreKeys() {
+  await channels.removeAllPreKeys();
+}
+
+// Signed Pre Keys
+
+const PRE_KEY_KEYS = ['privateKey', 'publicKey'];
+async function createOrUpdateSignedPreKey(data) {
+  const updated = keysFromArrayBuffer(PRE_KEY_KEYS, data);
+  await channels.createOrUpdateSignedPreKey(updated);
+}
+async function getSignedPreKeyById(id) {
+  const data = await channels.getSignedPreKeyById(id);
+  return keysToArrayBuffer(PRE_KEY_KEYS, data);
+}
+async function getAllSignedPreKeys() {
+  const keys = await channels.getAllSignedPreKeys();
+  return keys;
+}
+async function bulkAddSignedPreKeys(array) {
+  const updated = map(array, data => keysFromArrayBuffer(PRE_KEY_KEYS, data));
+  await channels.bulkAddSignedPreKeys(updated);
+}
+async function removeSignedPreKeyById(id) {
+  await channels.removeSignedPreKeyById(id);
+}
+async function removeAllSignedPreKeys() {
+  await channels.removeAllSignedPreKeys();
+}
+
+// Items
+
+const ITEM_KEYS = {
+  identityKey: ['value.pubKey', 'value.privKey'],
+  senderCertificate: [
+    'value.certificate',
+    'value.signature',
+    'value.serialized',
+  ],
+  signaling_key: ['value'],
+  profileKey: ['value'],
+};
+async function createOrUpdateItem(data) {
+  const { id } = data;
+  if (!id) {
+    throw new Error(
+      'createOrUpdateItem: Provided data did not have a truthy id'
+    );
+  }
+
+  const keys = ITEM_KEYS[id];
+  const updated = Array.isArray(keys) ? keysFromArrayBuffer(keys, data) : data;
+
+  await channels.createOrUpdateItem(updated);
+}
+async function getItemById(id) {
+  const keys = ITEM_KEYS[id];
+  const data = await channels.getItemById(id);
+
+  return Array.isArray(keys) ? keysToArrayBuffer(keys, data) : data;
+}
+async function getAllItems() {
+  const items = await channels.getAllItems();
+  return map(items, item => {
+    const { id } = item;
+    const keys = ITEM_KEYS[id];
+    return Array.isArray(keys) ? keysToArrayBuffer(keys, item) : item;
+  });
+}
+async function bulkAddItems(array) {
+  const updated = map(array, data => {
+    const { id } = data;
+    const keys = ITEM_KEYS[id];
+    return Array.isArray(keys) ? keysFromArrayBuffer(keys, data) : data;
+  });
+  await channels.bulkAddItems(updated);
+}
+async function removeItemById(id) {
+  await channels.removeItemById(id);
+}
+async function removeAllItems() {
+  await channels.removeAllItems();
+}
+
+// Sessions
+
+async function createOrUpdateSession(data) {
+  await channels.createOrUpdateSession(data);
+}
+async function getSessionById(id) {
+  const session = await channels.getSessionById(id);
+  return session;
+}
+async function getSessionsByNumber(number) {
+  const sessions = await channels.getSessionsByNumber(number);
+  return sessions;
+}
+async function bulkAddSessions(array) {
+  await channels.bulkAddSessions(array);
+}
+async function removeSessionById(id) {
+  await channels.removeSessionById(id);
+}
+async function removeSessionsByNumber(number) {
+  await channels.removeSessionsByNumber(number);
+}
+async function removeAllSessions(id) {
+  await channels.removeAllSessions(id);
+}
+
 // Conversation

 async function getConversationCount() {
   return channels.getConversationCount();
 }

@@ -319,6 +572,8 @@ async function searchConversations(query, { ConversationCollection }) {
   return collection;
 }

+// Message
+
 async function getMessageCount() {
   return channels.getMessageCount();
 }

@@ -329,10 +584,41 @@ async function saveMessage(data, { forceSave, Message } = {}) {
   return id;
 }

-async function saveLegacyMessage(data, { Message }) {
-  const message = new Message(data);
-  await deferredToPromise(message.save());
-  return message.id;
+async function saveLegacyMessage(data) {
+  const db = await window.Whisper.Database.open();
+  try {
+    await new Promise((resolve, reject) => {
+      const transaction = db.transaction('messages', 'readwrite');
+
+      transaction.onerror = () => {
+        window.Whisper.Database.handleDOMException(
+          'saveLegacyMessage transaction error',
+          transaction.error,
+          reject
+        );
+      };
+      transaction.oncomplete = resolve;
+
+      const store = transaction.objectStore('messages');
+
+      if (!data.id) {
+        // eslint-disable-next-line no-param-reassign
+        data.id = window.getGuid();
+      }
+
+      const request = store.put(data, data.id);
+      request.onsuccess = resolve;
+      request.onerror = () => {
+        window.Whisper.Database.handleDOMException(
+          'saveLegacyMessage request error',
+          request.error,
+          reject
+        );
+      };
+    });
+  } finally {
+    db.close();
+  }
 }

 async function saveMessages(arrayOfMessages, { forceSave } = {}) {

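saveLegacyMessage now writes to IndexedDB directly instead of going through a Backbone model; the body above is the standard promisified-IDBRequest pattern. Factored out, the pattern looks like this (putInStore is a hypothetical helper for illustration, not part of the commit):

```js
// Wraps a single readwrite put in a Promise, as saveLegacyMessage does above.
function putInStore(db, storeName, value, key) {
  return new Promise((resolve, reject) => {
    const transaction = db.transaction(storeName, 'readwrite');
    transaction.onerror = () => reject(transaction.error);

    const request = transaction.objectStore(storeName).put(value, key);
    request.onsuccess = () => resolve(request.result);
    request.onerror = () => reject(request.error);
  });
}
```
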
@@ -459,6 +745,8 @@ async function getNextExpiringMessage({ MessageCollection }) {
   return new MessageCollection(messages);
 }

+// Unprocessed
+
 async function getUnprocessedCount() {
   return channels.getUnprocessedCount();
 }

@@ -495,10 +783,16 @@ async function removeAllUnprocessed() {
   await channels.removeAllUnprocessed();
 }

+// Other
+
 async function removeAll() {
   await channels.removeAll();
 }

+async function removeAllConfiguration() {
+  await channels.removeAllConfiguration();
+}
+
 async function cleanupOrphanedAttachments() {
   await callChannel(CLEANUP_ORPHANED_ATTACHMENTS_KEY);
 }

@@ -529,28 +823,61 @@ async function callChannel(name) {
   });
 }

-// Functions below here return JSON
+// Functions below here return plain JSON instead of Backbone Models

 async function getLegacyMessagesNeedingUpgrade(
   limit,
-  { MessageCollection, maxVersion = MessageType.CURRENT_SCHEMA_VERSION }
+  { maxVersion = MessageType.CURRENT_SCHEMA_VERSION }
 ) {
-  const messages = new MessageCollection();
+  const db = await window.Whisper.Database.open();
+  try {
+    await new Promise((resolve, reject) => {
+      const transaction = db.transaction('messages', 'readonly');
+      const messages = [];

-  await deferredToPromise(
-    messages.fetch({
-      limit,
-      index: {
-        name: 'schemaVersion',
-        upper: maxVersion,
-        excludeUpper: true,
-        order: 'desc',
-      },
-    })
-  );
+      transaction.onerror = () => {
+        window.Whisper.Database.handleDOMException(
+          'getLegacyMessagesNeedingUpgrade transaction error',
+          transaction.error,
+          reject
+        );
+      };
+      transaction.oncomplete = () => {
+        resolve(messages);
+      };

-  const models = messages.models || [];
-  return models.map(model => model.toJSON());
+      const store = transaction.objectStore('messages');
+      const index = store.index('schemaVersion');
+      const range = IDBKeyRange.upperBound(maxVersion, true);
+
+      const request = index.openCursor(range);
+      let count = 0;
+
+      request.onsuccess = event => {
+        const cursor = event.target.result;
+
+        if (cursor) {
+          count += 1;
+          messages.push(cursor.value);
+
+          if (count >= limit) {
+            return;
+          }
+
+          cursor.continue();
+        }
+      };
+      request.onerror = () => {
+        window.Whisper.Database.handleDOMException(
+          'getLegacyMessagesNeedingUpgrade request error',
+          request.error,
+          reject
+        );
+      };
+    });
+  } finally {
+    db.close();
+  }
 }

 async function getMessagesNeedingUpgrade(

js/modules/indexeddb.js (new file, 168 lines)

@@ -0,0 +1,168 @@
+/* global window, Whisper, textsecure */
+
+const { isFunction } = require('lodash');
+
+const MessageDataMigrator = require('./messages_data_migrator');
+const {
+  run,
+  getLatestVersion,
+  getDatabase,
+} = require('./migrations/migrations');
+
+const MESSAGE_MINIMUM_VERSION = 7;
+
+module.exports = {
+  doesDatabaseExist,
+  mandatoryMessageUpgrade,
+  MESSAGE_MINIMUM_VERSION,
+  migrateAllToSQLCipher,
+  removeDatabase,
+  runMigrations,
+};
+
+async function runMigrations() {
+  window.log.info('Run migrations on database with attachment data');
+  await run({
+    Backbone: window.Backbone,
+    logger: window.log,
+  });
+
+  Whisper.Database.migrations[0].version = getLatestVersion();
+}
+
+async function mandatoryMessageUpgrade({ upgradeMessageSchema } = {}) {
+  if (!isFunction(upgradeMessageSchema)) {
+    throw new Error(
+      'mandatoryMessageUpgrade: upgradeMessageSchema must be a function!'
+    );
+  }
+
+  const NUM_MESSAGES_PER_BATCH = 10;
+  window.log.info(
+    'upgradeMessages: Mandatory message schema upgrade started.',
+    `Target version: ${MESSAGE_MINIMUM_VERSION}`
+  );
+
+  let isMigrationWithoutIndexComplete = false;
+  while (!isMigrationWithoutIndexComplete) {
+    const database = getDatabase();
+    // eslint-disable-next-line no-await-in-loop
+    const batchWithoutIndex = await MessageDataMigrator.processNextBatchWithoutIndex(
+      {
+        databaseName: database.name,
+        minDatabaseVersion: database.version,
+        numMessagesPerBatch: NUM_MESSAGES_PER_BATCH,
+        upgradeMessageSchema,
+        maxVersion: MESSAGE_MINIMUM_VERSION,
+        BackboneMessage: Whisper.Message,
+        saveMessage: window.Signal.Data.saveLegacyMessage,
+      }
+    );
+    window.log.info(
+      'upgradeMessages: upgrade without index',
+      batchWithoutIndex
+    );
+    isMigrationWithoutIndexComplete = batchWithoutIndex.done;
+  }
+  window.log.info('upgradeMessages: upgrade without index complete!');
+
+  let isMigrationWithIndexComplete = false;
+  while (!isMigrationWithIndexComplete) {
+    // eslint-disable-next-line no-await-in-loop
+    const batchWithIndex = await MessageDataMigrator.processNext({
+      BackboneMessage: Whisper.Message,
+      BackboneMessageCollection: Whisper.MessageCollection,
+      numMessagesPerBatch: NUM_MESSAGES_PER_BATCH,
+      upgradeMessageSchema,
+      getMessagesNeedingUpgrade:
+        window.Signal.Data.getLegacyMessagesNeedingUpgrade,
+      saveMessage: window.Signal.Data.saveLegacyMessage,
+      maxVersion: MESSAGE_MINIMUM_VERSION,
+    });
+    window.log.info('upgradeMessages: upgrade with index', batchWithIndex);
+    isMigrationWithIndexComplete = batchWithIndex.done;
+  }
+  window.log.info('upgradeMessages: upgrade with index complete!');
+
+  window.log.info('upgradeMessages: Message schema upgrade complete');
+}
+
+async function migrateAllToSQLCipher({ writeNewAttachmentData, Views } = {}) {
+  if (!isFunction(writeNewAttachmentData)) {
+    throw new Error(
+      'migrateAllToSQLCipher: writeNewAttachmentData must be a function'
+    );
+  }
+  if (!Views) {
+    throw new Error('migrateAllToSQLCipher: Views must be provided!');
+  }
+
+  let totalMessages;
+  const db = await Whisper.Database.open();
+
+  function showMigrationStatus(current) {
+    const status = `${current}/${totalMessages}`;
+    Views.Initialization.setMessage(
+      window.i18n('migratingToSQLCipher', [status])
+    );
+  }
+
+  try {
+    totalMessages = await MessageDataMigrator.getNumMessages({
+      connection: db,
+    });
+  } catch (error) {
+    window.log.error(
+      'background.getNumMessages error:',
+      error && error.stack ? error.stack : error
+    );
+    totalMessages = 0;
+  }
+
+  if (totalMessages) {
+    window.log.info(`About to migrate ${totalMessages} messages`);
+    showMigrationStatus(0);
+  } else {
+    window.log.info('About to migrate non-messages');
+  }
+
+  await window.Signal.migrateToSQL({
+    db,
+    clearStores: Whisper.Database.clearStores,
+    handleDOMException: Whisper.Database.handleDOMException,
+    arrayBufferToString: textsecure.MessageReceiver.arrayBufferToStringBase64,
+    countCallback: count => {
+      window.log.info(`Migration: ${count} messages complete`);
+      showMigrationStatus(count);
+    },
+    writeNewAttachmentData,
+  });
+
+  db.close();
+}
+
+async function doesDatabaseExist() {
+  return new Promise((resolve, reject) => {
+    const { id } = Whisper.Database;
+    const req = window.indexedDB.open(id);
+
+    let existed = true;
+
+    req.onerror = reject;
+    req.onsuccess = () => {
+      req.result.close();
+      resolve(existed);
+    };
+    req.onupgradeneeded = () => {
+      if (req.result.version === 1) {
+        existed = false;
+        window.indexedDB.deleteDatabase(id);
+      }
+    };
+  });
+}
+
+function removeDatabase() {
+  window.log.info(`Deleting IndexedDB database '${Whisper.Database.id}'`);
+  window.indexedDB.deleteDatabase(Whisper.Database.id);
+}

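The new js/modules/indexeddb.js gathers the whole legacy-database lifecycle in one place. A plausible startup sequence using its exports looks like the following; the wiring (where the arguments come from, and when removeDatabase is safe to call) is an assumption for illustration and is not shown in this commit:

```js
const IndexedDB = require('./js/modules/indexeddb');

// Hypothetical startup wiring: migrate a legacy IndexedDB profile into SQLCipher.
async function upgradeLegacyData({ upgradeMessageSchema, writeNewAttachmentData, Views }) {
  if (!(await IndexedDB.doesDatabaseExist())) {
    return; // fresh install, nothing to migrate
  }

  await IndexedDB.runMigrations();
  await IndexedDB.mandatoryMessageUpgrade({ upgradeMessageSchema });
  await IndexedDB.migrateAllToSQLCipher({ writeNewAttachmentData, Views });
}
```
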
@@ -2,10 +2,26 @@

 const { includes, isFunction, isString, last, map } = require('lodash');
 const {
+  bulkAddGroups,
+  bulkAddSessions,
+  bulkAddIdentityKeys,
+  bulkAddPreKeys,
+  bulkAddSignedPreKeys,
+  bulkAddItems,
+
+  removeGroupById,
+  removeSessionById,
+  removeIdentityKeyById,
+  removePreKeyById,
+  removeSignedPreKeyById,
+  removeItemById,
+
   saveMessages,
   _removeMessages,

   saveUnprocesseds,
   removeUnprocessed,

   saveConversations,
   _removeConversations,
 } = require('./data');

@@ -132,6 +148,8 @@ async function migrateToSQL({
   }

+  complete = false;
+  lastIndex = null;

   while (!complete) {
     // eslint-disable-next-line no-await-in-loop
     const status = await migrateStoreToSQLite({

@@ -163,6 +181,153 @@ async function migrateToSQL({
     window.log.warn('Failed to clear conversations store');
   }

+  complete = false;
+  lastIndex = null;
+
+  while (!complete) {
+    // eslint-disable-next-line no-await-in-loop
+    const status = await migrateStoreToSQLite({
+      db,
+      // eslint-disable-next-line no-loop-func
+      save: bulkAddGroups,
+      remove: removeGroupById,
+      storeName: 'groups',
+      handleDOMException,
+      lastIndex,
+      batchSize: 10,
+    });
+
+    ({ complete, lastIndex } = status);
+  }
+  window.log.info('migrateToSQL: migrate of groups complete');
+  try {
+    await clearStores(['groups']);
+  } catch (error) {
+    window.log.warn('Failed to clear groups store');
+  }
+
+  complete = false;
+  lastIndex = null;
+
+  while (!complete) {
+    // eslint-disable-next-line no-await-in-loop
+    const status = await migrateStoreToSQLite({
+      db,
+      // eslint-disable-next-line no-loop-func
+      save: bulkAddSessions,
+      remove: removeSessionById,
+      storeName: 'sessions',
+      handleDOMException,
+      lastIndex,
+      batchSize: 10,
+    });
+
+    ({ complete, lastIndex } = status);
+  }
+  window.log.info('migrateToSQL: migrate of sessions complete');
+  try {
+    await clearStores(['sessions']);
+  } catch (error) {
+    window.log.warn('Failed to clear sessions store');
+  }
+
+  complete = false;
+  lastIndex = null;
+
+  while (!complete) {
+    // eslint-disable-next-line no-await-in-loop
+    const status = await migrateStoreToSQLite({
+      db,
+      // eslint-disable-next-line no-loop-func
+      save: bulkAddIdentityKeys,
+      remove: removeIdentityKeyById,
+      storeName: 'identityKeys',
+      handleDOMException,
+      lastIndex,
+      batchSize: 10,
+    });
+
+    ({ complete, lastIndex } = status);
+  }
+  window.log.info('migrateToSQL: migrate of identityKeys complete');
+  try {
+    await clearStores(['identityKeys']);
+  } catch (error) {
+    window.log.warn('Failed to clear identityKeys store');
+  }
+
+  complete = false;
+  lastIndex = null;
+
+  while (!complete) {
+    // eslint-disable-next-line no-await-in-loop
+    const status = await migrateStoreToSQLite({
+      db,
+      // eslint-disable-next-line no-loop-func
+      save: bulkAddPreKeys,
+      remove: removePreKeyById,
+      storeName: 'preKeys',
+      handleDOMException,
+      lastIndex,
+      batchSize: 10,
+    });
+
+    ({ complete, lastIndex } = status);
+  }
+  window.log.info('migrateToSQL: migrate of preKeys complete');
+  try {
+    await clearStores(['preKeys']);
+  } catch (error) {
+    window.log.warn('Failed to clear preKeys store');
+  }
+
+  complete = false;
+  lastIndex = null;
+
+  while (!complete) {
+    // eslint-disable-next-line no-await-in-loop
+    const status = await migrateStoreToSQLite({
+      db,
+      // eslint-disable-next-line no-loop-func
+      save: bulkAddSignedPreKeys,
+      remove: removeSignedPreKeyById,
+      storeName: 'signedPreKeys',
+      handleDOMException,
+      lastIndex,
+      batchSize: 10,
+    });
+
+    ({ complete, lastIndex } = status);
+  }
+  window.log.info('migrateToSQL: migrate of signedPreKeys complete');
+  try {
+    await clearStores(['signedPreKeys']);
+  } catch (error) {
+    window.log.warn('Failed to clear signedPreKeys store');
+  }
+
+  complete = false;
+  lastIndex = null;
+
+  while (!complete) {
+    // eslint-disable-next-line no-await-in-loop
+    const status = await migrateStoreToSQLite({
+      db,
+      // eslint-disable-next-line no-loop-func
+      save: bulkAddItems,
+      remove: removeItemById,
+      storeName: 'items',
+      handleDOMException,
+      lastIndex,
+      batchSize: 10,
+    });
+
+    ({ complete, lastIndex } = status);
+  }
+  window.log.info('migrateToSQL: migrate of items complete');
+  // Note: we don't clear the items store because it contains important metadata which,
+  //   if this process fails, will be crucial to going through this process again.
+
+  window.log.info('migrateToSQL: complete');
 }

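Each of the new store migrations in migrateToSQL repeats one batch loop, varying only the save function, remove function, and store name. The shared shape, condensed into a parameterized helper (an illustrative refactor, not part of the commit; migrateStoreToSQLite is the existing helper the loops call):

```js
// One store's migration: process batches via migrateStoreToSQLite, tracking
// progress with lastIndex, until the helper reports the store is drained.
async function migrateStore({ db, storeName, save, remove, handleDOMException }) {
  let complete = false;
  let lastIndex = null;

  while (!complete) {
    // eslint-disable-next-line no-await-in-loop
    const status = await migrateStoreToSQLite({
      db,
      save,
      remove,
      storeName,
      handleDOMException,
      lastIndex,
      batchSize: 10,
    });

    ({ complete, lastIndex } = status);
  }

  window.log.info(`migrateToSQL: migrate of ${storeName} complete`);
}
```
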
@@ -1,13 +1,13 @@
 /* global window, Whisper */

-const Migrations0DatabaseWithAttachmentData = require('./migrations_0_database_with_attachment_data');
+const Migrations = require('./migrations');

 exports.getPlaceholderMigrations = () => {
-  const last0MigrationVersion = Migrations0DatabaseWithAttachmentData.getLatestVersion();
+  const version = Migrations.getLatestVersion();

   return [
     {
-      version: last0MigrationVersion,
+      version,
       migrate() {
         throw new Error(
           'Unexpected invocation of placeholder migration!' +

@@ -170,8 +170,19 @@ const migrations = [
     migrate(transaction, next) {
       window.log.info('Migration 19');

+      // Empty because we don't want to cause incompatibility with beta users who have
+      //   already run migration 19 when it was object store removal.
+
+      next();
+    },
+  },
+  {
+    version: 20,
+    migrate(transaction, next) {
+      window.log.info('Migration 20');
+
       // Empty because we don't want to cause incompatibility with users who have already
-      //   run migration 19 when it was the object store removal.
+      //   run migration 20 when it was object store removal.

       next();
     },

@@ -1,84 +0,0 @@
-/* global window */
-
-const { last, includes } = require('lodash');
-
-const { open } = require('../database');
-const settings = require('../settings');
-const { runMigrations } = require('./run_migrations');
-
-// These are cleanup migrations, to be run after migration to SQLCipher
-exports.migrations = [
-  {
-    version: 20,
-    migrate(transaction, next) {
-      window.log.info('Migration 20');
-
-      const { db } = transaction;
-
-      // This should be run after things are migrated to SQLCipher
-
-      // We check for existence first, because this removal was present in v1.17.0.beta.1,
-      //   but reverted in v1.17.0-beta.3
-
-      if (includes(db.objectStoreNames, 'messages')) {
-        window.log.info('Removing messages store');
-        db.deleteObjectStore('messages');
-      }
-      if (includes(db.objectStoreNames, 'unprocessed')) {
-        window.log.info('Removing unprocessed store');
-        db.deleteObjectStore('unprocessed');
-      }
-      if (includes(db.objectStoreNames, 'conversations')) {
-        window.log.info('Removing conversations store');
-        db.deleteObjectStore('conversations');
-      }
-
-      next();
-    },
-  },
-];
-
-exports.run = async ({ Backbone, logger } = {}) => {
-  const database = {
-    id: 'signal',
-    nolog: true,
-    migrations: exports.migrations,
-  };
-
-  const { canRun } = await exports.getStatus({ database });
-  if (!canRun) {
-    throw new Error(
-      'Cannot run migrations on database without attachment data'
-    );
-  }
-
-  await runMigrations({
-    Backbone,
-    logger,
-    database,
-  });
-};
-
-exports.getStatus = async ({ database } = {}) => {
-  const connection = await open(database.id, database.version);
-  const isAttachmentMigrationComplete = await settings.isAttachmentMigrationComplete(
-    connection
-  );
-  const hasMigrations = exports.migrations.length > 0;
-
-  const canRun = isAttachmentMigrationComplete && hasMigrations;
-  return {
-    isAttachmentMigrationComplete,
-    hasMigrations,
-    canRun,
-  };
-};
-
-exports.getLatestVersion = () => {
-  const lastMigration = last(exports.migrations);
-  if (!lastMigration) {
-    return null;
-  }
-
-  return lastMigration.version;
-};

@@ -52,7 +52,10 @@ exports.runMigrations = async ({ Backbone, database, logger } = {}) => {
     storeName: 'items',
   }))();

+  // Note: this legacy migration technique is required to bring old clients with
+  //   data in IndexedDB forward into the new world of SQLCipher only.
   await deferredToPromise(migrationCollection.fetch({ limit: 1 }));

   logger.info('Close database connection');
   await closeDatabaseConnection({ Backbone });
 };

@@ -5,6 +5,7 @@ const Crypto = require('./crypto');
 const Data = require('./data');
 const Database = require('./database');
 const Emoji = require('../../ts/util/emoji');
+const IndexedDB = require('./indexeddb');
 const Notifications = require('../../ts/notifications');
 const OS = require('../../ts/OS');
 const Settings = require('./settings');

@@ -63,9 +64,7 @@ const {
   getPlaceholderMigrations,
   getCurrentVersion,
 } = require('./migrations/get_placeholder_migrations');

-const Migrations0DatabaseWithAttachmentData = require('./migrations/migrations_0_database_with_attachment_data');
-const Migrations1DatabaseWithoutAttachmentData = require('./migrations/migrations_1_database_without_attachment_data');
+const { run } = require('./migrations/migrations');

 // Types
 const AttachmentType = require('./types/attachment');

@@ -132,8 +131,7 @@ function initializeMigrations({
     loadAttachmentData,
     loadQuoteData,
     loadMessage: MessageType.createAttachmentLoader(loadAttachmentData),
-    Migrations0DatabaseWithAttachmentData,
-    Migrations1DatabaseWithoutAttachmentData,
+    run,
     upgradeMessageSchema: (message, options = {}) => {
       const { maxVersion } = options;

@@ -225,6 +223,7 @@ exports.setup = (options = {}) => {
     Data,
     Database,
     Emoji,
+    IndexedDB,
     Migrations,
     Notifications,
     OS,