Move to centralized message/cache data layer

Also, ensure that conversation.messageCollection has nothing in it
unless it has an associated ConversationView.
Scott Nonnenberg 2018-07-25 15:02:27 -07:00
parent 34231168a7
commit f39a96bc76
21 changed files with 1119 additions and 993 deletions
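The heart of the change, visible throughout the diffs below: call sites stop constructing Backbone models and driving .fetch()/.save()/.destroy() themselves, and instead go through the new js/modules/data.js module exposed as window.Signal.Data. A rough before/after sketch of the pattern (illustrative only; `attributes` and the exact options vary by call site):

// Before: the call site owns the Backbone model and its jQuery deferred
const message = new Whisper.Message(attributes);
await deferredToPromise(message.save());

// After: the centralized data layer performs the save; per data.js below,
// the caller hands it the Backbone constructor to use
await window.Signal.Data.saveMessage(attributes, { Message: Whisper.Message });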


@@ -674,6 +674,7 @@ async function exportConversation(db, conversation, options) {
const writer = await createFileAndWriter(dir, 'messages.json');
return new Promise(async (resolve, reject) => {
// TODO: need to iterate through message ids, export using window.Signal.Data
const transaction = db.transaction('messages', 'readwrite');
transaction.onerror = () => {
Whisper.Database.handleDOMException(
@@ -980,6 +981,8 @@ async function loadAttachments(dir, getName, options) {
})
);
// TODO: Handle video screenshots, and image/video thumbnails
window.log.info('loadAttachments', { message });
}
@@ -989,63 +992,37 @@ function saveMessage(db, message) {
async function saveAllMessages(db, rawMessages) {
if (rawMessages.length === 0) {
return Promise.resolve();
return;
}
const { writeMessageAttachments, upgradeMessageSchema } = Signal.Migrations;
const importAndUpgrade = async message =>
upgradeMessageSchema(await writeMessageAttachments(message));
try {
const { writeMessageAttachments, upgradeMessageSchema } = Signal.Migrations;
const importAndUpgrade = async message =>
upgradeMessageSchema(await writeMessageAttachments(message));
const messages = await Promise.all(rawMessages.map(importAndUpgrade));
const messages = await Promise.all(rawMessages.map(importAndUpgrade));
return new Promise((resolve, reject) => {
let finished = false;
const finish = via => {
window.log.info('messages done saving via', via);
if (finished) {
resolve();
}
finished = true;
};
const transaction = db.transaction('messages', 'readwrite');
transaction.onerror = () => {
Whisper.Database.handleDOMException(
'saveAllMessages transaction error',
transaction.error,
reject
);
};
transaction.oncomplete = finish.bind(null, 'transaction complete');
const store = transaction.objectStore('messages');
const { conversationId } = messages[0];
let count = 0;
_.forEach(messages, message => {
const request = store.put(message, message.id);
request.onsuccess = () => {
count += 1;
if (count === messages.length) {
window.log.info(
'Saved',
messages.length,
'messages for conversation',
// Don't know if group or private conversation, so we blindly redact
`[REDACTED]${conversationId.slice(-3)}`
);
finish('puts scheduled');
}
};
request.onerror = () => {
Whisper.Database.handleDOMException(
'saveAllMessages request error',
request.error,
reject
);
};
});
});
for (let index = 0, max = messages.length; index < max; index += 1) {
// Yes, we really want to do these in order
// eslint-disable-next-line no-await-in-loop
await window.Signal.Data.saveMessage(messages[index]);
}
window.log.info(
'Saved',
messages.length,
'messages for conversation',
// Don't know if group or private conversation, so we blindly redact
`[REDACTED]${conversationId.slice(-3)}`
);
} catch (error) {
window.log.error(
'saveAllMessages error',
error && error.message ? error.message : error
);
}
}
// To reduce the memory impact of attachments, we make individual saves to the
@@ -1095,8 +1072,9 @@ async function importConversation(db, dir, options) {
message.quote &&
message.quote.attachments &&
message.quote.attachments.length > 0;
const hasContacts = message.contact && message.contact.length;
if (hasAttachments || hasQuotedAttachments) {
if (hasAttachments || hasQuotedAttachments || hasContacts) {
const importMessage = async () => {
const getName = attachmentsDir
? _getAnonymousAttachmentFileName
@@ -1163,7 +1141,11 @@ function getMessageKey(message) {
return `${source}.${sourceDevice} ${message.timestamp}`;
}
function loadMessagesLookup(db) {
return assembleLookup(db, 'messages', getMessageKey);
return window.Signal.Data.getAllMessageIds({
db,
getMessageKey,
handleDOMException: Whisper.Database.handleDOMException,
});
}
function getConversationKey(conversation) {

js/modules/data.js (new file, 349 lines)

@@ -0,0 +1,349 @@
/* global window */
const { deferredToPromise } = require('./deferred_to_promise');
const MessageType = require('./types/message');
// calls to search for:
// .fetch(
// .save(
// .destroy(
async function saveMessage(data, { Message }) {
const message = new Message(data);
await deferredToPromise(message.save());
return message.id;
}
async function removeMessage(id, { Message }) {
const message = await getMessageById(id, { Message });
// Note: It's important to have a fully database-hydrated model to delete here because
// it needs to delete all associated on-disk files along with the database delete.
if (message) {
await deferredToPromise(message.destroy());
}
}
async function getMessageById(id, { Message }) {
const message = new Message({ id });
try {
await deferredToPromise(message.fetch());
return message;
} catch (error) {
return null;
}
}
async function getAllMessageIds({ db, handleDOMException, getMessageKey }) {
const lookup = Object.create(null);
const storeName = 'messages';
return new Promise((resolve, reject) => {
const transaction = db.transaction(storeName, 'readwrite');
transaction.onerror = () => {
handleDOMException(
`assembleLookup(${storeName}) transaction error`,
transaction.error,
reject
);
};
transaction.oncomplete = () => {
// not really very useful - fires at unexpected times
};
const store = transaction.objectStore(storeName);
const request = store.openCursor();
request.onerror = () => {
handleDOMException(
`assembleLookup(${storeName}) request error`,
request.error,
reject
);
};
request.onsuccess = event => {
const cursor = event.target.result;
if (cursor && cursor.value) {
lookup[getMessageKey(cursor.value)] = true;
cursor.continue();
} else {
window.log.info(`Done creating ${storeName} lookup`);
resolve(lookup);
}
};
});
}
async function getMessageBySender(
// eslint-disable-next-line camelcase
{ source, sourceDevice, sent_at },
{ Message }
) {
const fetcher = new Message();
const options = {
index: {
name: 'unique',
// eslint-disable-next-line camelcase
value: [source, sourceDevice, sent_at],
},
};
try {
await deferredToPromise(fetcher.fetch(options));
if (fetcher.get('id')) {
return fetcher;
}
return null;
} catch (error) {
return null;
}
}
async function getUnreadByConversation(conversationId, { MessageCollection }) {
const messages = new MessageCollection();
await deferredToPromise(
messages.fetch({
index: {
// 'unread' index
name: 'unread',
lower: [conversationId],
upper: [conversationId, Number.MAX_VALUE],
},
})
);
return messages;
}
async function getMessagesByConversation(
conversationId,
{ limit = 100, receivedAt = Number.MAX_VALUE, MessageCollection }
) {
const messages = new MessageCollection();
const options = {
limit,
index: {
// 'conversation' index on [conversationId, received_at]
name: 'conversation',
lower: [conversationId],
upper: [conversationId, receivedAt],
order: 'desc',
// SELECT messages WHERE conversationId = this.id ORDER
// received_at DESC
},
};
await deferredToPromise(messages.fetch(options));
return messages;
}
async function removeAllMessagesInConversation(
conversationId,
{ MessageCollection }
) {
const messages = new MessageCollection();
let loaded;
do {
// Yes, we really want the await in the loop. We're deleting 100 at a
// time so we don't use too much memory.
// eslint-disable-next-line no-await-in-loop
await deferredToPromise(
messages.fetch({
limit: 100,
index: {
// 'conversation' index on [conversationId, received_at]
name: 'conversation',
lower: [conversationId],
upper: [conversationId, Number.MAX_VALUE],
},
})
);
loaded = messages.models;
messages.reset([]);
// Note: It's very important that these models are fully hydrated because
// we need to delete all associated on-disk files along with the database delete.
loaded.map(message => message.destroy());
} while (loaded.length > 0);
}
async function getMessagesBySentAt(sentAt, { MessageCollection }) {
const messages = new MessageCollection();
await deferredToPromise(
messages.fetch({
index: {
// 'receipt' index on sent_at
name: 'receipt',
only: sentAt,
},
})
);
return messages;
}
async function getExpiredMessages({ MessageCollection }) {
window.log.info('Load expired messages');
const messages = new MessageCollection();
await deferredToPromise(
messages.fetch({
conditions: {
expires_at: {
$lte: Date.now(),
},
},
})
);
return messages;
}
async function getNextExpiringMessage({ MessageCollection }) {
const messages = new MessageCollection();
await deferredToPromise(
messages.fetch({
limit: 1,
index: {
name: 'expires_at',
},
})
);
return messages;
}
async function saveUnprocessed(data, { Unprocessed }) {
const unprocessed = new Unprocessed(data);
return deferredToPromise(unprocessed.save());
}
async function getAllUnprocessed({ UnprocessedCollection }) {
const collection = new UnprocessedCollection();
await deferredToPromise(collection.fetch());
return collection.map(model => model.attributes);
}
async function updateUnprocessed(id, updates, { Unprocessed }) {
const unprocessed = new Unprocessed({
id,
});
await deferredToPromise(unprocessed.fetch());
unprocessed.set(updates);
await saveUnprocessed(unprocessed.attributes, { Unprocessed });
}
async function removeUnprocessed(id, { Unprocessed }) {
const unprocessed = new Unprocessed({
id,
});
await deferredToPromise(unprocessed.destroy());
}
async function removeAllUnprocessed() {
// erase everything in unprocessed table
}
async function removeAll() {
// erase everything in the database
}
async function getMessagesNeedingUpgrade(limit, { MessageCollection }) {
const messages = new MessageCollection();
await deferredToPromise(
messages.fetch({
limit,
index: {
name: 'schemaVersion',
upper: MessageType.CURRENT_SCHEMA_VERSION,
excludeUpper: true,
order: 'desc',
},
})
);
const models = messages.models || [];
return models.map(model => model.toJSON());
}
async function getMessagesWithVisualMediaAttachments(
conversationId,
{ limit, MessageCollection }
) {
const messages = new MessageCollection();
const lowerReceivedAt = 0;
const upperReceivedAt = Number.MAX_VALUE;
await deferredToPromise(
messages.fetch({
limit,
index: {
name: 'hasVisualMediaAttachments',
lower: [conversationId, lowerReceivedAt, 1],
upper: [conversationId, upperReceivedAt, 1],
order: 'desc',
},
})
);
return messages.models.map(model => model.toJSON());
}
async function getMessagesWithFileAttachments(
conversationId,
{ limit, MessageCollection }
) {
const messages = new MessageCollection();
const lowerReceivedAt = 0;
const upperReceivedAt = Number.MAX_VALUE;
await deferredToPromise(
messages.fetch({
limit,
index: {
name: 'hasFileAttachments',
lower: [conversationId, lowerReceivedAt, 1],
upper: [conversationId, upperReceivedAt, 1],
order: 'desc',
},
})
);
return messages.models.map(model => model.toJSON());
}
module.exports = {
saveMessage,
removeMessage,
getUnreadByConversation,
removeAllMessagesInConversation,
getMessageBySender,
getMessageById,
getAllMessageIds,
getMessagesBySentAt,
getExpiredMessages,
getNextExpiringMessage,
getMessagesByConversation,
getAllUnprocessed,
saveUnprocessed,
updateUnprocessed,
removeUnprocessed,
removeAllUnprocessed,
removeAll,
// Returning plain JSON
getMessagesNeedingUpgrade,
getMessagesWithVisualMediaAttachments,
getMessagesWithFileAttachments,
};
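Since data.js is new in this commit, a short usage sketch may help; it is not part of the diff. The layer stays storage-agnostic by taking the Backbone constructors as options, so Whisper.Message and Whisper.MessageCollection below are assumptions about how a caller wires it up.

// Usage sketch only, not from the commit. Constructors are passed in, so the
// module itself never touches Whisper directly.
const Data = window.Signal.Data;

async function sketch(conversationId, attributes) {
  // Save via the data layer instead of calling message.save() directly
  const id = await Data.saveMessage(attributes, { Message: Whisper.Message });

  // Load the newest 100 messages in the conversation, newest first
  const messages = await Data.getMessagesByConversation(conversationId, {
    limit: 100,
    MessageCollection: Whisper.MessageCollection,
  });

  window.log.info('sketch: saved', id, 'and loaded', messages.length);
}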


@@ -1,5 +1,5 @@
/* eslint-env node */
/* global log */
/* global log, Signal */
const fs = require('fs-extra');
const path = require('path');
@@ -58,9 +58,8 @@ exports.createConversation = async ({
range(0, numMessages).map(async index => {
await sleep(index * 100);
log.info(`Create message ${index + 1}`);
const messageAttributes = await createRandomMessage({ conversationId });
const message = new WhisperMessage(messageAttributes);
return deferredToPromise(message.save());
const message = await createRandomMessage({ conversationId });
return Signal.Data.saveMessage(message);
})
);
};


@@ -11,7 +11,6 @@ const { isFunction, isNumber, isObject, isString, last } = require('lodash');
const database = require('./database');
const Message = require('./types/message');
const settings = require('./settings');
const { deferredToPromise } = require('./deferred_to_promise');
const MESSAGES_STORE_NAME = 'messages';
@@ -20,6 +19,8 @@ exports.processNext = async ({
BackboneMessageCollection,
numMessagesPerBatch,
upgradeMessageSchema,
getMessagesNeedingUpgrade,
saveMessage,
} = {}) => {
if (!isFunction(BackboneMessage)) {
throw new TypeError(
@@ -45,10 +46,10 @@ exports.processNext = async ({
const startTime = Date.now();
const fetchStartTime = Date.now();
const messagesRequiringSchemaUpgrade = await _fetchMessagesRequiringSchemaUpgrade(
const messagesRequiringSchemaUpgrade = await getMessagesNeedingUpgrade(
numMessagesPerBatch,
{
BackboneMessageCollection,
count: numMessagesPerBatch,
MessageCollection: BackboneMessageCollection,
}
);
const fetchDuration = Date.now() - fetchStartTime;
@@ -60,8 +61,11 @@ exports.processNext = async ({
const upgradeDuration = Date.now() - upgradeStartTime;
const saveStartTime = Date.now();
const saveMessage = _saveMessageBackbone({ BackboneMessage });
await Promise.all(upgradedMessages.map(saveMessage));
await Promise.all(
upgradedMessages.map(message =>
saveMessage(message, { Message: BackboneMessage })
)
);
const saveDuration = Date.now() - saveStartTime;
const totalDuration = Date.now() - startTime;
@@ -277,11 +281,6 @@ const _processBatch = async ({
};
};
const _saveMessageBackbone = ({ BackboneMessage } = {}) => message => {
const backboneMessage = new BackboneMessage(message);
return deferredToPromise(backboneMessage.save());
};
const _saveMessage = ({ transaction } = {}) => message => {
if (!isObject(transaction)) {
throw new TypeError("'transaction' is required");
@@ -295,41 +294,6 @@ const _saveMessage = ({ transaction } = {}) => message => {
});
};
const _fetchMessagesRequiringSchemaUpgrade = async ({
BackboneMessageCollection,
count,
} = {}) => {
if (!isFunction(BackboneMessageCollection)) {
throw new TypeError(
"'BackboneMessageCollection' (Whisper.MessageCollection)" +
' constructor is required'
);
}
if (!isNumber(count)) {
throw new TypeError("'count' is required");
}
const collection = new BackboneMessageCollection();
return new Promise(resolve =>
collection
.fetch({
limit: count,
index: {
name: 'schemaVersion',
upper: Message.CURRENT_SCHEMA_VERSION,
excludeUpper: true,
order: 'desc',
},
})
.always(() => {
const models = collection.models || [];
const messages = models.map(model => model.toJSON());
resolve(messages);
})
);
};
// NOTE: Named dangerous because it is not as efficient as using our
// `messages` `schemaVersion` index:
const _dangerouslyFetchMessagesRequiringSchemaUpgradeWithoutIndex = ({
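For context on the schema_version migration change above (not part of the diff): with getMessagesNeedingUpgrade and saveMessage now injected, a caller of processNext would look roughly like the sketch below. The wiring is an assumption, including the MessageDataMigrator name for whatever exposes exports.processNext; only the option names and the two data-layer functions come from the diff.

// Caller sketch only. processNext no longer fetches or saves through
// Backbone itself; the data-layer functions are passed in as options.
await MessageDataMigrator.processNext({
  BackboneMessage: Whisper.Message,
  BackboneMessageCollection: Whisper.MessageCollection,
  numMessagesPerBatch: 10,
  upgradeMessageSchema: window.Signal.Migrations.upgradeMessageSchema,
  getMessagesNeedingUpgrade: window.Signal.Data.getMessagesNeedingUpgrade,
  saveMessage: window.Signal.Data.saveMessage,
});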


@@ -2,6 +2,7 @@
const Backbone = require('../../ts/backbone');
const Crypto = require('./crypto');
const Data = require('./data');
const Database = require('./database');
const Emoji = require('../../ts/util/emoji');
const Notifications = require('../../ts/notifications');
@@ -205,6 +206,7 @@ exports.setup = (options = {}) => {
Backbone,
Components,
Crypto,
Data,
Database,
Emoji,
Migrations,


@@ -498,6 +498,8 @@ exports.createAttachmentDataWriter = ({
});
};
// TODO: need to handle attachment thumbnails and video screenshots
const messageWithoutAttachmentData = Object.assign(
{},
await writeThumbnails(message, { logger }),