Better handle large numbers of messages in cache on startup
This commit is contained in:
parent
102c2717cb
commit
2cfbfe477a
5 changed files with 56 additions and 21 deletions
11
app/sql.js
11
app/sql.js
|
@@ -42,6 +42,7 @@ module.exports = {
|
|||
getNextExpiringMessage,
|
||||
getMessagesByConversation,
|
||||
|
||||
getUnprocessedCount,
|
||||
getAllUnprocessed,
|
||||
saveUnprocessed,
|
||||
getUnprocessedById,
|
||||
|
@@ -962,6 +963,16 @@ async function getUnprocessedById(id) {
|
|||
return jsonToObject(row.json);
|
||||
}
|
||||
|
||||
// Returns the total number of rows in the `unprocessed` table (envelopes
// received but not yet decrypted/processed).
// Throws if the driver unexpectedly yields no row for the COUNT query.
async function getUnprocessedCount() {
  const row = await db.get('SELECT count(*) from unprocessed;');

  if (!row) {
    // Fix: the message previously said 'getMessageCount' — a copy/paste slip
    // from the sibling function; it must name this function for debuggability.
    throw new Error('getUnprocessedCount: Unable to get count of unprocessed');
  }

  // sqlite returns the aggregate under the literal column name 'count(*)'.
  return row['count(*)'];
}
|
||||
|
||||
async function getAllUnprocessed() {
|
||||
const rows = await db.all(
|
||||
'SELECT json FROM unprocessed ORDER BY timestamp ASC;'
|
||||
|
|
|
@@ -72,6 +72,7 @@ module.exports = {
|
|||
getNextExpiringMessage,
|
||||
getMessagesByConversation,
|
||||
|
||||
getUnprocessedCount,
|
||||
getAllUnprocessed,
|
||||
getUnprocessedById,
|
||||
saveUnprocessed,
|
||||
|
@@ -458,6 +459,10 @@ async function getNextExpiringMessage({ MessageCollection }) {
|
|||
return new MessageCollection(messages);
|
||||
}
|
||||
|
||||
// Asks the SQL channel (main process) for the number of rows currently in
// the unprocessed-envelope table.
async function getUnprocessedCount() {
  const count = await channels.getUnprocessedCount();
  return count;
}
|
||||
|
||||
// Fetches every cached unprocessed envelope from the SQL channel
// (main process) in one round trip.
async function getAllUnprocessed() {
  const rows = await channels.getAllUnprocessed();
  return rows;
}
|
||||
|
|
|
@@ -939,6 +939,9 @@
|
|||
},
|
||||
|
||||
// Not yet processed messages - for resiliency
|
||||
// Counts envelopes still awaiting processing.
// Delegates to the renderer's Signal.Data layer, which forwards to the
// SQL backend in the main process.
getUnprocessedCount() {
  return window.Signal.Data.getUnprocessedCount();
},
|
||||
// Fetches all cached unprocessed envelopes via Signal.Data.
getAllUnprocessed() {
  return window.Signal.Data.getAllUnprocessed();
},
|
||||
|
@@ -959,6 +962,9 @@
|
|||
// Deletes a single unprocessed envelope by id. The Unprocessed model
// constructor is threaded through for the data layer's use.
removeUnprocessed(id) {
  return window.Signal.Data.removeUnprocessed(id, { Unprocessed });
},
|
||||
// Clears the entire unprocessed-envelope cache.
removeAllUnprocessed() {
  return window.Signal.Data.removeAllUnprocessed();
},
|
||||
async removeAllData() {
|
||||
// First the in-memory caches:
|
||||
window.storage.reset(); // items store
|
||||
|
|
|
@@ -441,38 +441,45 @@ MessageReceiver.prototype.extend({
|
|||
envelope.sourceDevice
|
||||
} ${envelope.timestamp.toNumber()}`;
|
||||
},
|
||||
getAllFromCache() {
|
||||
async getAllFromCache() {
|
||||
window.log.info('getAllFromCache');
|
||||
return textsecure.storage.unprocessed.getAll().then(items => {
|
||||
window.log.info(
|
||||
'getAllFromCache loaded',
|
||||
items.length,
|
||||
'saved envelopes'
|
||||
);
|
||||
const count = await textsecure.storage.unprocessed.getCount();
|
||||
|
||||
return Promise.all(
|
||||
_.map(items, item => {
|
||||
const attempts = 1 + (item.attempts || 0);
|
||||
if (attempts >= 5) {
|
||||
if (count > 250) {
|
||||
await textsecure.storage.unprocessed.removeAll();
|
||||
window.log.warn(
|
||||
`There were ${count} messages in cache. Deleted all instead of reprocessing`
|
||||
);
|
||||
return [];
|
||||
}
|
||||
|
||||
const items = await textsecure.storage.unprocessed.getAll();
|
||||
window.log.info('getAllFromCache loaded', items.length, 'saved envelopes');
|
||||
|
||||
return Promise.all(
|
||||
_.map(items, async item => {
|
||||
const attempts = 1 + (item.attempts || 0);
|
||||
|
||||
try {
|
||||
if (attempts >= 3) {
|
||||
window.log.warn(
|
||||
'getAllFromCache final attempt for envelope',
|
||||
item.id
|
||||
);
|
||||
return textsecure.storage.unprocessed.remove(item.id);
|
||||
await textsecure.storage.unprocessed.remove(item.id);
|
||||
} else {
|
||||
await textsecure.storage.unprocessed.save({ ...item, attempts });
|
||||
}
|
||||
return textsecure.storage.unprocessed.save({ ...item, attempts });
|
||||
})
|
||||
).then(
|
||||
() => items,
|
||||
error => {
|
||||
} catch (error) {
|
||||
window.log.error(
|
||||
'getAllFromCache error updating items after load:',
|
||||
'getAllFromCache error updating item after load:',
|
||||
error && error.stack ? error.stack : error
|
||||
);
|
||||
return items;
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
return item;
|
||||
})
|
||||
);
|
||||
},
|
||||
async addToCache(envelope, plaintext) {
|
||||
const id = this.getEnvelopeId(envelope);
|
||||
|
|
|
@@ -9,6 +9,9 @@
|
|||
window.textsecure.storage = window.textsecure.storage || {};
|
||||
|
||||
window.textsecure.storage.unprocessed = {
|
||||
// Number of unprocessed envelopes currently cached; thin wrapper over
// the protocol store.
getCount() {
  return textsecure.storage.protocol.getUnprocessedCount();
},
|
||||
// All cached unprocessed envelopes; thin wrapper over the protocol store.
getAll() {
  return textsecure.storage.protocol.getAllUnprocessed();
},
|
||||
|
@@ -24,5 +27,8 @@
|
|||
// Removes one cached envelope by id.
remove(id) {
  return textsecure.storage.protocol.removeUnprocessed(id);
},
|
||||
// Empties the unprocessed-envelope cache entirely.
removeAll() {
  return textsecure.storage.protocol.removeAllUnprocessed();
},
|
||||
};
|
||||
})();
|
||||
|
|
Loading…
Reference in a new issue