Ensure that ConversationController.load is resilient to errors

Scott Nonnenberg 2020-09-16 15:01:59 -07:00 committed by Josh Perez
parent b4e9c278d3
commit 67cb10fcae
2 changed files with 43 additions and 26 deletions


@@ -1139,18 +1139,28 @@
     const stickerData = this.get('sticker');
     if (stickerData) {
-      const sticker = Signal.Stickers.getSticker(
-        stickerData.packId,
-        stickerData.stickerId
-      );
-      const { emoji } = sticker || {};
-      if (!emoji) {
-        window.log.warn('Unable to get emoji for sticker');
-      }
-      return {
-        text: i18n('message--getNotificationText--stickers'),
-        emoji,
-      };
+      try {
+        const sticker = Signal.Stickers.getSticker(
+          stickerData.packId,
+          stickerData.stickerId
+        );
+        const { emoji } = sticker || {};
+        if (!emoji) {
+          window.log.warn('Unable to get emoji for sticker');
+        }
+        return {
+          text: i18n('message--getNotificationText--stickers'),
+          emoji,
+        };
+      } catch (error) {
+        window.log.error(
+          'getNotificationData: sticker fetch failed',
+          error && error.stack ? error.stack : error
+        );
+        return {
+          text: i18n('message--getNotificationText--stickers'),
+        };
+      }
     }
     if (this.isCallHistory()) {
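
The hunk above wraps the sticker lookup in try/catch so a failed lookup degrades to a text-only notification instead of throwing out of getNotificationData. A minimal standalone sketch of that fallback pattern, assuming stub getSticker and i18n helpers in place of Signal's real modules (the names below are illustrative, not from the codebase):

// sticker-notification.ts -- illustrative sketch, not Signal's actual module.
type NotificationData = { text: string; emoji?: string };

// Stand-in for a sticker store; the real code consults Signal.Stickers.
const stickerCache = new Map<string, { emoji?: string }>();

function getSticker(packId: string, stickerId: number): { emoji?: string } | undefined {
  const pack = stickerCache.get(packId);
  if (!pack) {
    // Mirrors a lookup that can throw when the pack is missing or corrupt.
    throw new Error(`Sticker pack not found: ${packId}/${stickerId}`);
  }
  return pack;
}

// Placeholder translation helper.
function i18n(key: string): string {
  return key;
}

function getStickerNotification(packId: string, stickerId: number): NotificationData {
  const text = i18n('message--getNotificationText--stickers');
  try {
    const sticker = getSticker(packId, stickerId);
    const emoji = sticker?.emoji;
    if (!emoji) {
      console.warn('Unable to get emoji for sticker');
    }
    return { text, emoji };
  } catch (error) {
    console.error(
      'getStickerNotification: sticker fetch failed',
      error instanceof Error ? error.stack : error
    );
    // Degrade gracefully: the notification still renders, just without the emoji.
    return { text };
  }
}

stickerCache.set('good-pack', { emoji: '👍' });
console.log(getStickerNotification('good-pack', 1));    // includes the emoji
console.log(getStickerNotification('missing-pack', 1)); // logs the error, text-only fallback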


@@ -666,23 +666,30 @@ export class ConversationController {
     await Promise.all(
       this._conversations.map(async conversation => {
-        // This call is important to allow Conversation models not to generate their
-        // cached props on initial construction if we're in the middle of the load
-        // from the database. Then we come back to the models when it is safe and
-        // generate those props.
-        conversation.generateProps();
-        if (!conversation.get('lastMessage')) {
-          await conversation.updateLastMessage();
-        }
-        // In case a too-large draft was saved to the database
-        const draft = conversation.get('draft');
-        if (draft && draft.length > MAX_MESSAGE_BODY_LENGTH) {
-          conversation.set({
-            draft: draft.slice(0, MAX_MESSAGE_BODY_LENGTH),
-          });
-          updateConversation(conversation.attributes);
-        }
+        try {
+          // This call is important to allow Conversation models not to generate their
+          // cached props on initial construction if we're in the middle of the load
+          // from the database. Then we come back to the models when it is safe and
+          // generate those props.
+          conversation.generateProps();
+          if (!conversation.get('lastMessage')) {
+            await conversation.updateLastMessage();
+          }
+          // In case a too-large draft was saved to the database
+          const draft = conversation.get('draft');
+          if (draft && draft.length > MAX_MESSAGE_BODY_LENGTH) {
+            conversation.set({
+              draft: draft.slice(0, MAX_MESSAGE_BODY_LENGTH),
+            });
+            updateConversation(conversation.attributes);
+          }
+        } catch (error) {
+          window.log.error(
+            'ConversationController.load/map: Failed to prepare a conversation',
+            error && error.stack ? error.stack : error
+          );
+        }
       })
     );
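
The second hunk applies the same idea per conversation: each callback in the Promise.all map catches and logs its own error, so one broken record is skipped rather than rejecting the whole load. A hedged sketch of that behavior, using hypothetical names (prepareConversation, loadAll) rather than Signal's actual API:

// load-resilience.ts -- illustrative sketch of the per-item try/catch pattern.
type Conversation = { id: string; lastMessage?: string };

async function prepareConversation(conversation: Conversation): Promise<void> {
  if (conversation.id === 'corrupt') {
    throw new Error('bad cached props');
  }
  // ...generate cached props, refresh lastMessage, trim oversized drafts, etc.
}

async function loadAll(conversations: Conversation[]): Promise<void> {
  await Promise.all(
    conversations.map(async conversation => {
      try {
        await prepareConversation(conversation);
      } catch (error) {
        // Log and continue: one broken conversation no longer aborts startup.
        console.error(
          'loadAll: failed to prepare a conversation',
          error instanceof Error ? error.stack : error
        );
      }
    })
  );
}

// The corrupt record is logged and skipped; the other two still load.
loadAll([{ id: 'a' }, { id: 'corrupt' }, { id: 'b' }]).then(() => console.log('load finished'));

Without the inner try/catch, the rejection from the corrupt record would propagate through Promise.all and the entire load would fail, which is the failure mode the commit title guards against.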