Flush all watchers on empty queue

This commit is contained in:
Fedor Indutny 2021-03-25 17:00:03 -07:00 committed by GitHub
parent 67892d838c
commit 746e99b8c2
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
8 changed files with 317 additions and 89 deletions

View file

@ -35,6 +35,7 @@ export async function startApp(): Promise<void> {
});
window.Whisper.deliveryReceiptQueue.pause();
window.Whisper.deliveryReceiptBatcher = window.Signal.Util.createBatcher({
name: 'Whisper.deliveryReceiptBatcher',
wait: 500,
maxSize: 500,
processBatch: async (items: WhatIsThis) => {
@ -2056,7 +2057,7 @@ export async function startApp(): Promise<void> {
async function onEmpty() {
await Promise.all([
window.waitForAllBatchers(),
window.waitForAllWaitBatchers(),
window.flushAllWaitBatchers(),
]);
window.log.info('onEmpty: All outstanding database requests complete');
initialLoadComplete = true;
@ -2074,72 +2075,68 @@ export async function startApp(): Promise<void> {
logger: window.log,
});
let interval: NodeJS.Timer | null = setInterval(async () => {
const view = window.owsDesktopApp.appView;
if (view) {
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
clearInterval(interval!);
interval = null;
view.onEmpty();
window.logAppLoadedEvent();
if (messageReceiver) {
window.log.info(
'App loaded - messages:',
messageReceiver.getProcessedCount()
);
}
window.sqlInitializer.goBackToMainProcess();
window.Signal.Util.setBatchingStrategy(false);
const attachmentDownloadQueue = window.attachmentDownloadQueue || [];
// NOTE: ts/models/messages.ts expects this global to become undefined
// once we stop processing the queue.
window.attachmentDownloadQueue = undefined;
const MAX_ATTACHMENT_MSGS_TO_DOWNLOAD = 250;
const attachmentsToDownload = attachmentDownloadQueue.filter(
(message, index) =>
index <= MAX_ATTACHMENT_MSGS_TO_DOWNLOAD ||
isMoreRecentThan(
message.getReceivedAt(),
MAX_ATTACHMENT_DOWNLOAD_AGE
) ||
// Stickers and long text attachments have to be downloaded for the UI
// to display the message properly.
message.hasRequiredAttachmentDownloads()
);
window.log.info(
'Downloading recent attachments of total attachments',
attachmentsToDownload.length,
attachmentDownloadQueue.length
);
if (window.startupProcessingQueue) {
window.startupProcessingQueue.flush();
window.startupProcessingQueue = undefined;
}
const messagesWithDownloads = await Promise.all(
attachmentsToDownload.map(message =>
message.queueAttachmentDownloads()
)
);
const messagesToSave: Array<MessageAttributesType> = [];
messagesWithDownloads.forEach((shouldSave, messageKey) => {
if (shouldSave) {
const message = attachmentsToDownload[messageKey];
messagesToSave.push(message.attributes);
}
});
await window.Signal.Data.saveMessages(messagesToSave, {});
}
}, 500);
window.Whisper.deliveryReceiptQueue.start();
window.Whisper.Notifications.enable();
const view = window.owsDesktopApp.appView;
if (!view) {
throw new Error('Expected `appView` to be initialized');
}
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
view.onEmpty();
window.logAppLoadedEvent();
if (messageReceiver) {
window.log.info(
'App loaded - messages:',
messageReceiver.getProcessedCount()
);
}
window.sqlInitializer.goBackToMainProcess();
window.Signal.Util.setBatchingStrategy(false);
const attachmentDownloadQueue = window.attachmentDownloadQueue || [];
// NOTE: ts/models/messages.ts expects this global to become undefined
// once we stop processing the queue.
window.attachmentDownloadQueue = undefined;
const MAX_ATTACHMENT_MSGS_TO_DOWNLOAD = 250;
const attachmentsToDownload = attachmentDownloadQueue.filter(
(message, index) =>
index <= MAX_ATTACHMENT_MSGS_TO_DOWNLOAD ||
isMoreRecentThan(
message.getReceivedAt(),
MAX_ATTACHMENT_DOWNLOAD_AGE
) ||
// Stickers and long text attachments have to be downloaded for the UI
// to display the message properly.
message.hasRequiredAttachmentDownloads()
);
window.log.info(
'Downloading recent attachments of total attachments',
attachmentsToDownload.length,
attachmentDownloadQueue.length
);
if (window.startupProcessingQueue) {
window.startupProcessingQueue.flush();
window.startupProcessingQueue = undefined;
}
const messagesWithDownloads = await Promise.all(
attachmentsToDownload.map(message => message.queueAttachmentDownloads())
);
const messagesToSave: Array<MessageAttributesType> = [];
messagesWithDownloads.forEach((shouldSave, messageKey) => {
if (shouldSave) {
const message = attachmentsToDownload[messageKey];
messagesToSave.push(message.attributes);
}
});
await window.Signal.Data.saveMessages(messagesToSave, {});
}
function onReconnect() {
// We disable notifications on first connect, but the same applies to reconnect. In

View file

@ -742,6 +742,7 @@ async function getConversationById(
}
const updateConversationBatcher = createBatcher<ConversationType>({
name: 'sql.Client.updateConversationBatcher',
wait: 500,
maxSize: 20,
processBatch: async (items: Array<ConversationType>) => {

View file

@ -0,0 +1,91 @@
// Copyright 2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import * as sinon from 'sinon';
import { createBatcher } from '../../util/batcher';
import { sleep } from '../../util/sleep';
describe('batcher', () => {
  it('should schedule a full batch', async () => {
    const batchFake = sinon.fake.resolves(undefined);
    const subject = createBatcher<number>({
      name: 'test',
      wait: 10,
      maxSize: 2,
      processBatch: batchFake,
    });

    // Hitting maxSize should kick the batch off immediately.
    subject.add(1);
    subject.add(2);

    assert.ok(batchFake.calledOnceWith([1, 2]), 'Full batch on first call');
  });

  it('should schedule a partial batch', async () => {
    const batchFake = sinon.fake.resolves(undefined);
    const subject = createBatcher<number>({
      name: 'test',
      wait: 5,
      maxSize: 2,
      processBatch: batchFake,
    });

    subject.add(1);

    // Give the 5ms wait timer a chance to fire.
    await sleep(10);

    assert.ok(batchFake.calledOnceWith([1]), 'Partial batch after timeout');
  });

  it('should flushAndWait a partial batch', async () => {
    const batchFake = sinon.fake.resolves(undefined);
    const subject = createBatcher<number>({
      name: 'test',
      wait: 10000,
      maxSize: 1000,
      processBatch: batchFake,
    });

    // Neither the timer nor maxSize will trigger here; only the explicit
    // flush should deliver the single buffered item.
    subject.add(1);
    await subject.flushAndWait();

    assert.ok(
      batchFake.calledOnceWith([1]),
      'Partial batch after flushAndWait'
    );
  });

  it('should flushAndWait a partial batch with new items added', async () => {
    let invocationCount = 0;
    // Adds a new item from inside the first batch; flushAndWait must keep
    // draining until nothing is pending.
    const processBatch = async (batch: Array<number>): Promise<void> => {
      invocationCount += 1;
      if (invocationCount === 1) {
        assert.deepEqual(batch, [1], 'First partial batch');
        subject.add(2);
      } else if (invocationCount === 2) {
        assert.deepEqual(batch, [2], 'Second partial batch');
      } else {
        assert.strictEqual(invocationCount, 2);
      }
    };

    const subject = createBatcher<number>({
      name: 'test',
      wait: 10000,
      maxSize: 1000,
      processBatch,
    });

    subject.add(1);
    await subject.flushAndWait();

    assert.strictEqual(invocationCount, 2);
  });
});

View file

@ -0,0 +1,80 @@
// Copyright 2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import * as sinon from 'sinon';
import { createWaitBatcher } from '../../util/waitBatcher';
describe('waitBatcher', () => {
  it('should schedule a full batch', async () => {
    const batchFake = sinon.fake.resolves(undefined);
    const subject = createWaitBatcher<number>({
      name: 'test',
      wait: 10,
      maxSize: 2,
      processBatch: batchFake,
    });

    // Hitting maxSize kicks the batch off; both adds resolve together.
    await Promise.all([subject.add(1), subject.add(2)]);

    assert.ok(batchFake.calledOnceWith([1, 2]), 'Full batch on first call');
  });

  it('should schedule a partial batch', async () => {
    const batchFake = sinon.fake.resolves(undefined);
    const subject = createWaitBatcher<number>({
      name: 'test',
      wait: 10,
      maxSize: 2,
      processBatch: batchFake,
    });

    // `add` resolves once its batch has been processed (here via the timer).
    await subject.add(1);

    assert.ok(batchFake.calledOnceWith([1]), 'Partial batch on timeout');
  });

  it('should flush a partial batch', async () => {
    const batchFake = sinon.fake.resolves(undefined);
    const subject = createWaitBatcher<number>({
      name: 'test',
      wait: 10000,
      maxSize: 1000,
      processBatch: batchFake,
    });

    // Neither timer nor maxSize will fire; only the explicit flush delivers.
    await Promise.all([subject.add(1), subject.flushAndWait()]);

    assert.ok(
      batchFake.calledOnceWith([1]),
      'Partial batch on flushAndWait'
    );
  });

  it('should flush a partial batch with new items added', async () => {
    const batchFake = sinon.fake.resolves(undefined);
    const subject = createWaitBatcher<number>({
      name: 'test',
      wait: 10000,
      maxSize: 1000,
      processBatch: batchFake,
    });

    // A second item arrives while the flush is draining the first; the
    // flush must keep going until both batches have been processed.
    await Promise.all([
      (async () => {
        await subject.add(1);
        await subject.add(2);
      })(),
      subject.flushAndWait(),
    ]);

    assert(batchFake.firstCall.calledWith([1]), 'First partial batch');
    assert(batchFake.secondCall.calledWith([2]), 'Second partial batch');
    assert(!batchFake.thirdCall);
  });
});

View file

@ -204,16 +204,23 @@ class MessageReceiverInner extends EventTarget {
this.appQueue = new PQueue({ concurrency: 1, timeout: 1000 * 60 * 2 });
this.cacheAddBatcher = createBatcher<CacheAddItemType>({
name: 'MessageReceiver.cacheAddBatcher',
wait: 200,
maxSize: 30,
processBatch: this.cacheAndQueueBatch.bind(this),
processBatch: (items: Array<CacheAddItemType>) => {
// Not returning the promise here because we don't want to stall
// the batch.
this.cacheAndQueueBatch(items);
},
});
this.cacheUpdateBatcher = createBatcher<CacheUpdateItemType>({
name: 'MessageReceiver.cacheUpdateBatcher',
wait: 500,
maxSize: 30,
processBatch: this.cacheUpdateBatch.bind(this),
});
this.cacheRemoveBatcher = createBatcher<string>({
name: 'MessageReceiver.cacheRemoveBatcher',
wait: 500,
maxSize: 30,
processBatch: this.cacheRemoveBatch.bind(this),
@ -507,7 +514,13 @@ class MessageReceiverInner extends EventTarget {
}
onEmpty() {
const emitEmpty = () => {
const emitEmpty = async () => {
await Promise.all([
this.cacheAddBatcher.flushAndWait(),
this.cacheUpdateBatcher.flushAndWait(),
this.cacheRemoveBatcher.flushAndWait(),
]);
window.log.info("MessageReceiver: emitting 'empty' event");
const ev = new Event('empty');
this.dispatchEvent(ev);

View file

@ -22,6 +22,7 @@ window.waitForAllBatchers = async () => {
};
export type BatcherOptionsType<ItemType> = {
name: string;
wait: number;
maxSize: number;
processBatch: (items: Array<ItemType>) => void | Promise<void>;
@ -54,18 +55,19 @@ export function createBatcher<ItemType>(
function add(item: ItemType) {
items.push(item);
if (timeout) {
clearTimeout(timeout);
timeout = null;
}
if (items.length >= options.maxSize) {
_kickBatchOff();
} else {
if (items.length === 1) {
// Set timeout once when we just pushed the first item so that the wait
// time is bounded by `options.wait` and not extended by further pushes.
timeout = setTimeout(() => {
timeout = null;
_kickBatchOff();
}, options.wait);
} else if (items.length >= options.maxSize) {
if (timeout) {
clearTimeout(timeout);
timeout = null;
}
_kickBatchOff();
}
}
@ -92,15 +94,23 @@ export function createBatcher<ItemType>(
}
async function flushAndWait() {
window.log.info(
`Flushing ${options.name} batcher items.length=${items.length}`
);
if (timeout) {
clearTimeout(timeout);
timeout = null;
}
if (items.length) {
_kickBatchOff();
}
return onIdle();
while (anyPending()) {
_kickBatchOff();
if (queue.size > 0 || queue.pending > 0) {
// eslint-disable-next-line no-await-in-loop
await queue.onIdle();
}
}
window.log.info(`Flushing complete ${options.name} for batcher`);
}
batcher = {

View file

@ -6,6 +6,7 @@ import { createBatcher } from './batcher';
import { createWaitBatcher } from './waitBatcher';
const updateMessageBatcher = createBatcher<MessageAttributesType>({
name: 'messageBatcher.updateMessageBatcher',
wait: 500,
maxSize: 50,
processBatch: async (messageAttrs: Array<MessageAttributesType>) => {
@ -31,6 +32,7 @@ export function setBatchingStrategy(keepBatching = false): void {
}
export const saveNewMessageBatcher = createWaitBatcher<MessageAttributesType>({
name: 'messageBatcher.saveNewMessageBatcher',
wait: 500,
maxSize: 30,
processBatch: async (messageAttrs: Array<MessageAttributesType>) => {

View file

@ -12,11 +12,16 @@ declare global {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
waitBatchers: Array<BatcherType<any>>;
waitForAllWaitBatchers: () => Promise<unknown>;
flushAllWaitBatchers: () => Promise<unknown>;
}
}
window.waitBatchers = [];
window.flushAllWaitBatchers = async () => {
await Promise.all(window.waitBatchers.map(item => item.flushAndWait()));
};
window.waitForAllWaitBatchers = async () => {
await Promise.all(window.waitBatchers.map(item => item.onIdle()));
};
@ -34,6 +39,7 @@ type ExplodedPromiseType = {
};
type BatcherOptionsType<ItemType> = {
name: string;
wait: number;
maxSize: number;
processBatch: (items: Array<ItemType>) => Promise<void>;
@ -44,6 +50,7 @@ type BatcherType<ItemType> = {
anyPending: () => boolean;
onIdle: () => Promise<void>;
unregister: () => void;
flushAndWait: () => void;
};
export function createWaitBatcher<ItemType>(
@ -54,10 +61,10 @@ export function createWaitBatcher<ItemType>(
let items: Array<ItemHolderType<ItemType>> = [];
const queue = new PQueue({ concurrency: 1, timeout: 1000 * 60 * 2 });
function _kickBatchOff() {
async function _kickBatchOff() {
const itemsRef = items;
items = [];
queue.add(async () => {
await queue.add(async () => {
try {
await options.processBatch(itemsRef.map(item => item.item));
itemsRef.forEach(item => {
@ -96,19 +103,22 @@ export function createWaitBatcher<ItemType>(
item,
});
if (timeout) {
clearTimeout(timeout);
timeout = null;
}
if (items.length >= options.maxSize) {
_kickBatchOff();
} else {
if (items.length === 1) {
// Set timeout once when we just pushed the first item so that the wait
// time is bounded by `options.wait` and not extended by further pushes.
timeout = setTimeout(() => {
timeout = null;
_kickBatchOff();
}, options.wait);
}
if (items.length >= options.maxSize) {
if (timeout) {
clearTimeout(timeout);
timeout = null;
}
_kickBatchOff();
}
await promise;
}
@ -137,11 +147,35 @@ export function createWaitBatcher<ItemType>(
);
}
// Flushes the buffered items and resolves only when every queued batch has
// finished processing. Runs in a loop because `processBatch` may add new
// items while a batch is in flight (see the "new items added" test case).
async function flushAndWait() {
  window.log.info(
    `Flushing start ${options.name} for waitBatcher ` +
      `items.length=${items.length}`
  );
  // Cancel the pending wait timer — we are kicking the batch off manually.
  if (timeout) {
    clearTimeout(timeout);
    timeout = null;
  }
  // NOTE(review): assumes `anyPending()` reflects both buffered items and
  // in-flight queue work, so the loop terminates once everything is drained
  // — confirm against its definition.
  while (anyPending()) {
    // eslint-disable-next-line no-await-in-loop
    await _kickBatchOff();
    // Wait for batches already running on the queue before re-checking.
    if (queue.size > 0 || queue.pending > 0) {
      // eslint-disable-next-line no-await-in-loop
      await queue.onIdle();
    }
  }
  window.log.info(`Flushing complete ${options.name} for waitBatcher`);
}
waitBatcher = {
add,
anyPending,
onIdle,
unregister,
flushAndWait,
};
window.waitBatchers.push(waitBatcher);