Add large group send benchmark
Parent: e14356f580
Commit: 4dfbb25c71
4 changed files with 151 additions and 37 deletions
.github/workflows/benchmark.yml (vendored): 21 changed lines

@@ -89,9 +89,29 @@ jobs:
        env:
          NODE_ENV: production
          RUN_COUNT: 100
          CONVERSATION_SIZE: 500
          ELECTRON_ENABLE_STACK_DUMPING: on
          ARTIFACTS_DIR: artifacts/group-send

      - name: Run large group send benchmarks with delivery receipts
        run: |
          set -o pipefail
          rm -rf /tmp/mock
          xvfb-run --auto-servernum node \
            ts/test-mock/benchmarks/group_send_bench.js | \
            tee benchmark-large-group-send.log
        timeout-minutes: 10
        env:
          NODE_ENV: production
          GROUP_SIZE: 500
          CONTACT_COUNT: 500
          GROUP_DELIVERY_RECEIPTS: 500
          DISCARD_COUNT: 2
          RUN_COUNT: 20
          CONVERSATION_SIZE: 50
          ELECTRON_ENABLE_STACK_DUMPING: on
          ARTIFACTS_DIR: artifacts/large-group-send

      - name: Run conversation open benchmarks
        run: |
          set -o pipefail
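
Taken together, the env values of the new step mean each CI run performs RUN_COUNT + DISCARD_COUNT sends into a 500-member group and, per measured send, plays back up to GROUP_DELIVERY_RECEIPTS delivery receipts. A quick sketch of that arithmetic in plain TypeScript; the values are copied from the step's env, and the 256/30/100 ms batching constants are assumptions taken from the benchmark hunk further down, not from this file:

    // Rough load implied by the CI step's env vars (not part of the repo's code).
    const RUN_COUNT = 20;
    const DISCARD_COUNT = 2;
    const GROUP_DELIVERY_RECEIPTS = 500;

    const totalSends = RUN_COUNT + DISCARD_COUNT; // 22 messages typed and sent

    // Receipts delivered back per send, assuming the 256-then-30-every-100ms
    // batching that the benchmark script below uses.
    const firstBatch = 256;
    const followUpBatch = 30;
    const delayMs = 100;
    const followUpBatches = Math.ceil(
      Math.max(0, GROUP_DELIVERY_RECEIPTS - firstBatch) / followUpBatch
    ); // 9 batches of up to 30 receipts
    const receiptTailMs = followUpBatches * delayMs; // roughly 900 ms of trailing receipt traffic

    console.log({ totalSends, followUpBatches, receiptTailMs });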

@@ -131,6 +151,7 @@ jobs:
          node ./bin/collect.js ../benchmark-startup.log data/startup.json
          node ./bin/collect.js ../benchmark-send.log data/send.json
          node ./bin/collect.js ../benchmark-group-send.log data/group-send.json
          node ./bin/collect.js ../benchmark-large-group-send.log data/large-group-send.json
          node ./bin/collect.js ../benchmark-convo-open.log data/convo-open.json
          npm run build
          git config --global user.email "no-reply@signal.org"
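
The added collect.js line mirrors the existing ones: the log captured by tee for the new benchmark is turned into a JSON data file for the dashboard build. bin/collect.js itself is not part of this diff, so the following is only a minimal sketch of what such a collector could look like; the file name, the "run result:" log marker, and the output format are assumptions, not the actual script:

    // Hypothetical collector sketch; NOT the repo's bin/collect.js.
    // Assumes the benchmark prints a single line like: "run result: { ...json... }".
    import { readFileSync, writeFileSync } from 'node:fs';

    const [logPath, outPath] = process.argv.slice(2);
    if (!logPath || !outPath) {
      throw new Error('usage: node collect-sketch.js <benchmark.log> <out.json>');
    }

    const lines = readFileSync(logPath, 'utf8').split('\n');
    const resultLine = lines.find(line => line.includes('run result:'));
    if (!resultLine) {
      throw new Error(`no result line found in ${logPath}`);
    }

    const json = resultLine.slice(resultLine.indexOf('{'));
    writeFileSync(outPath, JSON.stringify(JSON.parse(json), null, 2));
    console.log(`wrote ${outPath}`);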

@@ -19,6 +19,18 @@ export const GROUP_SIZE = process.env.GROUP_SIZE
  ? parseInt(process.env.GROUP_SIZE, 10)
  : 8;

export const CONTACT_COUNT = process.env.CONTACT_COUNT
  ? parseInt(process.env.CONTACT_COUNT, 10)
  : 10;

export const CONVERSATION_SIZE = process.env.CONVERSATION_SIZE
  ? parseInt(process.env.CONVERSATION_SIZE, 10)
  : 10;

export const GROUP_DELIVERY_RECEIPTS = process.env.GROUP_DELIVERY_RECEIPTS
  ? parseInt(process.env.GROUP_DELIVERY_RECEIPTS, 10)
  : 1;

export const DISCARD_COUNT = process.env.DISCARD_COUNT
  ? parseInt(process.env.DISCARD_COUNT, 10)
  : 5;
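
All of the new knobs follow the existing GROUP_SIZE pattern: read the env var, parseInt it, fall back to a default. One thing the pattern silently tolerates is a malformed value, since parseInt('abc', 10) yields NaN. A small hardening sketch under that assumption; the intFromEnv helper name is mine and does not exist in the repo:

    // Possible hardening of the env-with-default pattern (a sketch, not the repo's code).
    function intFromEnv(name: string, fallback: number): number {
      const raw = process.env[name];
      if (raw === undefined || raw === '') {
        return fallback;
      }
      const value = parseInt(raw, 10);
      if (Number.isNaN(value)) {
        throw new Error(`${name} must be an integer, got "${raw}"`);
      }
      return value;
    }

    // Usage mirroring the exports above:
    export const GROUP_DELIVERY_RECEIPTS = intFromEnv('GROUP_DELIVERY_RECEIPTS', 1);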

@@ -15,11 +15,14 @@ import {
  debug,
  RUN_COUNT,
  GROUP_SIZE,
  CONVERSATION_SIZE,
  DISCARD_COUNT,
  GROUP_DELIVERY_RECEIPTS,
} from './fixtures';
import { stats } from '../../util/benchmark/stats';
import { sleep } from '../../util/sleep';
import { MINUTE } from '../../util/durations';

const CONVERSATION_SIZE = 500; // messages
const LAST_MESSAGE = 'start sending messages now';

Bootstrap.benchmark(async (bootstrap: Bootstrap): Promise<void> => {

@@ -46,7 +49,7 @@ Bootstrap.benchmark(async (bootstrap: Bootstrap): Promise<void> => {
  const messages = new Array<Buffer>();
  debug('encrypting');
  // Fill left pane
  for (const contact of members.slice().reverse()) {
  for (const contact of members.slice(0, CONVERSATION_SIZE).reverse()) {
    const messageTimestamp = bootstrap.getTimestamp();

    messages.push(
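
The only change in this hunk is the loop bound: instead of seeding one left-pane conversation per group member, setup now seeds only the first CONVERSATION_SIZE members. With the CI values above (GROUP_SIZE and CONTACT_COUNT at 500, CONVERSATION_SIZE at 50) that is 50 seed messages instead of 500. A tiny illustration; the members array here is a made-up stand-in for the benchmark's real member list:

    // Illustration only; `members` is a stand-in, not the benchmark's data.
    const CONVERSATION_SIZE = 50;
    const members = Array.from({ length: 500 }, (_, i) => `member-${i}`);

    const before = members.slice().reverse().length;                    // 500 seed messages
    const after = members.slice(0, CONVERSATION_SIZE).reverse().length; // 50 seed messages

    console.log({ before, after });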

@@ -114,17 +117,58 @@ Bootstrap.benchmark(async (bootstrap: Bootstrap): Promise<void> => {
        `>> text=${LAST_MESSAGE}`
      )
      .first();
    await item.click();
    await item.click({ timeout: 2 * MINUTE });
  }

  const timeline = window.locator(
    '.timeline-wrapper, .Inbox__conversation .ConversationView'
  );

  const deltaList = new Array<number>();
  const input = await app.waitForEnabledComposer();

  function sendReceiptsInBatches({
    receipts,
    batchSize,
    nextBatchSize,
    runId,
    delay,
  }: {
    receipts: Array<Buffer>;
    batchSize: number;
    nextBatchSize: number;
    runId: number;
    delay: number;
  }) {
    const receiptsToSend = receipts.splice(0, batchSize);
    debug(`sending ${receiptsToSend.length} receipts for runId ${runId}`);

    receiptsToSend.forEach(delivery => server.send(desktop, delivery));

    if (receipts.length) {
      setTimeout(
        () =>
          sendReceiptsInBatches({
            receipts,
            batchSize: nextBatchSize,
            nextBatchSize,
            runId,
            delay,
          }),
        delay
      );
    }
  }

  let receiptsFromPreviousMessage: Array<Buffer> = [];
  for (let runId = 0; runId < RUN_COUNT + DISCARD_COUNT; runId += 1) {
    debug('finding composition input and clicking it');
    const input = await app.waitForEnabledComposer();
    debug(`sending previous ${receiptsFromPreviousMessage.length} receipts`);

    // deliver up to 256 receipts at once (max that server will send) and then in chunks
    // of 30 every 200ms to approximate real behavior as we acknowledge each batch
    sendReceiptsInBatches({
      receipts: receiptsFromPreviousMessage,
      batchSize: 256,
      nextBatchSize: 30,
      delay: 100,
      runId,
    });

    debug('entering message text');
    await input.type(`my message ${runId}`);
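
sendReceiptsInBatches drains the receipts array destructively with splice: the first call sends up to batchSize receipts immediately, then reschedules itself with nextBatchSize every delay milliseconds until the array is empty (note the inline comment says 200 ms while the call passes delay: 100). A self-contained sketch of the same pattern follows; the names are mine, and server.send is replaced by a plain callback since the mock server object is out of scope here:

    // Generic version of the batching pattern above (a sketch, not the repo's code).
    function drainInBatches<T>(
      items: Array<T>,
      firstBatchSize: number,
      nextBatchSize: number,
      delayMs: number,
      send: (item: T) => void
    ): void {
      const batch = items.splice(0, firstBatchSize); // mutates `items`
      batch.forEach(send);

      if (items.length) {
        setTimeout(
          () => drainInBatches(items, nextBatchSize, nextBatchSize, delayMs, send),
          delayMs
        );
      }
    }

    // 500 receipts: one batch of 256, then batches of 30 every 100 ms.
    const receipts = Array.from({ length: 500 }, (_, i) => i);
    drainInBatches(receipts, 256, 30, 100, receipt => {
      // stand-in for server.send(desktop, receipt)
      void receipt;
    });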

@@ -139,18 +183,19 @@ Bootstrap.benchmark(async (bootstrap: Bootstrap): Promise<void> => {
    debug('waiting for timing from the app');
    const { timestamp, delta } = await app.waitForMessageSend();

    // Sleep to allow any receipts from previous rounds to be processed
    await sleep(1000);

    debug('sending delivery receipts');
    const delivery = await first.encryptReceipt(desktop, {
      timestamp: timestamp + 1,
      messageTimestamps: [timestamp],
      type: ReceiptType.Delivery,
    });

    await server.send(desktop, delivery);

    debug('waiting for message state change');
    const message = timeline.locator(`[data-testid="${timestamp}"]`);
    await message.waitFor();
    receiptsFromPreviousMessage = await Promise.all(
      members.slice(0, GROUP_DELIVERY_RECEIPTS).map(member =>
        member.encryptReceipt(desktop, {
          timestamp: timestamp + 1,
          messageTimestamps: [timestamp],
          type: ReceiptType.Delivery,
        })
      )
    );

    if (runId >= DISCARD_COUNT) {
      deltaList.push(delta);
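
The runId >= DISCARD_COUNT guard means the first DISCARD_COUNT sends only warm the app up; only the remaining RUN_COUNT deltas reach deltaList and, presumably, the stats() call imported above (the tail of the file is not in this diff). A minimal sketch of that discard-then-aggregate idea, with the discard applied after the fact for brevity and a hand-rolled mean/p95 instead of the repo's stats util; the delta values are made up:

    // Sketch of discarding warm-up runs and summarizing the rest (not the repo's stats util).
    const DISCARD_COUNT = 2;
    const allDeltas = [180, 175, 120, 118, 122, 119, 121, 117, 123, 120]; // made-up ms values

    const measured = allDeltas.slice(DISCARD_COUNT); // drop warm-up runs
    const mean = measured.reduce((sum, d) => sum + d, 0) / measured.length;
    const sorted = [...measured].sort((a, b) => a - b);
    const p95 = sorted[Math.min(sorted.length - 1, Math.floor(sorted.length * 0.95))];

    console.log({ runs: measured.length, mean, p95 });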

@@ -19,6 +19,7 @@ import { MAX_READ_KEYS as MAX_STORAGE_READ_KEYS } from '../services/storageConst
import * as durations from '../util/durations';
import { drop } from '../util/drop';
import { App } from './playwright';
import { CONTACT_COUNT } from './benchmarks/fixtures';

export { App };

@@ -40,6 +41,10 @@ const CONTACT_FIRST_NAMES = [
  'Alice',
  'Bob',
  'Charlie',
  'Danielle',
  'Elaine',
  'Frankie',
  'Grandma',
  'Paul',
  'Steve',
  'William',

@@ -51,7 +56,23 @@ const CONTACT_LAST_NAMES = [
  'Miller',
  'Davis',
  'Lopez',
  'Gonazales',
  'Gonzales',
  'Singh',
  'Baker',
  'Farmer',
];

const CONTACT_SUFFIXES = [
  'Sr.',
  'Jr.',
  'the 3rd',
  'the 4th',
  'the 5th',
  'the 6th',
  'the 7th',
  'the 8th',
  'the 9th',
  'the 10th',
];

const CONTACT_NAMES = new Array<string>();

@@ -61,6 +82,14 @@ for (const firstName of CONTACT_FIRST_NAMES) {
  }
}

for (const suffix of CONTACT_SUFFIXES) {
  for (const firstName of CONTACT_FIRST_NAMES) {
    for (const lastName of CONTACT_LAST_NAMES) {
      CONTACT_NAMES.push(`${firstName} ${lastName}, ${suffix}`);
    }
  }
}

const MAX_CONTACTS = CONTACT_NAMES.length;

export type BootstrapOptions = Readonly<{

@@ -131,7 +160,7 @@ export class Bootstrap {

    this.options = {
      linkedDevices: 5,
      contactCount: MAX_CONTACTS,
      contactCount: CONTACT_COUNT,
      contactsWithoutProfileKey: 0,
      unknownContactCount: 0,
      contactNames: CONTACT_NAMES,

@@ -140,12 +169,12 @@ export class Bootstrap {
      ...options,
    };

    assert(
      this.options.contactCount +
        this.options.contactsWithoutProfileKey +
        this.options.unknownContactCount <=
        this.options.contactNames.length
    );
    const totalContactCount =
      this.options.contactCount +
      this.options.contactsWithoutProfileKey +
      this.options.unknownContactCount;
    assert(totalContactCount <= this.options.contactNames.length);
    assert(totalContactCount <= MAX_CONTACTS);
  }

  public async init(): Promise<void> {
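
CONTACT_COUNT defaults to 10 but is pushed to 500 by the new CI step, so the contact name pool has to be large enough; the suffix loop above exists to multiply that pool, and the two asserts make both limits explicit (enough names configured, and within MAX_CONTACTS overall). The arithmetic, using list sizes as they appear in the visible hunks; only part of each name array is shown in this diff, so treat the exact totals as illustrative:

    // Capacity check mirroring the asserts above (sizes read off the visible hunks only).
    const firstNames = 10;   // 'Alice' through 'William' as shown
    const lastNames = 8;     // 'Miller' through 'Farmer' as shown (the full list may be longer)
    const suffixes = 10;     // 'Sr.' through 'the 10th'

    const plainNames = firstNames * lastNames;               // 80
    const suffixedNames = suffixes * firstNames * lastNames; // 800
    const maxContacts = plainNames + suffixedNames;          // 880

    const contactCount = 500; // CONTACT_COUNT from the CI step
    const contactsWithoutProfileKey = 0;
    const unknownContactCount = 0;
    const totalContactCount =
      contactCount + contactsWithoutProfileKey + unknownContactCount;

    console.log(totalContactCount <= maxContacts); // true: 500 fits in the expanded pool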

@@ -156,19 +185,26 @@ export class Bootstrap {
    const { port } = this.server.address();
    debug('started server on port=%d', port);

    const totalContactCount =
      this.options.contactCount +
      this.options.contactsWithoutProfileKey +
      this.options.unknownContactCount;

    const allContacts = await Promise.all(
      this.options.contactNames.map(async profileName => {
        const primary = await this.server.createPrimaryDevice({
          profileName,
        });
      this.options.contactNames
        .slice(0, totalContactCount)
        .map(async profileName => {
          const primary = await this.server.createPrimaryDevice({
            profileName,
          });

        for (let i = 0; i < this.options.linkedDevices; i += 1) {
          // eslint-disable-next-line no-await-in-loop
          await this.server.createSecondaryDevice(primary);
        }
          for (let i = 0; i < this.options.linkedDevices; i += 1) {
            // eslint-disable-next-line no-await-in-loop
            await this.server.createSecondaryDevice(primary);
          }

        return primary;
      })
          return primary;
        })
    );

    this.privContacts = allContacts.splice(0, this.options.contactCount);
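
init() now creates primary (and linked) devices only for the first totalContactCount names instead of the whole pool, and privContacts is then carved off the front of the result with splice. Since splice mutates the array, successive splice(0, n) calls naturally partition allContacts into the contact buckets. A small sketch of that idiom with illustrative values; the bucket names beyond privContacts are my guesses, since the rest of the partitioning is outside this hunk:

    // How successive splice(0, n) calls partition an array (illustrative values only).
    const options = { contactCount: 3, contactsWithoutProfileKey: 2, unknownContactCount: 1 };
    const allContacts = ['a', 'b', 'c', 'd', 'e', 'f'];

    const contacts = allContacts.splice(0, options.contactCount);                             // ['a', 'b', 'c']
    const contactsWithoutProfileKey = allContacts.splice(0, options.contactsWithoutProfileKey); // ['d', 'e']
    const unknownContacts = allContacts.splice(0, options.unknownContactCount);                 // ['f']

    console.log(contacts, contactsWithoutProfileKey, unknownContacts, allContacts.length); // ..., 0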