Merge branch 'main' into HEAD

Scott Nonnenberg 2024-07-30 15:53:28 -07:00
commit fed6bbfc8b
1127 changed files with 263697 additions and 302446 deletions


@@ -3,7 +3,6 @@
import { assert } from 'chai';
import * as sinon from 'sinon';
import type { MainSQL } from '../../sql/main';
import { SystemTraySetting } from '../../types/SystemTraySetting';
import type { ConfigType } from '../../../app/base_config';
@@ -12,18 +11,13 @@ import { SystemTraySettingCache } from '../../../app/SystemTraySettingCache';
describe('SystemTraySettingCache', () => {
let sandbox: sinon.SinonSandbox;
let sqlCallStub: sinon.SinonStub;
let configGetStub: sinon.SinonStub;
let configSetStub: sinon.SinonStub;
let sql: Pick<MainSQL, 'sqlCall'>;
let config: Pick<ConfigType, 'get' | 'set'>;
beforeEach(() => {
sandbox = sinon.createSandbox();
sqlCallStub = sandbox.stub().resolves();
sql = { sqlCall: sqlCallStub };
configGetStub = sandbox.stub().returns(undefined);
configSetStub = sandbox.stub().returns(undefined);
config = { get: configGetStub, set: configSetStub };
@@ -34,46 +28,32 @@ describe('SystemTraySettingCache', () => {
});
it('returns MinimizeToAndStartInSystemTray if passed the --start-in-tray argument', async () => {
const justOneArg = new SystemTraySettingCache(
sql,
config,
['--start-in-tray'],
'1.2.3'
);
const justOneArg = new SystemTraySettingCache(config, ['--start-in-tray']);
assert.strictEqual(
await justOneArg.get(),
SystemTraySetting.MinimizeToAndStartInSystemTray
);
const bothArgs = new SystemTraySettingCache(
sql,
config,
['--start-in-tray', '--use-tray-icon'],
'1.2.3'
);
const bothArgs = new SystemTraySettingCache(config, [
'--start-in-tray',
'--use-tray-icon',
]);
assert.strictEqual(
await bothArgs.get(),
SystemTraySetting.MinimizeToAndStartInSystemTray
);
sinon.assert.notCalled(sqlCallStub);
sinon.assert.notCalled(configGetStub);
sinon.assert.notCalled(configSetStub);
});
it('returns MinimizeToSystemTray if passed the --use-tray-icon argument', async () => {
const cache = new SystemTraySettingCache(
sql,
config,
['--use-tray-icon'],
'1.2.3'
);
const cache = new SystemTraySettingCache(config, ['--use-tray-icon']);
assert.strictEqual(
await cache.get(),
SystemTraySetting.MinimizeToSystemTray
);
sinon.assert.notCalled(sqlCallStub);
sinon.assert.notCalled(configGetStub);
sinon.assert.notCalled(configSetStub);
});
@@ -81,7 +61,7 @@ describe('SystemTraySettingCache', () => {
it('returns Uninitialized if system tray is supported but no preference is stored', async () => {
sandbox.stub(process, 'platform').value('win32');
const cache = new SystemTraySettingCache(sql, config, [], '1.2.3');
const cache = new SystemTraySettingCache(config, []);
assert.strictEqual(await cache.get(), SystemTraySetting.Uninitialized);
assert(configGetStub.calledOnceWith('system-tray-setting'));
assert(
@@ -95,9 +75,9 @@ describe('SystemTraySettingCache', () => {
it('returns Uninitialized if system tray is supported but the stored preference is invalid', async () => {
sandbox.stub(process, 'platform').value('win32');
sqlCallStub.resolves({ value: 'garbage' });
configGetStub.returns('garbage');
const cache = new SystemTraySettingCache(sql, config, [], '1.2.3');
const cache = new SystemTraySettingCache(config, []);
assert.strictEqual(await cache.get(), SystemTraySetting.Uninitialized);
assert(configGetStub.calledOnceWith('system-tray-setting'));
assert(
@@ -108,58 +88,26 @@ describe('SystemTraySettingCache', () => {
);
});
it('returns the stored preference if system tray is supported and something is stored', async () => {
sandbox.stub(process, 'platform').value('win32');
sqlCallStub.resolves({ value: 'MinimizeToSystemTray' });
const cache = new SystemTraySettingCache(sql, config, [], '1.2.3');
assert.strictEqual(
await cache.get(),
SystemTraySetting.MinimizeToSystemTray
);
assert(configGetStub.calledOnceWith('system-tray-setting'));
assert(
configSetStub.calledOnceWith(
'system-tray-setting',
SystemTraySetting.MinimizeToSystemTray
)
);
});
it('returns the cached preference if system tray is supported and something is stored', async () => {
sandbox.stub(process, 'platform').value('win32');
configGetStub.returns('MinimizeToSystemTray');
const cache = new SystemTraySettingCache(sql, config, [], '1.2.3');
const cache = new SystemTraySettingCache(config, []);
assert.strictEqual(
await cache.get(),
SystemTraySetting.MinimizeToSystemTray
);
assert(configGetStub.calledOnceWith('system-tray-setting'));
sinon.assert.notCalled(sqlCallStub);
});
it('only kicks off one request to the database if multiple sources ask at once', async () => {
sandbox.stub(process, 'platform').value('win32');
const cache = new SystemTraySettingCache(sql, config, [], '1.2.3');
await Promise.all([cache.get(), cache.get(), cache.get()]);
assert(configGetStub.calledOnceWith('system-tray-setting'));
sinon.assert.calledOnce(sqlCallStub);
});
it('returns DoNotUseSystemTray if system tray is unsupported and there are no CLI flags', async () => {
sandbox.stub(process, 'platform').value('darwin');
const cache = new SystemTraySettingCache(sql, config, [], '1.2.3');
const cache = new SystemTraySettingCache(config, []);
assert.strictEqual(await cache.get(), SystemTraySetting.DoNotUseSystemTray);
sinon.assert.notCalled(configGetStub);
sinon.assert.notCalled(configSetStub);
sinon.assert.notCalled(sqlCallStub);
});
});


@@ -3,7 +3,7 @@
import * as path from 'path';
import { tmpdir } from 'os';
import { chmodSync, mkdirSync, unlinkSync, writeFileSync } from 'fs';
import { chmodSync, rmSync, writeFileSync, mkdtempSync } from 'fs';
import { pathExists, readJsonSync } from 'fs-extra';
import { v4 as generateGuid } from 'uuid';
@@ -13,15 +13,19 @@ import type { ConfigType } from '../../../app/base_config';
import { start } from '../../../app/base_config';
describe('base_config', () => {
let targetDir: string;
let targetPath: string;
beforeEach(() => {
targetPath = path.join(tmpdir(), `${generateGuid()}.json`);
targetDir = mkdtempSync(path.join(tmpdir(), 'base_config'));
targetPath = path.join(targetDir, `${generateGuid()}.json`);
});
afterEach(() => {
try {
unlinkSync(targetPath);
chmodSync(targetDir, 0o755);
chmodSync(targetPath, 0o755);
rmSync(targetDir, { recursive: true });
} catch (err) {
assert.strictEqual(err.code, 'ENOENT');
}
@@ -89,7 +93,7 @@ describe('base_config', () => {
}
writeFileSync(targetPath, JSON.stringify({ foo: 123 }));
chmodSync(targetPath, 0);
chmodSync(targetDir, 0);
const { _getCachedValue } = start({
name: 'test',
targetPath,
@@ -163,7 +167,7 @@ describe('base_config', () => {
throwOnFilesystemErrors: true,
});
config.set('foo', 123);
chmodSync(targetPath, 0);
rmSync(targetDir, { recursive: true });
assert.throws(() => config.set('foo', 456));
assert.strictEqual(config.get('foo'), 123);
@@ -181,7 +185,7 @@ describe('base_config', () => {
throwOnFilesystemErrors: false,
});
config.set('foo', 123);
chmodSync(targetPath, 0);
rmSync(targetDir, { recursive: true });
config.set('bar', 456);
@@ -234,16 +238,13 @@ describe('base_config', () => {
// We put the config file in a directory, then remove all permissions from that
// directory. This should prevent removal.
const directory = path.join(tmpdir(), generateGuid());
const configFile = path.join(directory, 'test_config.json');
mkdirSync(directory, { recursive: true });
writeFileSync(configFile, JSON.stringify({ foo: 123 }));
writeFileSync(targetPath, JSON.stringify({ foo: 123 }));
const config = start({
name: 'test',
targetPath: configFile,
targetPath,
throwOnFilesystemErrors: true,
});
chmodSync(directory, 0);
chmodSync(targetDir, 0);
assert.throws(() => config.remove());
@@ -258,16 +259,13 @@ describe('base_config', () => {
}
// See above.
const directory = path.join(tmpdir(), generateGuid());
const configFile = path.join(directory, 'test_config.json');
mkdirSync(directory, { recursive: true });
writeFileSync(configFile, JSON.stringify({ foo: 123 }));
writeFileSync(targetPath, JSON.stringify({ foo: 123 }));
const config = start({
name: 'test',
targetPath: configFile,
targetPath,
throwOnFilesystemErrors: false,
});
chmodSync(directory, 0);
chmodSync(targetDir, 0);
config.remove();


@@ -22,6 +22,7 @@ const setupAsNewDevice = stub();
const setupAsStandalone = stub();
const showAbout = stub();
const showDebugLog = stub();
const showCallingDevTools = stub();
const showKeyboardShortcuts = stub();
const showSettings = stub();
const showWindow = stub();
@@ -70,6 +71,7 @@ const getExpectedViewMenu = (): MenuItemConstructorOptions => ({
{ label: 'Debug Log', click: showDebugLog },
{ type: 'separator' },
{ label: 'Toggle Developer Tools', role: 'toggleDevTools' },
{ label: 'Open Calling Developer Tools', click: showCallingDevTools },
{ label: 'Force Update', click: forceUpdate },
],
});
@@ -227,6 +229,7 @@ describe('createTemplate', () => {
setupAsStandalone,
showAbout,
showDebugLog,
showCallingDevTools,
showKeyboardShortcuts,
showSettings,
showWindow,


@@ -1,21 +1,16 @@
// Copyright 2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import * as sinon from 'sinon';
import { Job } from '../../../jobs/Job';
import { generateAci } from '../../../types/ServiceId';
import { addReportSpamJob } from '../../../jobs/helpers/addReportSpamJob';
import type { ConversationType } from '../../../state/ducks/conversations';
import { getDefaultConversation } from '../../../test-both/helpers/getDefaultConversation';
describe('addReportSpamJob', () => {
let getMessageServerGuidsForSpam: sinon.SinonStub;
let jobQueue: { add: sinon.SinonStub };
const conversation = {
id: 'convo',
type: 'private' as const,
serviceId: generateAci(),
};
const conversation: ConversationType = getDefaultConversation();
beforeEach(() => {
getMessageServerGuidsForSpam = sinon.stub().resolves(['abc', 'xyz']);


@@ -32,7 +32,7 @@ const createMockElement = (
({
classList: new FakeTokenList([className]),
dataset,
} as unknown as HTMLElement);
}) as unknown as HTMLElement;
const createMockAtMentionElement = (
dataset: Record<string, string>


@@ -2,12 +2,17 @@
// SPDX-License-Identifier: AGPL-3.0-only
import { noop } from 'lodash';
import type { Database } from '@signalapp/better-sqlite3';
import SQL from '@signalapp/better-sqlite3';
import type { ReadableDB, WritableDB } from '../../sql/Interface';
import { SCHEMA_VERSIONS } from '../../sql/migrations';
import { consoleLogger } from '../../util/consoleLogger';
export function updateToVersion(db: Database, version: number): void {
export function createDB(): WritableDB {
return new SQL(':memory:') as WritableDB;
}
export function updateToVersion(db: WritableDB, version: number): void {
const startVersion = db.pragma('user_version', { simple: true });
const silentLogger = {
@@ -32,7 +37,11 @@ type TableRows = ReadonlyArray<
Record<string, string | number | null | Record<string, unknown>>
>;
export function insertData(db: Database, table: string, rows: TableRows): void {
export function insertData(
db: WritableDB,
table: string,
rows: TableRows
): void {
for (const row of rows) {
db.prepare(
`
@@ -52,7 +61,7 @@ export function insertData(db: Database, table: string, rows: TableRows): void {
}
}
export function getTableData(db: Database, table: string): TableRows {
export function getTableData(db: ReadableDB, table: string): TableRows {
return db
.prepare(`SELECT * FROM ${table}`)
.all()


@@ -0,0 +1,109 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import type { WritableDB } from '../../sql/Interface';
import { migrateConversationMessages, setupTests } from '../../sql/Server';
import { createDB, insertData, getTableData } from './helpers';
describe('SQL/migrateConversationMessages', () => {
let db: WritableDB;
beforeEach(() => {
db = createDB();
setupTests(db);
});
afterEach(() => {
db.close();
});
function compactify(
message: Record<string, unknown>
): Record<string, unknown> {
const { id, conversationId, json } = message;
return {
id,
conversationId,
json,
};
}
it('should leave irrelevant messages intact', () => {
insertData(db, 'messages', [
{
id: 'irrelevant',
conversationId: 'other',
json: {
conversationId: 'other',
},
},
]);
migrateConversationMessages(db, 'obsolete', 'current');
assert.deepStrictEqual(getTableData(db, 'messages').map(compactify), [
{
id: 'irrelevant',
conversationId: 'other',
json: {
conversationId: 'other',
},
},
]);
});
it('should update conversationId and send state', () => {
insertData(db, 'messages', [
{
id: 'no-send-state',
conversationId: 'obsolete',
json: {
conversationId: 'obsolete',
body: 'test',
sendStateByConversationId: {
other: 'Failed',
obsolete: 'Read',
},
editHistory: [
{
body: 'test2',
sendStateByConversationId: {
other: 'Failed',
obsolete: 'Read',
},
},
],
},
},
]);
migrateConversationMessages(db, 'obsolete', 'current');
assert.deepStrictEqual(getTableData(db, 'messages').map(compactify), [
{
id: 'no-send-state',
conversationId: 'current',
json: {
body: 'test',
conversationId: 'current',
sendStateByConversationId: {
other: 'Failed',
current: 'Read',
},
editHistory: [
{
body: 'test2',
sendStateByConversationId: {
other: 'Failed',
current: 'Read',
},
},
],
},
},
]);
});
});


@@ -2,21 +2,19 @@
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import type { Database } from '@signalapp/better-sqlite3';
import SQL from '@signalapp/better-sqlite3';
import { v4 as generateGuid } from 'uuid';
import { jsonToObject, sql } from '../../sql/util';
import { updateToVersion } from './helpers';
import type { MessageType } from '../../sql/Interface';
import { createDB, updateToVersion } from './helpers';
import type { WritableDB, MessageType } from '../../sql/Interface';
import { ReadStatus } from '../../messages/MessageReadStatus';
import { SeenStatus } from '../../MessageSeenStatus';
describe('SQL/updateToSchemaVersion1000', () => {
let db: Database;
let db: WritableDB;
beforeEach(() => {
db = new SQL(':memory:');
db = createDB();
updateToVersion(db, 990);
});


@@ -0,0 +1,84 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import { v4 as generateGuid } from 'uuid';
import { normalizeAci } from '../../util/normalizeAci';
import type { WritableDB } from '../../sql/Interface';
import { createDB, insertData, getTableData, updateToVersion } from './helpers';
describe('SQL/updateToSchemaVersion1020', () => {
let db: WritableDB;
const OUR_ACI = normalizeAci(
generateGuid(),
'updateToSchemaVersion1020 test'
);
const THEIR_ACI = normalizeAci(
generateGuid(),
'updateToSchemaVersion1020 test'
);
beforeEach(() => {
db = createDB();
updateToVersion(db, 1010);
});
afterEach(() => {
db.close();
});
it('removes self merges and nothing else', () => {
insertData(db, 'items', [
{
id: 'uuid_id',
json: {
id: 'uuid_id',
value: `${OUR_ACI}.2`,
},
},
]);
insertData(db, 'conversations', [
{
id: 'us',
serviceId: OUR_ACI,
},
{
id: 'them',
serviceId: THEIR_ACI,
},
]);
insertData(db, 'messages', [
{
id: 'a',
conversationId: 'us',
type: 'conversation-merge',
},
{
id: 'b',
conversationId: 'us',
type: 'incoming',
},
{
id: 'c',
conversationId: 'them',
type: 'conversation-merge',
},
{
id: 'd',
conversationId: 'them',
type: 'incoming',
},
]);
updateToVersion(db, 1020);
assert.deepStrictEqual(
getTableData(db, 'messages').map(m => m.id),
['b', 'c', 'd']
);
});
});


@@ -0,0 +1,156 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import { v4 as generateGuid } from 'uuid';
import { sql } from '../../sql/util';
import { createDB, updateToVersion } from './helpers';
import type { WritableDB, MessageType } from '../../sql/Interface';
import { MessageRequestResponseEvent } from '../../types/MessageRequestResponseEvent';
describe('SQL/updateToSchemaVersion1030', () => {
let db: WritableDB;
beforeEach(() => {
db = createDB();
updateToVersion(db, 1020);
});
afterEach(() => {
db.close();
});
function createMessage(
attrs: Pick<MessageType, 'type' | 'messageRequestResponseEvent'>
): MessageType {
const message: MessageType = {
id: generateGuid(),
conversationId: generateGuid(),
received_at: Date.now(),
sent_at: Date.now(),
received_at_ms: Date.now(),
timestamp: Date.now(),
...attrs,
};
const json = JSON.stringify(message);
const [query, params] = sql`
INSERT INTO messages
(id, conversationId, type, json)
VALUES
(
${message.id},
${message.conversationId},
${message.type},
${json}
)
`;
db.prepare(query).run(params);
return message;
}
function getMessages() {
const [query] = sql`
SELECT type, json_extract(json, '$.messageRequestResponseEvent') AS event, shouldAffectActivity, shouldAffectPreview FROM messages;
`;
return db.prepare(query).all();
}
const INCLUDED_TYPES = [
'call-history',
'chat-session-refreshed',
'delivery-issue',
'group-v2-change',
'group',
'incoming',
'outgoing',
'phone-number-discovery',
'timer-notification',
'title-transition-notification',
] as const;
const EXCLUDED_TYPES = [
'change-number-notification',
'contact-removed-notification',
'conversation-merge',
'group-v1-migration',
'keychange',
// eslint-disable-next-line @typescript-eslint/no-explicit-any -- legacy type
'message-history-unsynced' as any,
'profile-change',
'story',
'universal-timer-notification',
'verified-change',
] as const;
it('marks activity and preview correctly', () => {
for (const type of [...INCLUDED_TYPES, ...EXCLUDED_TYPES]) {
createMessage({
type,
});
}
createMessage({
type: 'message-request-response-event',
messageRequestResponseEvent: MessageRequestResponseEvent.ACCEPT,
});
createMessage({
type: 'message-request-response-event',
messageRequestResponseEvent: MessageRequestResponseEvent.BLOCK,
});
createMessage({
type: 'message-request-response-event',
messageRequestResponseEvent: MessageRequestResponseEvent.UNBLOCK,
});
createMessage({
type: 'message-request-response-event',
messageRequestResponseEvent: MessageRequestResponseEvent.SPAM,
});
updateToVersion(db, 1030);
const messages = getMessages();
assert.deepStrictEqual(messages, [
...INCLUDED_TYPES.map(type => {
return {
type,
event: null,
shouldAffectActivity: 1,
shouldAffectPreview: 1,
};
}),
...EXCLUDED_TYPES.map(type => {
return {
type,
event: null,
shouldAffectActivity: 0,
shouldAffectPreview: 0,
};
}),
{
type: 'message-request-response-event',
event: MessageRequestResponseEvent.ACCEPT,
shouldAffectActivity: 0,
shouldAffectPreview: 0,
},
{
type: 'message-request-response-event',
event: MessageRequestResponseEvent.BLOCK,
shouldAffectActivity: 0,
shouldAffectPreview: 0,
},
{
type: 'message-request-response-event',
event: MessageRequestResponseEvent.UNBLOCK,
shouldAffectActivity: 0,
shouldAffectPreview: 0,
},
{
type: 'message-request-response-event',
event: MessageRequestResponseEvent.SPAM,
shouldAffectActivity: 1,
shouldAffectPreview: 1,
},
]);
});
});


@@ -0,0 +1,483 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { omit } from 'lodash';
import { assert } from 'chai';
import type { ReadableDB, WritableDB } from '../../sql/Interface';
import { jsonToObject, objectToJSON, sql, sqlJoin } from '../../sql/util';
import { createDB, updateToVersion } from './helpers';
import type { LegacyAttachmentDownloadJobType } from '../../sql/migrations/1040-undownloaded-backed-up-media';
import type { AttachmentType } from '../../types/Attachment';
import type { AttachmentDownloadJobType } from '../../types/AttachmentDownload';
import { IMAGE_JPEG } from '../../types/MIME';
function getAttachmentDownloadJobs(db: ReadableDB) {
const [query] = sql`
SELECT * FROM attachment_downloads ORDER BY receivedAt DESC;
`;
return db
.prepare(query)
.all()
.map(job => ({
...omit(job, 'attachmentJson'),
attachment: jsonToObject(job.attachmentJson),
}));
}
type UnflattenedAttachmentDownloadJobType = Omit<
AttachmentDownloadJobType,
'digest' | 'contentType' | 'size'
>;
function insertNewJob(
db: WritableDB,
job: UnflattenedAttachmentDownloadJobType,
addMessageFirst: boolean = true
): void {
if (addMessageFirst) {
try {
db.prepare('INSERT INTO messages (id) VALUES ($id)').run({
id: job.messageId,
});
} catch (e) {
// pass; message has already been inserted
}
}
const [query, params] = sql`
INSERT INTO attachment_downloads
(
messageId,
attachmentType,
attachmentJson,
digest,
contentType,
size,
receivedAt,
sentAt,
active,
attempts,
retryAfter,
lastAttemptTimestamp
)
VALUES
(
${job.messageId},
${job.attachmentType},
${objectToJSON(job.attachment)},
${job.attachment.digest},
${job.attachment.contentType},
${job.attachment.size},
${job.receivedAt},
${job.sentAt},
${job.active ? 1 : 0},
${job.attempts},
${job.retryAfter},
${job.lastAttemptTimestamp}
);
`;
db.prepare(query).run(params);
}
describe('SQL/updateToSchemaVersion1040', () => {
describe('Storing of new attachment jobs', () => {
let db: WritableDB;
beforeEach(() => {
db = createDB();
updateToVersion(db, 1040);
});
afterEach(() => {
db.close();
});
it('allows storing of new backup attachment jobs', () => {
insertNewJob(db, {
messageId: 'message1',
attachmentType: 'attachment',
attachment: {
digest: 'digest1',
contentType: IMAGE_JPEG,
size: 128,
},
receivedAt: 1970,
sentAt: 2070,
active: false,
retryAfter: null,
attempts: 0,
lastAttemptTimestamp: null,
});
insertNewJob(db, {
messageId: 'message2',
attachmentType: 'attachment',
attachment: {
digest: 'digest2',
contentType: IMAGE_JPEG,
size: 128,
},
receivedAt: 1971,
sentAt: 2071,
active: false,
retryAfter: 1204,
attempts: 0,
lastAttemptTimestamp: 1004,
});
const attachments = getAttachmentDownloadJobs(db);
assert.strictEqual(attachments.length, 2);
assert.deepEqual(attachments, [
{
messageId: 'message2',
attachmentType: 'attachment',
digest: 'digest2',
contentType: IMAGE_JPEG,
size: 128,
receivedAt: 1971,
sentAt: 2071,
active: 0,
retryAfter: 1204,
attempts: 0,
lastAttemptTimestamp: 1004,
attachment: {
digest: 'digest2',
contentType: IMAGE_JPEG,
size: 128,
},
},
{
messageId: 'message1',
attachmentType: 'attachment',
digest: 'digest1',
contentType: IMAGE_JPEG,
size: 128,
receivedAt: 1970,
sentAt: 2070,
active: 0,
retryAfter: null,
attempts: 0,
lastAttemptTimestamp: null,
attachment: {
digest: 'digest1',
contentType: IMAGE_JPEG,
size: 128,
},
},
]);
});
it('Respects primary key constraint', () => {
const job: UnflattenedAttachmentDownloadJobType = {
messageId: 'message1',
attachmentType: 'attachment',
attachment: {
digest: 'digest1',
contentType: IMAGE_JPEG,
size: 128,
},
receivedAt: 1970,
sentAt: 2070,
active: false,
retryAfter: null,
attempts: 0,
lastAttemptTimestamp: null,
};
insertNewJob(db, job);
assert.throws(() => {
insertNewJob(db, { ...job, attempts: 1 });
});
const attachments = getAttachmentDownloadJobs(db);
assert.strictEqual(attachments.length, 1);
assert.strictEqual(attachments[0].attempts, 0);
});
it('uses indices searching for next job', () => {
const now = Date.now();
const job: UnflattenedAttachmentDownloadJobType = {
messageId: 'message1',
attachmentType: 'attachment',
attachment: {
digest: 'digest1',
contentType: IMAGE_JPEG,
size: 128,
},
receivedAt: 101,
sentAt: 101,
attempts: 0,
active: false,
retryAfter: null,
lastAttemptTimestamp: null,
};
insertNewJob(db, job);
insertNewJob(db, {
...job,
messageId: 'message2',
receivedAt: 102,
sentAt: 102,
retryAfter: now + 1,
lastAttemptTimestamp: now - 10,
});
insertNewJob(db, {
...job,
messageId: 'message3',
active: true,
receivedAt: 103,
sentAt: 103,
});
insertNewJob(db, {
...job,
messageId: 'message4',
attachmentType: 'contact',
receivedAt: 104,
sentAt: 104,
retryAfter: now,
lastAttemptTimestamp: now - 1000,
});
{
const [query, params] = sql`
SELECT * FROM attachment_downloads
WHERE
active = 0
AND
(retryAfter is NULL OR retryAfter <= ${now})
ORDER BY receivedAt DESC
LIMIT 5
`;
const result = db.prepare(query).all(params);
assert.strictEqual(result.length, 2);
assert.deepStrictEqual(
result.map(res => res.messageId),
['message4', 'message1']
);
const details = db
.prepare(`EXPLAIN QUERY PLAN ${query}`)
.all(params)
.map(step => step.detail)
.join(', ');
assert.include(
details,
'USING INDEX attachment_downloads_active_receivedAt'
);
assert.notInclude(details, 'TEMP B-TREE');
assert.notInclude(details, 'SCAN');
}
{
const messageIds = ['message1', 'message2', 'message4'];
const [query, params] = sql`
SELECT * FROM attachment_downloads
INDEXED BY attachment_downloads_active_messageId
WHERE
active = 0
AND
(lastAttemptTimestamp is NULL OR lastAttemptTimestamp <= ${now - 100})
AND
messageId IN (${sqlJoin(messageIds)})
ORDER BY receivedAt ASC
LIMIT 5
`;
const result = db.prepare(query).all(params);
assert.strictEqual(result.length, 2);
assert.deepStrictEqual(
result.map(res => res.messageId),
['message1', 'message4']
);
const details = db
.prepare(`EXPLAIN QUERY PLAN ${query}`)
.all(params)
.map(step => step.detail)
.join(', ');
// This query _will_ use a temp b-tree for ordering, but the number of rows
// should be quite low.
assert.include(
details,
'USING INDEX attachment_downloads_active_messageId'
);
}
});
it('respects foreign key constraint on messageId', () => {
const job: AttachmentDownloadJobType = {
messageId: 'message1',
attachmentType: 'attachment',
attachment: {
digest: 'digest1',
contentType: IMAGE_JPEG,
size: 128,
},
receivedAt: 1970,
digest: 'digest1',
contentType: IMAGE_JPEG,
size: 128,
sentAt: 2070,
active: false,
retryAfter: null,
attempts: 0,
lastAttemptTimestamp: null,
};
// throws if we don't add the message first
assert.throws(() => insertNewJob(db, job, false));
insertNewJob(db, job, true);
assert.strictEqual(getAttachmentDownloadJobs(db).length, 1);
// Deletes the job when the message is deleted
db.prepare('DELETE FROM messages WHERE id = $id').run({
id: job.messageId,
});
assert.strictEqual(getAttachmentDownloadJobs(db).length, 0);
});
});
describe('existing jobs are transferred', () => {
let db: WritableDB;
beforeEach(() => {
db = createDB();
updateToVersion(db, 1030);
});
afterEach(() => {
db.close();
});
it('existing rows are retained; invalid existing rows are removed', () => {
insertLegacyJob(db, {
id: 'id-1',
messageId: 'message-1',
timestamp: 1000,
attachment: {
size: 100,
contentType: 'image/png',
digest: 'digest1',
cdnKey: 'key1',
} as AttachmentType,
pending: 0,
index: 0,
type: 'attachment',
});
insertLegacyJob(db, {
id: 'invalid-1',
});
insertLegacyJob(db, {
id: 'id-2',
messageId: 'message-2',
timestamp: 1001,
attachment: {
size: 100,
contentType: 'image/jpeg',
digest: 'digest2',
cdnKey: 'key2',
} as AttachmentType,
pending: 1,
index: 2,
type: 'attachment',
attempts: 1,
});
insertLegacyJob(db, {
id: 'invalid-2',
timestamp: 1000,
attachment: { size: 100, contentType: 'image/jpeg' } as AttachmentType,
pending: 0,
index: 0,
type: 'attachment',
});
insertLegacyJob(db, {
id: 'invalid-3-no-content-type',
timestamp: 1000,
attachment: { size: 100 } as AttachmentType,
pending: 0,
index: 0,
type: 'attachment',
});
insertLegacyJob(db, {
id: 'duplicate-1',
messageId: 'message-1',
timestamp: 1000,
attachment: {
size: 100,
contentType: 'image/jpeg',
digest: 'digest1',
} as AttachmentType,
pending: 0,
index: 0,
type: 'attachment',
});
const legacyJobs = db.prepare('SELECT * FROM attachment_downloads').all();
assert.strictEqual(legacyJobs.length, 6);
updateToVersion(db, 1040);
const newJobs = getAttachmentDownloadJobs(db);
assert.strictEqual(newJobs.length, 2);
assert.deepEqual(newJobs[1], {
messageId: 'message-1',
receivedAt: 1000,
sentAt: 1000,
attachment: {
size: 100,
contentType: 'image/png',
digest: 'digest1',
cdnKey: 'key1',
},
size: 100,
contentType: 'image/png',
digest: 'digest1',
active: 0,
attempts: 0,
attachmentType: 'attachment',
lastAttemptTimestamp: null,
retryAfter: null,
});
assert.deepEqual(newJobs[0], {
messageId: 'message-2',
receivedAt: 1001,
sentAt: 1001,
attachment: {
size: 100,
contentType: 'image/jpeg',
digest: 'digest2',
cdnKey: 'key2',
},
size: 100,
contentType: 'image/jpeg',
digest: 'digest2',
active: 0,
attempts: 1,
attachmentType: 'attachment',
lastAttemptTimestamp: null,
retryAfter: null,
});
});
});
});
function insertLegacyJob(
db: WritableDB,
job: Partial<LegacyAttachmentDownloadJobType>
): void {
db.prepare('INSERT OR REPLACE INTO messages (id) VALUES ($id)').run({
id: job.messageId,
});
const [query, params] = sql`
INSERT INTO attachment_downloads
(id, timestamp, pending, json)
VALUES
(
${job.id},
${job.timestamp},
${job.pending},
${objectToJSON(job)}
);
`;
db.prepare(query).run(params);
}


@@ -0,0 +1,298 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import { v4 as generateGuid } from 'uuid';
import {
getAllSyncTasks,
getMostRecentAddressableMessages,
removeSyncTaskById,
saveSyncTasks,
} from '../../sql/Server';
import type { WritableDB } from '../../sql/Interface';
import { insertData, updateToVersion, createDB } from './helpers';
import { MAX_SYNC_TASK_ATTEMPTS } from '../../util/syncTasks.types';
import { WEEK } from '../../util/durations';
import type { MessageAttributesType } from '../../model-types';
import type { SyncTaskType } from '../../util/syncTasks';
/* eslint-disable camelcase */
function generateMessage(json: MessageAttributesType) {
const { conversationId, received_at, sent_at, type } = json;
return {
conversationId,
json,
received_at,
sent_at,
type,
};
}
describe('SQL/updateToSchemaVersion1060', () => {
let db: WritableDB;
beforeEach(() => {
db = createDB();
updateToVersion(db, 1060);
});
afterEach(() => {
db.close();
});
describe('Addressable Messages', () => {
describe('Storing of new attachment jobs', () => {
it('returns only incoming/outgoing messages', () => {
const conversationId = generateGuid();
const otherConversationId = generateGuid();
insertData(db, 'messages', [
generateMessage({
id: '1',
conversationId,
type: 'incoming',
received_at: 1,
sent_at: 1,
timestamp: 1,
}),
generateMessage({
id: '2',
conversationId,
type: 'story',
received_at: 2,
sent_at: 2,
timestamp: 2,
}),
generateMessage({
id: '3',
conversationId,
type: 'outgoing',
received_at: 3,
sent_at: 3,
timestamp: 3,
}),
generateMessage({
id: '4',
conversationId,
type: 'group-v1-migration',
received_at: 4,
sent_at: 4,
timestamp: 4,
}),
generateMessage({
id: '5',
conversationId,
type: 'group-v2-change',
received_at: 5,
sent_at: 5,
timestamp: 5,
}),
generateMessage({
id: '6',
conversationId,
type: 'incoming',
received_at: 6,
sent_at: 6,
timestamp: 6,
}),
generateMessage({
id: '7',
conversationId,
type: 'profile-change',
received_at: 7,
sent_at: 7,
timestamp: 7,
}),
generateMessage({
id: '8',
conversationId: otherConversationId,
type: 'incoming',
received_at: 8,
sent_at: 8,
timestamp: 8,
}),
]);
const messages = getMostRecentAddressableMessages(db, conversationId);
assert.lengthOf(messages, 3);
assert.deepEqual(messages, [
{
id: '6',
conversationId,
type: 'incoming',
received_at: 6,
sent_at: 6,
timestamp: 6,
},
{
id: '3',
conversationId,
type: 'outgoing',
received_at: 3,
sent_at: 3,
timestamp: 3,
},
{
id: '1',
conversationId,
type: 'incoming',
received_at: 1,
sent_at: 1,
timestamp: 1,
},
]);
});
it('ensures that index is used for getMostRecentAddressableMessages, with storyId', () => {
const { detail } = db
.prepare(
`
EXPLAIN QUERY PLAN
SELECT json FROM messages
INDEXED BY messages_by_date_addressable
WHERE
conversationId IS 'not-important' AND
isAddressableMessage = 1
ORDER BY received_at DESC, sent_at DESC
LIMIT 5;
`
)
.get();
assert.notInclude(detail, 'B-TREE');
assert.notInclude(detail, 'SCAN');
assert.include(
detail,
'SEARCH messages USING INDEX messages_by_date_addressable (conversationId=? AND isAddressableMessage=?)'
);
});
});
});
describe('Sync Tasks', () => {
it('creates tasks in bulk, and fetches all', () => {
const now = Date.now();
const expected: Array<SyncTaskType> = [
{
id: generateGuid(),
attempts: 1,
createdAt: now + 1,
data: {
jsonField: 'one',
data: 1,
},
envelopeId: 'envelope-id-1',
sentAt: 1,
type: 'delete-conversation',
},
{
id: generateGuid(),
attempts: 2,
createdAt: now + 2,
data: {
jsonField: 'two',
data: 2,
},
envelopeId: 'envelope-id-2',
sentAt: 2,
type: 'delete-conversation',
},
{
id: generateGuid(),
attempts: 3,
createdAt: now + 3,
data: {
jsonField: 'three',
data: 3,
},
envelopeId: 'envelope-id-3',
sentAt: 3,
type: 'delete-conversation',
},
];
saveSyncTasks(db, expected);
const actual = getAllSyncTasks(db);
assert.deepEqual(expected, actual, 'before delete');
removeSyncTaskById(db, expected[1].id);
const actualAfterDelete = getAllSyncTasks(db);
assert.deepEqual(
[
{ ...expected[0], attempts: 2 },
{ ...expected[2], attempts: 4 },
],
actualAfterDelete,
'after delete'
);
});
it('getAllSyncTasksSync expired tasks', () => {
const now = Date.now();
const twoWeeksAgo = now - WEEK * 2;
const expected: Array<SyncTaskType> = [
{
id: generateGuid(),
attempts: MAX_SYNC_TASK_ATTEMPTS,
createdAt: twoWeeksAgo,
data: {
jsonField: 'expired',
data: 1,
},
envelopeId: 'envelope-id-1',
sentAt: 1,
type: 'delete-conversation',
},
{
id: generateGuid(),
attempts: 2,
createdAt: twoWeeksAgo,
data: {
jsonField: 'old-but-few-attempts',
data: 2,
},
envelopeId: 'envelope-id-2',
sentAt: 2,
type: 'delete-conversation',
},
{
id: generateGuid(),
attempts: MAX_SYNC_TASK_ATTEMPTS * 2,
createdAt: now,
data: {
jsonField: 'new-but-many-attempts',
data: 3,
},
envelopeId: 'envelope-id-3',
sentAt: 3,
type: 'delete-conversation',
},
{
id: generateGuid(),
attempts: MAX_SYNC_TASK_ATTEMPTS - 1,
createdAt: now + 1,
data: {
jsonField: 'new-and-fresh',
data: 4,
},
envelopeId: 'envelope-id-4',
sentAt: 4,
type: 'delete-conversation',
},
];
saveSyncTasks(db, expected);
const actual = getAllSyncTasks(db);
assert.lengthOf(actual, 3);
assert.deepEqual([expected[1], expected[2], expected[3]], actual);
});
});
});


@@ -0,0 +1,164 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import { v4 as generateGuid } from 'uuid';
import type { WritableDB } from '../../sql/Interface';
import { getMostRecentAddressableNondisappearingMessages } from '../../sql/Server';
import { createDB, insertData, updateToVersion } from './helpers';
import type { MessageAttributesType } from '../../model-types';
import { DurationInSeconds } from '../../util/durations/duration-in-seconds';
/* eslint-disable camelcase */
function generateMessage(json: MessageAttributesType) {
const { conversationId, expireTimer, received_at, sent_at, type } = json;
return {
conversationId,
json,
received_at,
sent_at,
expireTimer: Number(expireTimer),
type,
};
}
describe('SQL/updateToSchemaVersion1080', () => {
let db: WritableDB;
beforeEach(() => {
db = createDB();
updateToVersion(db, 1080);
});
afterEach(() => {
db.close();
});
describe('Addressable Messages', () => {
it('returns only incoming/outgoing messages', () => {
const conversationId = generateGuid();
const otherConversationId = generateGuid();
insertData(db, 'messages', [
generateMessage({
id: '1',
conversationId,
type: 'incoming',
received_at: 1,
sent_at: 1,
timestamp: 1,
}),
generateMessage({
id: '2',
conversationId,
type: 'story',
received_at: 2,
sent_at: 2,
timestamp: 2,
}),
generateMessage({
id: '3',
conversationId,
type: 'outgoing',
received_at: 3,
sent_at: 3,
timestamp: 3,
}),
generateMessage({
id: '4',
conversationId,
type: 'group-v1-migration',
received_at: 4,
sent_at: 4,
timestamp: 4,
}),
generateMessage({
id: '5',
conversationId,
type: 'group-v2-change',
received_at: 5,
sent_at: 5,
timestamp: 5,
}),
generateMessage({
id: '6',
conversationId,
type: 'incoming',
received_at: 6,
sent_at: 6,
timestamp: 6,
expireTimer: DurationInSeconds.fromMinutes(10),
}),
generateMessage({
id: '7',
conversationId,
type: 'profile-change',
received_at: 7,
sent_at: 7,
timestamp: 7,
}),
generateMessage({
id: '8',
conversationId: otherConversationId,
type: 'incoming',
received_at: 8,
sent_at: 8,
timestamp: 8,
}),
]);
const messages = getMostRecentAddressableNondisappearingMessages(
db,
conversationId
);
assert.lengthOf(messages, 2);
assert.deepEqual(messages, [
{
id: '3',
conversationId,
type: 'outgoing',
received_at: 3,
sent_at: 3,
timestamp: 3,
},
{
id: '1',
conversationId,
type: 'incoming',
received_at: 1,
sent_at: 1,
timestamp: 1,
},
]);
});
it('ensures that index is used for getMostRecentAddressableNondisappearingMessagesSync, with storyId', () => {
const { detail } = db
.prepare(
`
EXPLAIN QUERY PLAN
SELECT json FROM messages
INDEXED BY messages_by_date_addressable_nondisappearing
WHERE
expireTimer IS NULL AND
conversationId IS 'not-important' AND
isAddressableMessage = 1
ORDER BY received_at DESC, sent_at DESC
LIMIT 5;
`
)
.get();
assert.notInclude(detail, 'B-TREE');
assert.notInclude(detail, 'SCAN');
assert.include(
detail,
'SEARCH messages USING INDEX messages_by_date_addressable_nondisappearing (conversationId=? AND isAddressableMessage=?)'
);
});
});
});


@@ -0,0 +1,55 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import type { WritableDB } from '../../sql/Interface';
import { createDB, updateToVersion } from './helpers';
describe('SQL/updateToSchemaVersion1090', () => {
let db: WritableDB;
beforeEach(() => {
db = createDB();
updateToVersion(db, 1090);
});
afterEach(() => {
db.close();
});
describe('Additional messages_on_delete indexes', () => {
it('uses index for selecting reactions by messageId', () => {
const details = db
.prepare(
`EXPLAIN QUERY PLAN
SELECT rowid FROM reactions
WHERE messageId = '123';
`
)
.all()
.map(step => step.detail)
.join(', ');
assert.strictEqual(
details,
'SEARCH reactions USING COVERING INDEX reactions_messageId (messageId=?)'
);
});
it('uses index for selecting storyReads by storyId', () => {
const details = db
.prepare(
`EXPLAIN QUERY PLAN
DELETE FROM storyReads WHERE storyId = '123';
`
)
.all()
.map(step => step.detail)
.join(', ');
assert.strictEqual(
details,
'SEARCH storyReads USING INDEX storyReads_storyId (storyId=?)'
);
});
});
});


@@ -0,0 +1,80 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import { findLast } from 'lodash';
import type { WritableDB } from '../../sql/Interface';
import { markAllCallHistoryRead } from '../../sql/Server';
import { SeenStatus } from '../../MessageSeenStatus';
import { CallMode } from '../../types/Calling';
import {
CallDirection,
CallType,
DirectCallStatus,
} from '../../types/CallDisposition';
import { strictAssert } from '../../util/assert';
import { createDB, insertData, updateToVersion } from './helpers';
describe('SQL/updateToSchemaVersion1100', () => {
let db: WritableDB;
beforeEach(() => {
db = createDB();
updateToVersion(db, 1100);
});
afterEach(() => {
db.close();
});
describe('Optimize markAllCallHistoryReadInConversation', () => {
it('is fast', () => {
const COUNT = 10_000;
const messages = Array.from({ length: COUNT }, (_, index) => {
return {
id: `test-message-${index}`,
type: 'call-history',
seenStatus: SeenStatus.Unseen,
conversationId: `test-conversation-${index % 30}`,
sent_at: index,
json: {
callId: `test-call-${index}`,
},
};
});
const callsHistory = Array.from({ length: COUNT }, (_, index) => {
return {
callId: `test-call-${index}`,
peerId: `test-conversation-${index % 30}`,
timestamp: index,
ringerId: null,
mode: CallMode.Direct,
type: CallType.Video,
direction: CallDirection.Incoming,
status: DirectCallStatus.Missed,
};
});
insertData(db, 'messages', messages);
insertData(db, 'callsHistory', callsHistory);
const latestCallInConversation = findLast(callsHistory, call => {
return call.peerId === 'test-conversation-0';
});
strictAssert(latestCallInConversation, 'missing latest call');
const target = {
timestamp: latestCallInConversation.timestamp,
callId: latestCallInConversation.callId,
peerId: latestCallInConversation.peerId,
};
const start = performance.now();
markAllCallHistoryRead(db, target, true);
const end = performance.now();
assert.isBelow(end - start, 50);
});
});
});


@@ -0,0 +1,52 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import type { WritableDB } from '../../sql/Interface';
import { createDB, updateToVersion } from './helpers';
describe('SQL/updateToSchemaVersion1120', () => {
let db: WritableDB;
beforeEach(() => {
db = createDB();
updateToVersion(db, 1120);
});
afterEach(() => {
db.close();
});
it('uses index for deleting edited messages', () => {
const details = db
.prepare(
`EXPLAIN QUERY PLAN
DELETE FROM edited_messages WHERE messageId = 'messageId';
`
)
.all()
.map(step => step.detail)
.join(', ');
assert.strictEqual(
details,
'SEARCH edited_messages USING COVERING INDEX edited_messages_messageId (messageId=?)'
);
});
it('uses index for deleting mentions', () => {
const details = db
.prepare(
`EXPLAIN QUERY PLAN
DELETE FROM mentions WHERE messageId = 'messageId';
`
)
.all()
.map(step => step.detail)
.join(', ');
assert.strictEqual(
details,
'SEARCH mentions USING COVERING INDEX mentions_messageId (messageId=?)'
);
});
});


@@ -0,0 +1,145 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import type { WritableDB } from '../../sql/Interface';
import { createDB, updateToVersion } from './helpers';
describe('SQL/updateToSchemaVersion1130', () => {
let db: WritableDB;
beforeEach(() => {
db = createDB();
updateToVersion(db, 1130);
});
afterEach(() => {
db.close();
});
it('uses new index for getAllStories query and no params', () => {
const details = db
.prepare(
`
EXPLAIN QUERY PLAN
SELECT json, id
FROM messages
WHERE
isStory = 1 AND
(NULL IS NULL OR conversationId IS NULL) AND
(NULL IS NULL OR sourceServiceId IS NULL)
ORDER BY received_at ASC, sent_at ASC;
`
)
.all()
.map(step => step.detail)
.join(', ');
assert.strictEqual(details, 'SCAN messages USING INDEX messages_isStory');
});
it('uses new index for getAllStories query and with conversationId', () => {
const details = db
.prepare(
`
EXPLAIN QUERY PLAN
SELECT json, id
FROM messages
WHERE
isStory = 1 AND
('something' IS NULL OR conversationId IS 'something') AND
(NULL IS NULL OR sourceServiceId IS NULL)
ORDER BY received_at ASC, sent_at ASC;
`
)
.all()
.map(step => step.detail)
.join(', ');
assert.strictEqual(details, 'SCAN messages USING INDEX messages_isStory');
});
it('uses new index for getAllStories query and with sourceServiceId', () => {
const details = db
.prepare(
`
EXPLAIN QUERY PLAN
SELECT json, id
FROM messages
WHERE
isStory = 1 AND
(NULL IS NULL OR conversationId IS NULL) AND
('something' IS NULL OR sourceServiceId IS 'something')
ORDER BY received_at ASC, sent_at ASC;
`
)
.all()
.map(step => step.detail)
.join(', ');
assert.strictEqual(details, 'SCAN messages USING INDEX messages_isStory');
});
it('uses new index for getAllStories query and both params', () => {
const details = db
.prepare(
`
EXPLAIN QUERY PLAN
SELECT json, id
FROM messages
WHERE
isStory = 1 AND
('something' IS NULL OR conversationId IS 'something') AND
('something' IS NULL OR sourceServiceId IS 'something')
ORDER BY received_at ASC, sent_at ASC;
`
)
.all()
.map(step => step.detail)
.join(', ');
assert.strictEqual(details, 'SCAN messages USING INDEX messages_isStory');
});
it('uses previous index for getAllStories get replies query', () => {
const details = db
.prepare(
`
EXPLAIN QUERY PLAN
SELECT DISTINCT storyId
FROM messages
WHERE storyId IS NOT NULL
`
)
.all()
.map(step => step.detail)
.join(', ');
assert.strictEqual(
details,
'SEARCH messages USING COVERING INDEX messages_by_storyId (storyId>?)'
);
});
it('uses previous index for getAllStories get replies from self query', () => {
const details = db
.prepare(
`
EXPLAIN QUERY PLAN
SELECT DISTINCT storyId
FROM messages
WHERE (
storyId IS NOT NULL AND
type IS 'outgoing'
)
`
)
.all()
.map(step => step.detail)
.join(', ');
assert.strictEqual(
details,
'SEARCH messages USING INDEX messages_by_storyId (storyId>?)'
);
});
});


@@ -2,12 +2,10 @@
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import type { Database } from '@signalapp/better-sqlite3';
import SQL from '@signalapp/better-sqlite3';
import { v4 as generateGuid } from 'uuid';
import { range } from 'lodash';
import { insertData, updateToVersion } from './helpers';
import { createDB, insertData, updateToVersion } from './helpers';
import type {
AciString,
PniString,
@@ -16,6 +14,7 @@ import type {
import { normalizePni } from '../../types/ServiceId';
import { normalizeAci } from '../../util/normalizeAci';
import type {
WritableDB,
KyberPreKeyType,
PreKeyType,
SignedPreKeyType,
@@ -49,7 +48,7 @@ type TestingSignedKey = Omit<
};
describe('SQL/updateToSchemaVersion87(cleanup)', () => {
let db: Database;
let db: WritableDB;
const OUR_ACI = normalizeAci(
generateGuid(),
@@ -62,7 +61,7 @@ describe('SQL/updateToSchemaVersion87(cleanup)', () => {
let idCount = 0;
beforeEach(() => {
db = new SQL(':memory:');
db = createDB();
updateToVersion(db, 86);
});


@@ -2,11 +2,10 @@
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import type { Database } from '@signalapp/better-sqlite3';
import SQL from '@signalapp/better-sqlite3';
import { v4 as generateGuid } from 'uuid';
import { updateToVersion, insertData, getTableData } from './helpers';
import type { WritableDB } from '../../sql/Interface';
import { createDB, updateToVersion, insertData, getTableData } from './helpers';
const CONVO_ID = generateGuid();
const GROUP_ID = generateGuid();
@@ -17,10 +16,10 @@ const OUR_PNI = generateGuid();
const THEIR_UUID = generateGuid();
describe('SQL/updateToSchemaVersion88', () => {
let db: Database;
let db: WritableDB;
beforeEach(() => {
db = new SQL(':memory:');
db = createDB();
updateToVersion(db, 86);
insertData(db, 'items', [


@@ -2,8 +2,6 @@
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import type { Database } from '@signalapp/better-sqlite3';
import SQL from '@signalapp/better-sqlite3';
import { v4 as generateGuid } from 'uuid';
import { jsonToObject, sql } from '../../sql/util';
@@ -22,14 +20,14 @@ import type {
} from '../../sql/migrations/89-call-history';
import { getCallIdFromEra } from '../../util/callDisposition';
import { isValidUuid } from '../../util/isValidUuid';
import { updateToVersion } from './helpers';
import type { MessageType } from '../../sql/Interface';
import { createDB, updateToVersion } from './helpers';
import type { WritableDB, MessageType } from '../../sql/Interface';
describe('SQL/updateToSchemaVersion89', () => {
let db: Database;
let db: WritableDB;
beforeEach(() => {
db = new SQL(':memory:');
db = createDB();
updateToVersion(db, 88);
});


@@ -1,17 +1,16 @@
// Copyright 2023 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import type { Database } from '@signalapp/better-sqlite3';
import SQL from '@signalapp/better-sqlite3';
import { assert } from 'chai';
import { updateToVersion, insertData, getTableData } from './helpers';
import type { WritableDB } from '../../sql/Interface';
import { createDB, updateToVersion, insertData, getTableData } from './helpers';
describe('SQL/updateToSchemaVersion90', () => {
let db: Database;
let db: WritableDB;
beforeEach(() => {
db = new SQL(':memory:');
db = createDB();
});
afterEach(() => {


@@ -2,16 +2,14 @@
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import type { Database } from '@signalapp/better-sqlite3';
import SQL from '@signalapp/better-sqlite3';
import { v4 as generateGuid } from 'uuid';
import { range } from 'lodash';
import { getTableData, insertData, updateToVersion } from './helpers';
import { createDB, getTableData, insertData, updateToVersion } from './helpers';
import type { ServiceIdString } from '../../types/ServiceId';
import { normalizePni } from '../../types/ServiceId';
import { normalizeAci } from '../../util/normalizeAci';
import type { PreKeyType } from '../../sql/Interface';
import type { WritableDB, PreKeyType } from '../../sql/Interface';
type TestingPreKey = Omit<
PreKeyType,
@@ -21,7 +19,7 @@ type TestingPreKey = Omit<
};
describe('SQL/updateToSchemaVersion91', () => {
let db: Database;
let db: WritableDB;
const OUR_ACI = normalizeAci(generateGuid(), 'updateToSchemaVersion91 test');
const OUR_PNI = normalizePni(
@@ -31,7 +29,7 @@ describe('SQL/updateToSchemaVersion91', () => {
let idCount = 0;
beforeEach(() => {
db = new SQL(':memory:');
db = createDB();
updateToVersion(db, 90);
});


@@ -2,16 +2,18 @@
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import type { Database } from '@signalapp/better-sqlite3';
import SQL from '@signalapp/better-sqlite3';
import { v4 as generateGuid } from 'uuid';
import { range } from 'lodash';
import { insertData, updateToVersion } from './helpers';
import { createDB, insertData, updateToVersion } from './helpers';
import type { ServiceIdString } from '../../types/ServiceId';
import { normalizePni } from '../../types/ServiceId';
import { normalizeAci } from '../../util/normalizeAci';
import type { KyberPreKeyType, SignedPreKeyType } from '../../sql/Interface';
import type {
WritableDB,
KyberPreKeyType,
SignedPreKeyType,
} from '../../sql/Interface';
type TestingKyberKey = Omit<
KyberPreKeyType,
@@ -27,7 +29,7 @@ type TestingSignedKey = Omit<
};
describe('SQL/updateToSchemaVersion92', () => {
let db: Database;
let db: WritableDB;
const OUR_ACI = normalizeAci(generateGuid(), 'updateToSchemaVersion92 test');
const OUR_PNI = normalizePni(
@@ -37,7 +39,7 @@ describe('SQL/updateToSchemaVersion92', () => {
let idCount = 0;
beforeEach(() => {
db = new SQL(':memory:');
db = createDB();
updateToVersion(db, 91);
});


@@ -2,11 +2,10 @@
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import type { Database } from '@signalapp/better-sqlite3';
import SQL from '@signalapp/better-sqlite3';
import { v4 as generateGuid } from 'uuid';
import { updateToVersion, insertData, getTableData } from './helpers';
import type { WritableDB } from '../../sql/Interface';
import { createDB, updateToVersion, insertData, getTableData } from './helpers';
const CONVO_ID = generateGuid();
const OUR_ACI = generateGuid();
@@ -14,10 +13,10 @@ const OUR_UNPREFIXED_PNI = generateGuid();
const OUR_PREFIXED_PNI = `PNI:${OUR_UNPREFIXED_PNI}`;
describe('SQL/updateToSchemaVersion960', () => {
let db: Database;
let db: WritableDB;
beforeEach(() => {
db = new SQL(':memory:');
db = createDB();
updateToVersion(db, 950);
insertData(db, 'items', [


@@ -2,16 +2,15 @@
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import type { Database } from '@signalapp/better-sqlite3';
import SQL from '@signalapp/better-sqlite3';
import { updateToVersion, insertData, getTableData } from './helpers';
import type { WritableDB } from '../../sql/Interface';
import { createDB, updateToVersion, insertData, getTableData } from './helpers';
describe('SQL/updateToSchemaVersion990', () => {
let db: Database;
let db: WritableDB;
beforeEach(() => {
db = new SQL(':memory:');
db = createDB();
updateToVersion(db, 980);
});


@@ -2,27 +2,22 @@
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import type { Database } from '@signalapp/better-sqlite3';
import SQL from '@signalapp/better-sqlite3';
import { v4 as generateGuid } from 'uuid';
import {
_storyIdPredicate,
getJobsInQueueSync,
insertJobSync,
} from '../../sql/Server';
import { _storyIdPredicate, getJobsInQueue, insertJob } from '../../sql/Server';
import type { WritableDB } from '../../sql/Interface';
import { ReadStatus } from '../../messages/MessageReadStatus';
import { SeenStatus } from '../../MessageSeenStatus';
import { objectToJSON, sql, sqlJoin } from '../../sql/util';
import { BodyRange } from '../../types/BodyRange';
import type { AciString } from '../../types/ServiceId';
import { generateAci } from '../../types/ServiceId';
import { updateToVersion } from './helpers';
import { createDB, updateToVersion } from './helpers';
const OUR_UUID = generateGuid();
describe('SQL migrations test', () => {
let db: Database;
let db: WritableDB;
const addOurUuid = () => {
const value = {
@@ -71,7 +66,7 @@ describe('SQL migrations test', () => {
};
beforeEach(() => {
db = new SQL(':memory:');
db = createDB();
});
afterEach(() => {
@@ -1409,7 +1404,7 @@ describe('SQL migrations test', () => {
const CONVERSATION_ID_1 = generateGuid();
const CONVERSATION_ID_2 = generateGuid();
insertJobSync(db, {
insertJob(db, {
id: 'id-1',
timestamp: 1,
queueType: 'reactions',
@@ -1417,7 +1412,7 @@ describe('SQL migrations test', () => {
messageId: MESSAGE_ID_1,
},
});
insertJobSync(db, {
insertJob(db, {
id: 'id-2',
timestamp: 2,
queueType: 'reactions',
@@ -1425,12 +1420,12 @@ describe('SQL migrations test', () => {
messageId: MESSAGE_ID_2,
},
});
insertJobSync(db, {
insertJob(db, {
id: 'id-3-missing-data',
timestamp: 3,
queueType: 'reactions',
});
insertJobSync(db, {
insertJob(db, {
id: 'id-4-non-string-messageId',
timestamp: 1,
queueType: 'reactions',
@@ -1438,7 +1433,7 @@ describe('SQL migrations test', () => {
messageId: 4,
},
});
insertJobSync(db, {
insertJob(db, {
id: 'id-5-missing-message',
timestamp: 5,
queueType: 'reactions',
@@ -1446,7 +1441,7 @@ describe('SQL migrations test', () => {
messageId: 'missing',
},
});
insertJobSync(db, {
insertJob(db, {
id: 'id-6-missing-conversation',
timestamp: 6,
queueType: 'reactions',
@@ -1490,7 +1485,7 @@ describe('SQL migrations test', () => {
assert.strictEqual(reactionJobs.get(), 0, 'reaction jobs after');
assert.strictEqual(conversationJobs.get(), 2, 'conversation jobs after');
const jobs = getJobsInQueueSync(db, 'conversation');
const jobs = getJobsInQueue(db, 'conversation');
assert.deepEqual(jobs, [
{
@@ -1525,7 +1520,7 @@ describe('SQL migrations test', () => {
const CONVERSATION_ID_1 = generateGuid();
const CONVERSATION_ID_2 = generateGuid();
insertJobSync(db, {
insertJob(db, {
id: 'id-1',
timestamp: 1,
queueType: 'normal send',
@@ -1534,7 +1529,7 @@ describe('SQL migrations test', () => {
messageId: MESSAGE_ID_1,
},
});
insertJobSync(db, {
insertJob(db, {
id: 'id-2',
timestamp: 2,
queueType: 'normal send',
@@ -1543,7 +1538,7 @@ describe('SQL migrations test', () => {
messageId: MESSAGE_ID_2,
},
});
insertJobSync(db, {
insertJob(db, {
id: 'id-3-missing-data',
timestamp: 3,
queueType: 'normal send',
@@ -1567,7 +1562,7 @@ describe('SQL migrations test', () => {
assert.strictEqual(normalSend.get(), 0, 'normal send jobs after');
assert.strictEqual(conversationJobs.get(), 2, 'conversation jobs after');
const jobs = getJobsInQueueSync(db, 'conversation');
const jobs = getJobsInQueue(db, 'conversation');
assert.deepEqual(jobs, [
{
@@ -1742,7 +1737,7 @@ describe('SQL migrations test', () => {
assert.strictEqual(totalJobs.get(), 2, 'after total');
assert.strictEqual(reportSpamJobs.get(), 1, 'after report spam');
const jobs = getJobsInQueueSync(db, 'report spam');
const jobs = getJobsInQueue(db, 'report spam');
assert.deepEqual(jobs, [
{
@@ -2494,13 +2489,13 @@ describe('SQL migrations test', () => {
`
);
insertJobSync(db, {
insertJob(db, {
id: 'id-1',
timestamp: 1,
queueType: 'random job',
data: {},
});
insertJobSync(db, {
insertJob(db, {
id: 'id-2',
timestamp: 2,
queueType: 'delivery receipts',
@@ -2509,7 +2504,7 @@ describe('SQL migrations test', () => {
deliveryReceipts: [],
},
});
insertJobSync(db, {
insertJob(db, {
id: 'id-3',
timestamp: 3,
queueType: 'read receipts',
@@ -2518,7 +2513,7 @@ describe('SQL migrations test', () => {
readReceipts: [],
},
});
insertJobSync(db, {
insertJob(db, {
id: 'id-4',
timestamp: 4,
queueType: 'viewed receipts',
@@ -2527,7 +2522,7 @@ describe('SQL migrations test', () => {
viewedReceipt: {},
},
});
insertJobSync(db, {
insertJob(db, {
id: 'id-5',
timestamp: 5,
queueType: 'conversation',
@@ -2577,7 +2572,7 @@ describe('SQL migrations test', () => {
const CONVERSATION_ID_1 = generateGuid();
const CONVERSATION_ID_2 = generateGuid();
insertJobSync(db, {
insertJob(db, {
id: 'id-1',
timestamp: 1,
queueType: 'delivery receipts',
@@ -2591,7 +2586,7 @@ describe('SQL migrations test', () => {
],
},
});
insertJobSync(db, {
insertJob(db, {
id: 'id-2',
timestamp: 2,
queueType: 'delivery receipts',
@@ -2605,12 +2600,12 @@ describe('SQL migrations test', () => {
],
},
});
insertJobSync(db, {
insertJob(db, {
id: 'id-3-missing-data',
timestamp: 3,
queueType: 'delivery receipts',
});
insertJobSync(db, {
insertJob(db, {
id: 'id-4-non-string-messageId',
timestamp: 4,
queueType: 'delivery receipts',
@@ -2624,7 +2619,7 @@ describe('SQL migrations test', () => {
],
},
});
insertJobSync(db, {
insertJob(db, {
id: 'id-5-missing-message',
timestamp: 5,
queueType: 'delivery receipts',
@@ -2638,7 +2633,7 @@ describe('SQL migrations test', () => {
],
},
});
insertJobSync(db, {
insertJob(db, {
id: 'id-6-missing-conversation',
timestamp: 6,
queueType: 'delivery receipts',
@@ -2652,7 +2647,7 @@ describe('SQL migrations test', () => {
],
},
});
insertJobSync(db, {
insertJob(db, {
id: 'id-7-missing-delivery-receipts',
timestamp: 7,
queueType: 'delivery receipts',
@ -2698,7 +2693,7 @@ describe('SQL migrations test', () => {
assert.strictEqual(conversationJobs.get(), 2, 'conversation jobs after');
assert.strictEqual(deliveryJobs.get(), 0, 'delivery jobs after');
const jobs = getJobsInQueueSync(db, 'conversation');
const jobs = getJobsInQueue(db, 'conversation');
assert.deepEqual(jobs, [
{
@ -2748,7 +2743,7 @@ describe('SQL migrations test', () => {
const CONVERSATION_ID_1 = generateGuid();
const CONVERSATION_ID_2 = generateGuid();
insertJobSync(db, {
insertJob(db, {
id: 'id-1',
timestamp: 1,
queueType: 'read receipts',
@ -2762,7 +2757,7 @@ describe('SQL migrations test', () => {
],
},
});
insertJobSync(db, {
insertJob(db, {
id: 'id-2',
timestamp: 2,
queueType: 'read receipts',
@ -2776,12 +2771,12 @@ describe('SQL migrations test', () => {
],
},
});
insertJobSync(db, {
insertJob(db, {
id: 'id-3-missing-data',
timestamp: 3,
queueType: 'read receipts',
});
insertJobSync(db, {
insertJob(db, {
id: 'id-4-non-string-messageId',
timestamp: 4,
queueType: 'read receipts',
@ -2795,7 +2790,7 @@ describe('SQL migrations test', () => {
],
},
});
insertJobSync(db, {
insertJob(db, {
id: 'id-5-missing-message',
timestamp: 5,
queueType: 'read receipts',
@ -2809,7 +2804,7 @@ describe('SQL migrations test', () => {
],
},
});
insertJobSync(db, {
insertJob(db, {
id: 'id-6-missing-conversation',
timestamp: 6,
queueType: 'read receipts',
@ -2823,7 +2818,7 @@ describe('SQL migrations test', () => {
],
},
});
insertJobSync(db, {
insertJob(db, {
id: 'id-7-missing-read-receipts',
timestamp: 7,
queueType: 'read receipts',
@ -2867,7 +2862,7 @@ describe('SQL migrations test', () => {
assert.strictEqual(conversationJobs.get(), 2, 'conversation jobs after');
assert.strictEqual(readJobs.get(), 0, 'read jobs after');
const jobs = getJobsInQueueSync(db, 'conversation');
const jobs = getJobsInQueue(db, 'conversation');
assert.deepEqual(jobs, [
{
@ -2917,7 +2912,7 @@ describe('SQL migrations test', () => {
const CONVERSATION_ID_1 = generateGuid();
const CONVERSATION_ID_2 = generateGuid();
insertJobSync(db, {
insertJob(db, {
id: 'id-1',
timestamp: 1,
queueType: 'viewed receipts',
@ -2929,7 +2924,7 @@ describe('SQL migrations test', () => {
},
},
});
insertJobSync(db, {
insertJob(db, {
id: 'id-2',
timestamp: 2,
queueType: 'viewed receipts',
@ -2941,12 +2936,12 @@ describe('SQL migrations test', () => {
},
},
});
insertJobSync(db, {
insertJob(db, {
id: 'id-3-missing-data',
timestamp: 3,
queueType: 'viewed receipts',
});
insertJobSync(db, {
insertJob(db, {
id: 'id-4-non-string-messageId',
timestamp: 4,
queueType: 'viewed receipts',
@ -2958,7 +2953,7 @@ describe('SQL migrations test', () => {
},
},
});
insertJobSync(db, {
insertJob(db, {
id: 'id-5-missing-message',
timestamp: 5,
queueType: 'viewed receipts',
@ -2970,7 +2965,7 @@ describe('SQL migrations test', () => {
},
},
});
insertJobSync(db, {
insertJob(db, {
id: 'id-6-missing-conversation',
timestamp: 6,
queueType: 'viewed receipts',
@ -2982,7 +2977,7 @@ describe('SQL migrations test', () => {
},
},
});
insertJobSync(db, {
insertJob(db, {
id: 'id-7-missing-viewed-receipt',
timestamp: 7,
queueType: 'viewed receipts',
@ -3028,7 +3023,7 @@ describe('SQL migrations test', () => {
assert.strictEqual(conversationJobs.get(), 2, 'conversation jobs after');
assert.strictEqual(viewedJobs.get(), 0, 'viewed jobs after');
const jobs = getJobsInQueueSync(db, 'conversation');
const jobs = getJobsInQueue(db, 'conversation');
assert.deepEqual(jobs, [
{

View file

@ -13,6 +13,14 @@ import { fakeAttachment } from '../../test-both/helpers/fakeAttachment';
import { DAY } from '../../util/durations';
import { migrateDataToFileSystem } from '../../util/attachments/migrateDataToFilesystem';
const FAKE_LOCAL_ATTACHMENT: Attachment.LocalAttachmentV2Type = {
version: 2,
size: 1,
plaintextHash: 'bogus',
path: 'fake',
localKey: 'absent',
};
describe('Attachment', () => {
describe('getFileExtension', () => {
it('should return file extension from content type', () => {
@ -418,18 +426,15 @@ describe('Attachment', () => {
};
const expected = {
...FAKE_LOCAL_ATTACHMENT,
contentType: MIME.IMAGE_JPEG,
path: 'abc/abcdefgh123456789',
fileName: 'foo.jpg',
plaintextHash:
'9dac71e94805b04964a99011d74da584301362712570e98354d535c3cd3fdfca',
size: 1111,
};
const expectedAttachmentData = Bytes.fromString('Above us only sky');
const writeNewAttachmentData = async (attachmentData: Uint8Array) => {
assert.deepEqual(attachmentData, expectedAttachmentData);
return 'abc/abcdefgh123456789';
return FAKE_LOCAL_ATTACHMENT;
};
const actual = await migrateDataToFileSystem(input, {
@ -452,7 +457,7 @@ describe('Attachment', () => {
size: 1111,
};
const writeNewAttachmentData = async () => 'abc/abcdefgh123456789';
const writeNewAttachmentData = async () => FAKE_LOCAL_ATTACHMENT;
const actual = await migrateDataToFileSystem(input, {
writeNewAttachmentData,
@ -470,7 +475,7 @@ describe('Attachment', () => {
size: 1111,
};
const writeNewAttachmentData = async () => 'abc/abcdefgh123456789';
const writeNewAttachmentData = async () => FAKE_LOCAL_ATTACHMENT;
const actual = await migrateDataToFileSystem(input, {
writeNewAttachmentData,

View file

@ -113,7 +113,6 @@ describe('Contact', () => {
const regionCode = '1';
const firstNumber = '+1202555000';
const serviceId = undefined;
const getAbsoluteAttachmentPath = (path: string) => `absolute:${path}`;
it('eliminates avatar if it has had an attachment download error', () => {
const contact = {
@ -147,7 +146,6 @@ describe('Contact', () => {
regionCode,
firstNumber,
serviceId,
getAbsoluteAttachmentPath,
});
assert.deepEqual(actual, expected);
});
@ -192,12 +190,11 @@ describe('Contact', () => {
regionCode,
firstNumber,
serviceId,
getAbsoluteAttachmentPath,
});
assert.deepEqual(actual, expected);
});
it('calculates absolute path', () => {
it('calculates local url', () => {
const fullAci = generateAci();
const contact = {
@ -225,7 +222,7 @@ describe('Contact', () => {
avatar: {
isProfile: true,
avatar: fakeAttachment({
path: 'absolute:somewhere',
path: 'attachment://v1/somewhere?size=10304&contentType=image%2Fgif',
contentType: IMAGE_GIF,
}),
},
@ -237,7 +234,6 @@ describe('Contact', () => {
regionCode,
firstNumber,
serviceId: fullAci,
getAbsoluteAttachmentPath,
});
assert.deepEqual(actual, expected);
});

View file

@ -11,9 +11,20 @@ import * as MIME from '../../types/MIME';
import type { EmbeddedContactType } from '../../types/EmbeddedContact';
import type { MessageAttributesType } from '../../model-types.d';
import type { AttachmentType } from '../../types/Attachment';
import type {
AddressableAttachmentType,
LocalAttachmentV2Type,
} from '../../types/Attachment';
import type { LoggerType } from '../../types/Logging';
const FAKE_LOCAL_ATTACHMENT: LocalAttachmentV2Type = {
version: 2,
size: 1,
plaintextHash: 'bogus',
path: 'fake',
localKey: 'absent',
};
describe('Message', () => {
const logger: LoggerType = {
warn: () => null,
@ -42,9 +53,6 @@ describe('Message', () => {
props?: Partial<Message.ContextType>
): Message.ContextType {
return {
getAbsoluteAttachmentPath: (_path: string) =>
'fake-absolute-attachment-path',
getAbsoluteStickerPath: (_path: string) => 'fake-absolute-sticker-path',
getImageDimensions: async (_params: {
objectUrl: string;
logger: LoggerType;
@ -70,218 +78,26 @@ describe('Message', () => {
logger: LoggerType;
}) => new Blob(),
revokeObjectUrl: (_objectUrl: string) => undefined,
writeNewAttachmentData: async (_data: Uint8Array) =>
'fake-attachment-path',
writeNewStickerData: async (_data: Uint8Array) => 'fake-sticker-path',
readAttachmentData: async (
attachment: Partial<AddressableAttachmentType>
): Promise<Uint8Array> => {
assert.strictEqual(attachment.version, 2);
return Buffer.from('old data');
},
writeNewAttachmentData: async (_data: Uint8Array) => {
return FAKE_LOCAL_ATTACHMENT;
},
writeNewStickerData: async (_data: Uint8Array) => ({
version: 2,
path: 'fake-sticker-path',
size: 1,
localKey: '123',
plaintextHash: 'hash',
}),
deleteOnDisk: async (_path: string) => undefined,
...props,
};
}
const writeExistingAttachmentData = () => Promise.resolve('path');
describe('createAttachmentDataWriter', () => {
it('should ignore messages that didnt go through attachment migration', async () => {
const input = getDefaultMessage({
body: 'Imagine there is no heaven…',
schemaVersion: 2,
});
const expected = getDefaultMessage({
body: 'Imagine there is no heaven…',
schemaVersion: 2,
});
const actual = await Message.createAttachmentDataWriter({
writeExistingAttachmentData,
logger,
})(input);
assert.deepEqual(actual, expected);
});
it('should ignore messages without attachments', async () => {
const input = getDefaultMessage({
body: 'Imagine there is no heaven…',
schemaVersion: 4,
attachments: [],
});
const expected = getDefaultMessage({
body: 'Imagine there is no heaven…',
schemaVersion: 4,
attachments: [],
});
const actual = await Message.createAttachmentDataWriter({
writeExistingAttachmentData,
logger,
})(input);
assert.deepEqual(actual, expected);
});
it('should write attachments to file system on original path', async () => {
const input = getDefaultMessage({
body: 'Imagine there is no heaven…',
schemaVersion: 4,
attachments: [
{
contentType: MIME.IMAGE_GIF,
size: 3534,
path: 'ab/abcdefghi',
data: Bytes.fromString('Its easy if you try'),
},
],
});
const expected = getDefaultMessage({
body: 'Imagine there is no heaven…',
schemaVersion: 4,
attachments: [
{
contentType: MIME.IMAGE_GIF,
size: 3534,
path: 'ab/abcdefghi',
},
],
contact: [],
preview: [],
});
// eslint-disable-next-line @typescript-eslint/no-shadow
const writeExistingAttachmentData = async (
attachment: Pick<AttachmentType, 'data' | 'path'>
) => {
assert.equal(attachment.path, 'ab/abcdefghi');
assert.strictEqual(
Bytes.toString(attachment.data || new Uint8Array()),
'Its easy if you try'
);
return 'path';
};
const actual = await Message.createAttachmentDataWriter({
writeExistingAttachmentData,
logger,
})(input);
assert.deepEqual(actual, expected);
});
it('should process quote attachment thumbnails', async () => {
const input = getDefaultMessage({
body: 'Imagine there is no heaven…',
schemaVersion: 4,
attachments: [],
quote: {
id: 3523,
isViewOnce: false,
messageId: 'some-message-id',
referencedMessageNotFound: false,
attachments: [
{
thumbnail: {
path: 'ab/abcdefghi',
data: Bytes.fromString('Its easy if you try'),
},
},
],
},
});
const expected = getDefaultMessage({
body: 'Imagine there is no heaven…',
schemaVersion: 4,
attachments: [],
quote: {
id: 3523,
isViewOnce: false,
messageId: 'some-message-id',
referencedMessageNotFound: false,
attachments: [
{
thumbnail: {
path: 'ab/abcdefghi',
},
},
],
},
contact: [],
preview: [],
});
// eslint-disable-next-line @typescript-eslint/no-shadow
const writeExistingAttachmentData = async (
attachment: Pick<AttachmentType, 'data' | 'path'>
) => {
assert.equal(attachment.path, 'ab/abcdefghi');
assert.strictEqual(
Bytes.toString(attachment.data || new Uint8Array()),
'Its easy if you try'
);
return 'path';
};
const actual = await Message.createAttachmentDataWriter({
writeExistingAttachmentData,
logger,
})(input);
assert.deepEqual(actual, expected);
});
it('should process contact avatars', async () => {
const input = getDefaultMessage({
body: 'Imagine there is no heaven…',
schemaVersion: 4,
attachments: [],
contact: [
{
name: { givenName: 'john' },
avatar: {
isProfile: false,
avatar: {
contentType: MIME.IMAGE_PNG,
size: 47,
path: 'ab/abcdefghi',
data: Bytes.fromString('Its easy if you try'),
},
},
},
],
});
const expected = getDefaultMessage({
body: 'Imagine there is no heaven…',
schemaVersion: 4,
attachments: [],
contact: [
{
name: { givenName: 'john' },
avatar: {
isProfile: false,
avatar: {
contentType: MIME.IMAGE_PNG,
size: 47,
path: 'ab/abcdefghi',
},
},
},
],
preview: [],
});
// eslint-disable-next-line @typescript-eslint/no-shadow
const writeExistingAttachmentData = async (
attachment: Pick<AttachmentType, 'data' | 'path'>
) => {
assert.equal(attachment.path, 'ab/abcdefghi');
assert.strictEqual(
Bytes.toString(attachment.data || new Uint8Array()),
'Its easy if you try'
);
return 'path';
};
const actual = await Message.createAttachmentDataWriter({
writeExistingAttachmentData,
logger,
})(input);
assert.deepEqual(actual, expected);
return 'path';
});
});
describe('initializeSchemaVersion', () => {
it('should ignore messages with previously inherited schema', () => {
@ -372,13 +188,10 @@ describe('Message', () => {
const expected = getDefaultMessage({
attachments: [
{
...FAKE_LOCAL_ATTACHMENT,
contentType: MIME.AUDIO_AAC,
flags: 1,
path: 'abc/abcdefg',
fileName: 'test\uFFFDfig.exe',
size: 1111,
plaintextHash:
'f191b44995ef464dbf1943bc686008c08e95dab78cbdfe7bb5e257a8214d5b15',
},
],
hasAttachments: 1,
@ -395,7 +208,7 @@ describe('Message', () => {
Bytes.toString(attachmentData),
expectedAttachmentData
);
return 'abc/abcdefg';
return FAKE_LOCAL_ATTACHMENT;
},
});
const actual = await Message.upgradeSchema(input, context);
@ -707,7 +520,7 @@ describe('Message', () => {
attachments: [
{
fileName: 'manifesto.txt',
contentType: 'text/plain',
contentType: MIME.TEXT_ATTACHMENT,
},
],
id: 34233,
@ -726,7 +539,7 @@ describe('Message', () => {
it('does not eliminate thumbnails with missing data field', async () => {
const upgradeAttachment = sinon
.stub()
.returns({ fileName: 'processed!' });
.returns({ contentType: MIME.IMAGE_GIF, size: 42 });
const upgradeVersion = Message._mapQuotedAttachments(upgradeAttachment);
const message = getDefaultMessage({
@ -736,9 +549,10 @@ describe('Message', () => {
attachments: [
{
fileName: 'cat.gif',
contentType: 'image/gif',
contentType: MIME.IMAGE_GIF,
thumbnail: {
fileName: 'not yet downloaded!',
contentType: MIME.IMAGE_GIF,
size: 128,
},
},
],
@ -754,10 +568,11 @@ describe('Message', () => {
text: 'hey!',
attachments: [
{
contentType: 'image/gif',
contentType: MIME.IMAGE_GIF,
fileName: 'cat.gif',
thumbnail: {
fileName: 'processed!',
contentType: MIME.IMAGE_GIF,
size: 42,
},
},
],
@ -777,6 +592,8 @@ describe('Message', () => {
it('calls provided async function for each quoted attachment', async () => {
const upgradeAttachment = sinon.stub().resolves({
path: '/new/path/on/disk',
contentType: MIME.TEXT_ATTACHMENT,
size: 100,
});
const upgradeVersion = Message._mapQuotedAttachments(upgradeAttachment);
@ -786,8 +603,11 @@ describe('Message', () => {
text: 'hey!',
attachments: [
{
contentType: MIME.TEXT_ATTACHMENT,
thumbnail: {
data: 'data is here',
contentType: MIME.TEXT_ATTACHMENT,
size: 100,
data: Buffer.from('data is here'),
},
},
],
@ -803,7 +623,10 @@ describe('Message', () => {
text: 'hey!',
attachments: [
{
contentType: MIME.TEXT_ATTACHMENT,
thumbnail: {
contentType: MIME.TEXT_ATTACHMENT,
size: 100,
path: '/new/path/on/disk',
},
},

View file

@ -7,6 +7,7 @@ import { assert } from 'chai';
import { getOSFunctions } from '../../util/os/shared';
import * as Settings from '../../types/Settings';
import { SystemTraySetting } from '../../types/SystemTraySetting';
describe('Settings', () => {
let sandbox: Sinon.SinonSandbox;
@ -128,26 +129,59 @@ describe('Settings', () => {
it('returns false on macOS', () => {
sandbox.stub(process, 'platform').value('darwin');
const OS = getOSFunctions(os.release());
assert.isFalse(Settings.isSystemTraySupported(OS, '1.2.3'));
assert.isFalse(Settings.isSystemTraySupported(OS));
});
it('returns true on Windows 8', () => {
sandbox.stub(process, 'platform').value('win32');
sandbox.stub(os, 'release').returns('8.0.0');
const OS = getOSFunctions(os.release());
assert.isTrue(Settings.isSystemTraySupported(OS, '1.2.3'));
assert.isTrue(Settings.isSystemTraySupported(OS));
});
it('returns false on Linux production', () => {
it('returns true on Linux', () => {
sandbox.stub(process, 'platform').value('linux');
const OS = getOSFunctions(os.release());
assert.isFalse(Settings.isSystemTraySupported(OS, '1.2.3'));
assert.isTrue(Settings.isSystemTraySupported(OS));
});
});
describe('getDefaultSystemTraySetting', () => {
it('returns DoNotUseSystemTray on unsupported OS', () => {
sandbox.stub(process, 'platform').value('darwin');
const OS = getOSFunctions(os.release());
assert.strictEqual(
Settings.getDefaultSystemTraySetting(OS, '1.2.3'),
SystemTraySetting.DoNotUseSystemTray
);
});
it('returns true on Linux beta', () => {
it('returns MinimizeToSystemTray on Windows 8', () => {
sandbox.stub(process, 'platform').value('win32');
sandbox.stub(os, 'release').returns('8.0.0');
const OS = getOSFunctions(os.release());
assert.strictEqual(
Settings.getDefaultSystemTraySetting(OS, '1.2.3'),
SystemTraySetting.MinimizeToSystemTray
);
});
it('returns MinimizeToSystemTray on Linux Beta', () => {
sandbox.stub(process, 'platform').value('linux');
const OS = getOSFunctions(os.release());
assert.isTrue(Settings.isSystemTraySupported(OS, '1.2.3-beta.4'));
assert.strictEqual(
Settings.getDefaultSystemTraySetting(OS, '1.2.3-beta.1'),
SystemTraySetting.MinimizeToSystemTray
);
});
it('returns DoNotUseSystemTray on Linux Prod', () => {
sandbox.stub(process, 'platform').value('linux');
const OS = getOSFunctions(os.release());
assert.strictEqual(
Settings.getDefaultSystemTraySetting(OS, '1.2.3'),
SystemTraySetting.DoNotUseSystemTray
);
});
});
});
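// A rough sketch (an assumption, not the real types/Settings implementation) of the
// default-tray logic these tests pin down: unsupported platforms never default to the
// tray, Windows does, and Linux only does for beta builds. Using process.platform and
// `includes('beta')` here is a simplification of the real platform/version checks.
function getDefaultSystemTraySettingSketch(
  OS: ReturnType<typeof getOSFunctions>,
  appVersion: string
): SystemTraySetting {
  if (!Settings.isSystemTraySupported(OS)) {
    return SystemTraySetting.DoNotUseSystemTray;
  }
  if (process.platform === 'linux' && !appVersion.includes('beta')) {
    return SystemTraySetting.DoNotUseSystemTray;
  }
  return SystemTraySetting.MinimizeToSystemTray;
}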

View file

@ -338,7 +338,7 @@ describe('updater/differential', () => {
await assert.isRejected(
download(outFile, data, {
gotOptions: {
...getGotOptions(),
...(await getGotOptions()),
timeout: {
connect: 0.5 * durations.SECOND,
lookup: 0.5 * durations.SECOND,

View file

@ -0,0 +1,132 @@
// Copyright 2023 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import { Readable, Writable } from 'stream';
import { pipeline } from 'stream/promises';
import { BufferWriter } from 'protobufjs';
import { DelimitedStream } from '../../util/DelimitedStream';
describe('DelimitedStream', () => {
function collect(out: Array<string>): Writable {
return new Writable({
write(data, _enc, callback) {
out.push(data.toString());
callback(null);
},
});
}
async function strideTest(
data: Uint8Array,
result: ReadonlyArray<string>
): Promise<void> {
// Just to keep reasonable run times
const decrease = Math.max(1, Math.round(data.length / 256));
for (let stride = data.length; stride > 0; stride -= decrease) {
const out = new Array<string>();
// eslint-disable-next-line no-await-in-loop
await pipeline(
Readable.from(
(function* () {
for (let offset = 0; offset < data.length; offset += stride) {
yield data.slice(offset, offset + stride);
}
})()
),
new DelimitedStream(),
collect(out)
);
assert.deepStrictEqual(out, result, `Stride: ${stride}`);
}
}
it('should parse single-byte delimited data', async () => {
const w = new BufferWriter();
w.string('a');
w.string('bc');
await strideTest(w.finish(), ['a', 'bc']);
});
it('should parse two-byte delimited data', async () => {
const w = new BufferWriter();
w.string('a'.repeat(129));
w.string('b'.repeat(154));
await strideTest(w.finish(), ['a'.repeat(129), 'b'.repeat(154)]);
});
it('should parse three-byte delimited data', async () => {
const w = new BufferWriter();
w.string('a'.repeat(32000));
w.string('b'.repeat(32500));
await strideTest(w.finish(), ['a'.repeat(32000), 'b'.repeat(32500)]);
});
it('should parse mixed delimited data', async () => {
const w = new BufferWriter();
w.string('a');
w.string('b'.repeat(129));
w.string('c'.repeat(32000));
w.string('d'.repeat(32));
w.string('e'.repeat(415));
w.string('f'.repeat(33321));
await strideTest(w.finish(), [
'a',
'b'.repeat(129),
'c'.repeat(32000),
'd'.repeat(32),
'e'.repeat(415),
'f'.repeat(33321),
]);
});
it('should error on incomplete prefix', async () => {
const w = new BufferWriter();
w.string('a'.repeat(32000));
const out = new Array<string>();
await assert.isRejected(
pipeline(
Readable.from(w.finish().slice(0, 1)),
new DelimitedStream(),
collect(out)
),
'Unfinished prefix'
);
});
it('should error on incomplete data', async () => {
const w = new BufferWriter();
w.string('a'.repeat(32000));
const out = new Array<string>();
await assert.isRejected(
pipeline(
Readable.from(w.finish().slice(0, 10)),
new DelimitedStream(),
collect(out)
),
'Unfinished data'
);
});
it('should error on prefix overflow', async () => {
const out = new Array<string>();
await assert.isRejected(
pipeline(
Readable.from(Buffer.from([0xff, 0xff, 0xff, 0xff, 0xff])),
new DelimitedStream(),
collect(out)
),
'Delimiter encoding overflow'
);
});
});
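// A rough sketch of the framing DelimitedStream consumes: each message is prefixed by
// its byte length encoded as a protobuf varint, which is exactly what
// BufferWriter.string() (imported above) emits — hence its use for the fixtures.
// Decoding by hand, assuming well-formed input (the real stream errors on unfinished
// prefixes/data and on prefix overflow, as tested above):
function* decodeDelimitedSketch(data: Uint8Array): Generator<Uint8Array> {
  let offset = 0;
  while (offset < data.length) {
    // Read the varint length prefix: 7 payload bits per byte, MSB set means another
    // prefix byte follows.
    let length = 0;
    let shift = 0;
    for (;;) {
      const byte = data[offset];
      offset += 1;
      length |= (byte & 0x7f) << shift;
      shift += 7;
      if ((byte & 0x80) === 0) {
        break;
      }
    }
    yield data.slice(offset, offset + length);
    offset += length;
  }
}

// Usage: const w = new BufferWriter(); w.string('a'); w.string('bc');
// [...decodeDelimitedSketch(w.finish())] yields the bytes of 'a' and of 'bc'.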

View file

@ -0,0 +1,80 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import type { PeekInfo } from '@signalapp/ringrtc';
import uuid from 'uuid';
import {
getPeerIdFromConversation,
getCallIdFromEra,
getGroupCallMeta,
} from '../../util/callDisposition';
import {
getDefaultConversation,
getDefaultGroup,
} from '../../test-both/helpers/getDefaultConversation';
import { uuidToBytes } from '../../util/uuidToBytes';
const MOCK_ERA = 'abc';
const MOCK_CALL_ID = '16919744041952114874';
const MOCK_PEEK_INFO_BASE: PeekInfo = {
devices: [],
deviceCount: 0,
deviceCountIncludingPendingDevices: 0,
deviceCountExcludingPendingDevices: 0,
pendingUsers: [],
};
describe('utils/callDisposition', () => {
describe('getCallIdFromEra', () => {
it('returns callId from era', () => {
// just to ensure the mock is correct
assert.strictEqual(getCallIdFromEra(MOCK_ERA), MOCK_CALL_ID);
});
});
describe('getGroupCallMeta', () => {
it('returns null if missing eraId or creator', () => {
assert.isNull(getGroupCallMeta({ ...MOCK_PEEK_INFO_BASE }));
assert.isNull(
getGroupCallMeta({ ...MOCK_PEEK_INFO_BASE, eraId: MOCK_ERA })
);
assert.isNull(
getGroupCallMeta({
...MOCK_PEEK_INFO_BASE,
creator: Buffer.from(uuidToBytes(uuid())),
})
);
});
it('returns group call meta when all fields are provided', () => {
const id = uuid();
assert.deepStrictEqual(
getGroupCallMeta({
...MOCK_PEEK_INFO_BASE,
eraId: MOCK_ERA,
creator: Buffer.from(uuidToBytes(id)),
}),
{ callId: MOCK_CALL_ID, ringerId: id }
);
});
});
describe('getPeerIdFromConversation', () => {
it('returns serviceId for direct conversation', () => {
const conversation = getDefaultConversation();
assert.strictEqual(
getPeerIdFromConversation(conversation),
conversation.serviceId
);
});
it('returns groupId for group conversation', () => {
const conversation = getDefaultGroup();
assert.strictEqual(
getPeerIdFromConversation(conversation),
conversation.groupId
);
});
});
});

View file

@ -0,0 +1,40 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import { Readable } from 'node:stream';
import { pipeline } from 'node:stream/promises';
import { finalStream } from '../../util/finalStream';
describe('finalStream', () => {
it('should invoke callback before pipeline resolves', async () => {
let called = false;
await pipeline(
Readable.from(['abc']),
finalStream(async () => {
// Forcing next tick
await Promise.resolve();
called = true;
})
);
assert.isTrue(called);
});
it('should propagate errors from callback', async () => {
await assert.isRejected(
pipeline(
Readable.from(['abc']),
finalStream(async () => {
// Forcing next tick
await Promise.resolve();
throw new Error('failure');
})
),
'failure'
);
});
});
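// A rough sketch of a finalStream-style helper (an assumption, not the actual
// util/finalStream): a pass-through Transform whose flush hook awaits an async
// callback, so the callback settles before pipeline() resolves and a callback error
// rejects the pipeline — the two behaviours asserted above.
import { Transform } from 'node:stream';

function finalStreamSketch(onFinal: () => Promise<void>): Transform {
  return new Transform({
    transform(chunk, _encoding, callback) {
      callback(null, chunk); // pass data through untouched
    },
    flush(callback) {
      onFinal().then(() => callback(null), callback);
    },
  });
}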

View file

@ -0,0 +1,22 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import assert from 'node:assert/strict';
import { resolveCanonicalLocales } from '../../util/resolveCanonicalLocales';
describe('resolveCanonicalLocales', () => {
it('returns an array of canonical locales', () => {
assert.deepEqual(
resolveCanonicalLocales(['EN', 'EN-US', 'EN-GB', 'FR', 'FR-FR']),
['en', 'en-US', 'en-GB', 'fr', 'fr-FR']
);
});
it('removes invalid locales', () => {
assert.deepEqual(resolveCanonicalLocales(['!@#$', 'POSIX', 'en']), ['en']);
});
it('defaults to en if no valid locales are provided', () => {
assert.deepEqual(resolveCanonicalLocales(['!@#$']), ['en']);
});
});

View file

@ -0,0 +1,42 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import { rgbToHSL } from '../../util/rgbToHSL';
describe('rgbToHSL', () => {
it('converts pure rgb colors', () => {
assert.deepStrictEqual(rgbToHSL(255, 0, 0), {
h: 0,
s: 1,
l: 0.5,
});
assert.deepStrictEqual(rgbToHSL(0, 255, 0), {
h: 120,
s: 1,
l: 0.5,
});
assert.deepStrictEqual(rgbToHSL(0, 0, 255), {
h: 240,
s: 1,
l: 0.5,
});
});
it('converts random sampled rgb colors', () => {
assert.deepStrictEqual(rgbToHSL(27, 132, 116), {
h: 170.85714285714283,
s: 0.6603773584905662,
l: 0.31176470588235294,
});
assert.deepStrictEqual(rgbToHSL(27, 175, 82), {
h: 142.2972972972973,
s: 0.7326732673267328,
l: 0.396078431372549,
});
});
});
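// A rough sketch of the standard RGB→HSL conversion these expectations encode (hue in
// degrees, saturation and lightness in [0, 1]); the real util/rgbToHSL may be written
// differently but should agree numerically with the samples above.
function rgbToHSLSketch(r: number, g: number, b: number) {
  const rn = r / 255;
  const gn = g / 255;
  const bn = b / 255;
  const max = Math.max(rn, gn, bn);
  const min = Math.min(rn, gn, bn);
  const l = (max + min) / 2;
  const d = max - min;
  if (d === 0) {
    return { h: 0, s: 0, l }; // achromatic
  }
  const s = d / (1 - Math.abs(2 * l - 1));
  let h: number;
  if (max === rn) {
    h = 60 * (((gn - bn) / d) % 6);
  } else if (max === gn) {
    h = 60 * ((bn - rn) / d + 2);
  } else {
    h = 60 * ((rn - gn) / d + 4);
  }
  return { h: h < 0 ? h + 360 : h, s, l };
}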

View file

@ -6,53 +6,13 @@ import { assert } from 'chai';
import { normalizeGroupCallTimestamp } from '../../../util/ringrtc/normalizeGroupCallTimestamp';
describe('normalizeGroupCallTimestamp', () => {
it('returns undefined if passed NaN', () => {
assert.isUndefined(normalizeGroupCallTimestamp(NaN));
});
it('returns undefined if passed 0', () => {
assert.isUndefined(normalizeGroupCallTimestamp(0));
assert.isUndefined(normalizeGroupCallTimestamp(-0));
});
it('returns undefined if passed a negative number', () => {
assert.isUndefined(normalizeGroupCallTimestamp(-1));
assert.isUndefined(normalizeGroupCallTimestamp(-123));
});
it('returns undefined if passed a string that cannot be parsed as a number', () => {
assert.isUndefined(normalizeGroupCallTimestamp(''));
assert.isUndefined(normalizeGroupCallTimestamp('uhhh'));
});
it('returns undefined if passed a BigInt of 0', () => {
assert.isUndefined(normalizeGroupCallTimestamp(BigInt(0)));
});
it('returns undefined if passed a negative BigInt', () => {
assert.isUndefined(normalizeGroupCallTimestamp(BigInt(-1)));
assert.isUndefined(normalizeGroupCallTimestamp(BigInt(-123)));
});
it('returns undefined if passed a non-parseable type', () => {
[
undefined,
null,
{},
[],
[123],
Symbol('123'),
{ [Symbol.toPrimitive]: () => 123 },
// eslint-disable-next-line no-new-wrappers
new Number(123),
].forEach(value => {
assert.isUndefined(normalizeGroupCallTimestamp(value));
});
});
it('returns positive numbers passed in', () => {
assert.strictEqual(normalizeGroupCallTimestamp(1), 1);
assert.strictEqual(normalizeGroupCallTimestamp(123), 123);
it('returns undefined if passed 0', () => {
assert.isUndefined(normalizeGroupCallTimestamp('0'));
});
it('parses strings as numbers', () => {
@ -66,9 +26,4 @@ describe('normalizeGroupCallTimestamp', () => {
123456789012345
);
});
it('converts positive BigInts to numbers', () => {
assert.strictEqual(normalizeGroupCallTimestamp(BigInt(1)), 1);
assert.strictEqual(normalizeGroupCallTimestamp(BigInt(123)), 123);
});
});

View file

@ -142,16 +142,6 @@ describe('signalRoutes', () => {
check(`sgnl://signal.link/call#key=${foo}`, result);
});
it('artAuth', () => {
const result: ParsedSignalRoute = {
key: 'artAuth',
args: { token: foo, pubKey: foo },
};
const check = createCheck({ hasWebUrl: false });
check(`sgnl://art-auth/?token=${foo}&pub_key=${foo}`, result);
check(`sgnl://art-auth?token=${foo}&pub_key=${foo}`, result);
});
it('artAddStickers', () => {
const result: ParsedSignalRoute = {
key: 'artAddStickers',

View file

@ -0,0 +1,39 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import assert from 'node:assert/strict';
import { unicodeSlice } from '../../util/unicodeSlice';
import { byteLength } from '../../Bytes';
describe('unicodeSlice()', () => {
function test(
title: string,
input: string,
begin: number,
end: number,
expected: string,
expectedSize: number
): void {
it(title, () => {
const result = unicodeSlice(input, begin, end);
assert.strictEqual(result, expected);
assert.strictEqual(byteLength(result), expectedSize);
});
}
test('one-byte chars', '123456', 2, 4, '34', 2);
test('past max length', '123456', 0, 100, '123456', 6);
test('end before start', '123456', 5, 1, '', 0);
test('negative start', '123456', -5, 4, '1234', 4);
test('negative end', '123456', 0, -5, '', 0);
test('end at start', '123456', 3, 3, '', 0);
test('multi-byte char', 'x€x', 1, 4, '€', 3);
test('multi-byte char slice before end', '€', 1, 3, '', 0);
test('multi-byte char slice after start', '€', 2, 4, '', 0);
test('emoji', 'x👩‍👩‍👧‍👦x', 1, 26, '👩‍👩‍👧‍👦', 25);
test('emoji slice before end', 'x👩‍👩‍👧‍👦x', 1, 25, '', 0);
test('emoji slice after start', 'x👩‍👩‍👧‍👦x', 2, 26, '', 0);
test('emoji slice capture around', 'x👩‍👩‍👧‍👦x', 0, 27, 'x👩‍👩‍👧‍👦x', 27);
});
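// A rough sketch reproducing the behaviour tabulated above (an assumption, not the
// real util/unicodeSlice): begin/end are UTF-8 byte offsets, and a grapheme cluster is
// kept only when it lies entirely inside [begin, end), so partially covered characters
// and emoji sequences are dropped rather than mangled.
function unicodeSliceSketch(input: string, begin: number, end: number): string {
  const encoder = new TextEncoder();
  const from = Math.max(0, begin);
  const to = Math.max(from, end);
  let offset = 0;
  let result = '';
  // Default Intl.Segmenter granularity is 'grapheme', which keeps ZWJ emoji
  // sequences together as one segment.
  for (const { segment } of new Intl.Segmenter().segment(input)) {
    const size = encoder.encode(segment).byteLength;
    if (offset >= from && offset + size <= to) {
      result += segment;
    }
    offset += size;
  }
  return result;
}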

View file

@ -0,0 +1,135 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { EventEmitter, once } from 'events';
import { Readable } from 'stream';
import { createServer } from 'http';
import type {
IncomingMessage,
ServerResponse,
Server,
OutgoingHttpHeaders,
} from 'http';
import { strictAssert } from '../../../util/assert';
export type NextResponse = Readonly<{
status: number;
headers: OutgoingHttpHeaders;
}>;
export type LastRequestData = Readonly<{
method?: string;
url?: string;
headers: OutgoingHttpHeaders;
body: Buffer;
}>;
export class TestServer extends EventEmitter {
#server: Server;
#nextResponse: NextResponse = { status: 200, headers: {} };
#lastRequest: { request: IncomingMessage; body: Buffer } | null = null;
constructor() {
super();
this.#server = createServer(this.#onRequest);
}
async listen(): Promise<void> {
await new Promise<void>(resolve => {
this.#server.listen(0, resolve);
});
}
closeLastRequest(): void {
this.#lastRequest?.request.destroy();
}
async closeServer(): Promise<void> {
if (!this.#server.listening) {
return;
}
this.#server.closeAllConnections();
await new Promise<void>((resolve, reject) => {
this.#server.close(error => {
if (error) {
reject(error);
} else {
resolve();
}
});
});
}
get endpoint(): string {
const address = this.#server.address();
strictAssert(
typeof address === 'object' && address != null,
'address must be an object'
);
return `http://localhost:${address.port}/`;
}
respondWith(status: number, headers: OutgoingHttpHeaders = {}): void {
this.#nextResponse = { status, headers };
}
lastRequest(): LastRequestData | null {
const request = this.#lastRequest;
if (request == null) {
return null;
}
return {
method: request.request.method,
url: request.request.url,
headers: request.request.headers,
body: request.body,
};
}
#onRequest = (request: IncomingMessage, response: ServerResponse) => {
this.emit('request');
const nextResponse = this.#nextResponse;
const lastRequest = { request, body: Buffer.alloc(0) };
this.#lastRequest = lastRequest;
request.on('data', chunk => {
lastRequest.body = Buffer.concat([lastRequest.body, chunk]);
this.emit('data');
});
request.on('end', () => {
response.writeHead(nextResponse.status, nextResponse.headers);
this.#nextResponse = { status: 200, headers: {} };
response.end();
});
request.on('error', error => {
response.destroy(error);
});
};
}
export function body(
server: TestServer,
steps: () => AsyncIterator<Uint8Array, void, number>
): Readable {
const iter = steps();
let first = true;
return new Readable({
async read(size: number) {
try {
// To make tests more reliable, we want each `yield` in body() to be
// processed before we yield the next chunk.
if (first) {
first = false;
} else {
await once(server, 'data');
}
const chunk = await iter.next(size);
if (chunk.done) {
this.push(null);
return;
}
this.push(chunk.value);
} catch (error) {
this.destroy(error);
}
},
});
}

View file

@ -0,0 +1,461 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
// eslint-disable-next-line @typescript-eslint/no-restricted-imports
import { assert, expect } from 'chai';
import {
_getUploadMetadataHeader,
_tusCreateWithUploadRequest,
_tusGetCurrentOffsetRequest,
_tusResumeUploadRequest,
tusUpload,
} from '../../../util/uploads/tusProtocol';
import { TestServer, body } from './helpers';
import { toLogFormat } from '../../../types/errors';
describe('tusProtocol', () => {
describe('_getUploadMetadataHeader', () => {
it('creates key value pairs, with base 64 values', () => {
assert.strictEqual(_getUploadMetadataHeader({}), '');
assert.strictEqual(
_getUploadMetadataHeader({
one: 'first',
}),
'one Zmlyc3Q='
);
assert.strictEqual(
_getUploadMetadataHeader({
one: 'first',
two: 'second',
}),
'one Zmlyc3Q=,two c2Vjb25k'
);
});
});
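// A rough sketch of the header format asserted above (the tus Upload-Metadata header):
// each pair is "key <base64 value>", and pairs are comma-separated. Not the actual
// _getUploadMetadataHeader source.
function uploadMetadataHeaderSketch(fields: Record<string, string>): string {
  return Object.entries(fields)
    .map(([key, value]) => `${key} ${Buffer.from(value).toString('base64')}`)
    .join(',');
}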
describe('_tusCreateWithUploadRequest', () => {
let server: TestServer;
beforeEach(async () => {
server = new TestServer();
await server.listen();
});
afterEach(async () => {
await server.closeServer();
});
it('uploads on create', async () => {
server.respondWith(200, {});
const result = await _tusCreateWithUploadRequest({
endpoint: server.endpoint,
headers: {
'custom-header': 'custom-value',
},
fileName: 'test',
fileSize: 6,
readable: body(server, async function* () {
yield new Uint8Array([1, 2, 3]);
yield new Uint8Array([4, 5, 6]);
}),
});
assert.strictEqual(result, true);
assert.strictEqual(server.lastRequest()?.body.byteLength, 6);
assert.strictEqual(
server.lastRequest()?.body.toString('hex'),
'010203040506'
);
assert.strictEqual(server.lastRequest()?.method, 'POST');
assert.deepOwnInclude(server.lastRequest()?.headers, {
'tus-resumable': '1.0.0',
'upload-length': '6',
'upload-metadata': 'filename dGVzdA==',
'content-type': 'application/offset+octet-stream',
'custom-header': 'custom-value',
});
});
it('gracefully handles server connection closing', async () => {
const result = await _tusCreateWithUploadRequest({
endpoint: server.endpoint,
headers: {},
fileName: 'test',
fileSize: 0,
readable: body(server, async function* () {
yield new Uint8Array([1, 2, 3]);
await server.closeServer();
yield new Uint8Array([4, 5, 6]);
}),
});
assert.strictEqual(result, false);
assert.strictEqual(server.lastRequest()?.body.byteLength, 3);
assert.strictEqual(server.lastRequest()?.body.toString('hex'), '010203');
});
it('gracefully handles being aborted', async () => {
const controller = new AbortController();
const result = await _tusCreateWithUploadRequest({
endpoint: server.endpoint,
headers: {},
fileName: 'test',
fileSize: 0,
signal: controller.signal,
readable: body(server, async function* () {
yield new Uint8Array([1, 2, 3]);
controller.abort();
yield new Uint8Array([4, 5, 6]);
}),
});
assert.strictEqual(result, false);
assert.strictEqual(server.lastRequest()?.body.byteLength, 3);
assert.strictEqual(server.lastRequest()?.body.toString('hex'), '010203');
});
it('reports progress', async () => {
let progress = 0;
const result = await _tusCreateWithUploadRequest({
endpoint: server.endpoint,
headers: {},
fileName: 'test',
fileSize: 6,
onProgress: bytesUploaded => {
progress = bytesUploaded;
},
readable: body(server, async function* () {
yield new Uint8Array([1, 2, 3]);
assert.strictEqual(progress, 3);
yield new Uint8Array([4, 5, 6]);
assert.strictEqual(progress, 6);
}),
});
assert.strictEqual(result, true);
});
it('reports caught errors', async () => {
let caughtError: Error | undefined;
const result = await _tusCreateWithUploadRequest({
endpoint: server.endpoint,
headers: {},
fileName: 'test',
fileSize: 6,
onCaughtError: error => {
caughtError = error;
},
readable: body(server, async function* () {
yield new Uint8Array([1, 2, 3]);
throw new Error('test');
}),
});
assert.strictEqual(result, false);
assert.strictEqual(caughtError?.message, 'test');
});
});
describe('_tusGetCurrentOffsetRequest', () => {
let server: TestServer;
beforeEach(async () => {
server = new TestServer();
await server.listen();
});
afterEach(async () => {
await server.closeServer();
});
it('returns the current offset', async () => {
server.respondWith(200, { 'Upload-Offset': '3' });
const result = await _tusGetCurrentOffsetRequest({
endpoint: server.endpoint,
headers: {
'custom-header': 'custom-value',
},
fileName: 'test',
});
assert.strictEqual(result, 3);
assert.strictEqual(server.lastRequest()?.method, 'HEAD');
assert.deepOwnInclude(server.lastRequest()?.headers, {
'tus-resumable': '1.0.0',
'custom-header': 'custom-value',
});
});
it('throws on missing offset', async () => {
server.respondWith(200, {});
await assert.isRejected(
_tusGetCurrentOffsetRequest({
endpoint: server.endpoint,
headers: {},
fileName: 'test',
}),
'getCurrentState: Missing Upload-Offset header'
);
});
it('throws on invalid offset', async () => {
server.respondWith(200, { 'Upload-Offset': '-1' });
await assert.isRejected(
_tusGetCurrentOffsetRequest({
endpoint: server.endpoint,
headers: {},
fileName: 'test',
}),
'getCurrentState: Invalid Upload-Offset (-1)'
);
});
});
describe('_tusResumeUploadRequest', () => {
let server: TestServer;
beforeEach(async () => {
server = new TestServer();
await server.listen();
});
afterEach(async () => {
await server.closeServer();
});
it('uploads on resume', async () => {
server.respondWith(200, {});
const result = await _tusResumeUploadRequest({
endpoint: server.endpoint,
headers: {
'custom-header': 'custom-value',
},
fileName: 'test',
uploadOffset: 3,
readable: body(server, async function* () {
// we're resuming from offset 3
yield new Uint8Array([3, 4, 5]);
yield new Uint8Array([6, 7, 8]);
}),
});
assert.strictEqual(result, true);
assert.strictEqual(server.lastRequest()?.body.byteLength, 6);
assert.strictEqual(
server.lastRequest()?.body.toString('hex'),
'030405060708'
);
assert.deepOwnInclude(server.lastRequest()?.headers, {
'tus-resumable': '1.0.0',
'upload-offset': '3',
'content-type': 'application/offset+octet-stream',
'custom-header': 'custom-value',
});
});
it('gracefully handles server connection closing', async () => {
const result = await _tusResumeUploadRequest({
endpoint: server.endpoint,
headers: {},
fileName: 'test',
uploadOffset: 3,
readable: body(server, async function* () {
yield new Uint8Array([1, 2, 3]);
await server.closeServer();
yield new Uint8Array([4, 5, 6]);
}),
});
assert.strictEqual(result, false);
assert.strictEqual(server.lastRequest()?.body.byteLength, 3);
assert.strictEqual(server.lastRequest()?.body.toString('hex'), '010203');
});
it('gracefully handles being aborted', async () => {
const controller = new AbortController();
const result = await _tusResumeUploadRequest({
endpoint: server.endpoint,
headers: {},
fileName: 'test',
uploadOffset: 3,
signal: controller.signal,
readable: body(server, async function* () {
yield new Uint8Array([1, 2, 3]);
controller.abort();
yield new Uint8Array([4, 5, 6]);
}),
});
assert.strictEqual(result, false);
assert.strictEqual(server.lastRequest()?.body.byteLength, 3);
assert.strictEqual(server.lastRequest()?.body.toString('hex'), '010203');
});
it('reports progress', async () => {
let progress = 0;
const result = await _tusResumeUploadRequest({
endpoint: server.endpoint,
headers: {},
fileName: 'test',
uploadOffset: 3,
onProgress: bytesUploaded => {
progress = bytesUploaded;
},
readable: body(server, async function* () {
yield new Uint8Array([1, 2, 3]);
assert.strictEqual(progress, 3);
yield new Uint8Array([4, 5, 6]);
assert.strictEqual(progress, 6);
}),
});
assert.strictEqual(result, true);
});
it('reports caught errors', async () => {
let caughtError: Error | undefined;
const result = await _tusResumeUploadRequest({
endpoint: server.endpoint,
headers: {},
fileName: 'test',
uploadOffset: 3,
onCaughtError: error => {
caughtError = error;
},
readable: body(server, async function* () {
yield new Uint8Array([1, 2, 3]);
throw new Error('test');
}),
});
assert.strictEqual(result, false);
assert.strictEqual(caughtError?.message, 'test');
});
});
describe('tusUpload', () => {
let server: TestServer;
function assertSocketCloseError(error: unknown) {
// There isn't an equivalent to this chain in assert()
expect(error, toLogFormat(error))
.property('code')
.oneOf(['ECONNRESET', 'UND_ERR_SOCKET']);
}
beforeEach(async () => {
server = new TestServer();
await server.listen();
});
afterEach(async () => {
await server.closeServer();
});
it('creates and uploads', async () => {
server.respondWith(200, {});
await tusUpload({
endpoint: server.endpoint,
headers: { 'mock-header': 'mock-value' },
fileName: 'mock-file-name',
filePath: 'mock-file-path',
fileSize: 6,
onCaughtError: assertSocketCloseError,
reader: (filePath, offset) => {
assert.strictEqual(offset, undefined);
assert.strictEqual(filePath, 'mock-file-path');
return body(server, async function* () {
yield new Uint8Array([1, 2, 3]);
yield new Uint8Array([4, 5, 6]);
});
},
});
assert.strictEqual(server.lastRequest()?.body.byteLength, 6);
assert.deepOwnInclude(server.lastRequest()?.headers, {
'upload-metadata': 'filename bW9jay1maWxlLW5hbWU=',
'mock-header': 'mock-value',
});
});
it('resumes when initial request fails', async () => {
let cursor = undefined as number | void;
let callCount = 0;
const file = new Uint8Array([1, 2, 3, 4, 5, 6]);
await tusUpload({
endpoint: server.endpoint,
headers: { 'mock-header': 'mock-value' },
fileName: 'mock-file-name',
filePath: 'mock-file-path',
fileSize: file.byteLength,
onCaughtError: assertSocketCloseError,
reader: (_filePath, offset) => {
callCount += 1;
assert.strictEqual(offset, cursor);
if (offset != null) {
// Ensure we're checking the offset on the HEAD request on every
// iteration after the first.
assert.strictEqual(server.lastRequest()?.method, 'HEAD');
}
return body(server, async function* () {
cursor = cursor ?? 0;
const nextChunk = file.subarray(cursor, (cursor += 2));
if (offset === undefined) {
// Stage 1: Create and upload
yield nextChunk;
server.closeLastRequest();
assert.deepOwnInclude(server.lastRequest(), {
method: 'POST',
body: nextChunk,
});
} else if (offset === 2) {
// Stage 2: Resume
yield nextChunk;
server.closeLastRequest();
assert.deepOwnInclude(server.lastRequest(), {
method: 'PATCH',
body: nextChunk,
});
} else if (offset === 4) {
// Stage 3: Keep looping
yield nextChunk;
// Closing even though this is the last one so we have to check
// HEAD one last time.
server.closeLastRequest();
assert.deepOwnInclude(server.lastRequest(), {
method: 'PATCH',
body: nextChunk,
});
} else {
assert.fail('Unexpected offset');
}
server.respondWith(200, { 'Upload-Offset': cursor });
});
},
});
// Last request should have checked length and seen it was done.
assert.strictEqual(server.lastRequest()?.method, 'HEAD');
assert.strictEqual(callCount, 3);
});
it('should resume from wherever the server says it got to', async () => {
let nextExpectedOffset = undefined as number | void;
let callCount = 0;
const file = new Uint8Array([1, 2, 3, 4, 5, 6]);
await tusUpload({
endpoint: server.endpoint,
headers: { 'mock-header': 'mock-value' },
fileName: 'mock-file-name',
filePath: 'mock-file-path',
fileSize: file.byteLength,
onCaughtError: assertSocketCloseError,
reader: (_filePath, offset) => {
callCount += 1;
assert.strictEqual(offset, nextExpectedOffset);
return body(server, async function* () {
if (offset === undefined) {
yield file.subarray(0, 3);
yield file.subarray(3, 6);
nextExpectedOffset = 3;
server.closeLastRequest();
// For this test, let's pretend this is as far as we were able to save
server.respondWith(200, { 'Upload-Offset': 3 });
} else if (offset === 3) {
yield file.subarray(3, 6);
} else {
assert.fail('Unexpected offset');
}
});
},
});
assert.strictEqual(callCount, 2);
});
});
});
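// A rough sketch (an assumption, not Signal's tusUpload) of the create/HEAD/PATCH loop
// the tests above exercise: attempt creation-with-upload first; while that or a resume
// fails, ask the server how far it got and resume from that offset. The real
// implementation also threads signal/onProgress/onCaughtError through and bounds retries.
async function tusUploadLoopSketch(params: {
  endpoint: string;
  headers: Record<string, string>;
  fileName: string;
  filePath: string;
  fileSize: number;
  reader: (filePath: string, offset?: number) => import('node:stream').Readable;
}): Promise<void> {
  let done = await _tusCreateWithUploadRequest({
    endpoint: params.endpoint,
    headers: params.headers,
    fileName: params.fileName,
    fileSize: params.fileSize,
    readable: params.reader(params.filePath),
  });
  while (!done) {
    const offset = await _tusGetCurrentOffsetRequest({
      endpoint: params.endpoint,
      headers: params.headers,
      fileName: params.fileName,
    });
    if (offset >= params.fileSize) {
      return; // the server already has every byte
    }
    done = await _tusResumeUploadRequest({
      endpoint: params.endpoint,
      headers: params.headers,
      fileName: params.fileName,
      uploadOffset: offset,
      readable: params.reader(params.filePath, offset),
    });
  }
}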