Convert logging infrastructure to TypeScript

Evan Hahn 2021-01-27 15:13:33 -06:00 committed by GitHub
parent 10ace53845
commit a8787e7c9e
19 changed files with 595 additions and 331 deletions

14
ts/firstline.d.ts vendored Normal file

@@ -0,0 +1,14 @@
// Copyright 2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
declare module 'firstline' {
interface FirstLineOpts {
encoding?: BufferEncoding;
lineEnding?: '\n';
}
export default function firstLine(
filePath: string,
opts?: FirstLineOpts
): Promise<string>;
}
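
For context, a minimal usage sketch of what this ambient declaration enables (the helper name is illustrative, not part of the diff):

import readFirstLine from 'firstline';

async function readLogHeader(logFile: string): Promise<string> {
  // Both options are optional and typed by FirstLineOpts.
  return readFirstLine(logFile, { encoding: 'utf8', lineEnding: '\n' });
}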

317
ts/logging/main_process_logging.ts Normal file

@@ -0,0 +1,317 @@
// Copyright 2017-2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
// NOTE: Temporarily allow `then` until we convert the entire file to `async` / `await`:
/* eslint-disable more/no-then */
/* eslint-disable no-console */
import * as path from 'path';
import * as fs from 'fs';
import { app, ipcMain as ipc } from 'electron';
import * as bunyan from 'bunyan';
import * as mkdirp from 'mkdirp';
import * as _ from 'lodash';
import readFirstLine from 'firstline';
import { read as readLastLines } from 'read-last-lines';
import rimraf from 'rimraf';
import {
LogEntryType,
LogLevel,
cleanArgs,
getLogLevelString,
isLogEntry,
} from './shared';
declare global {
// We want to extend `Console`, so we need an interface.
// eslint-disable-next-line no-restricted-syntax
interface Console {
_log: typeof console.log;
_warn: typeof console.warn;
_error: typeof console.error;
}
}
let globalLogger: undefined | bunyan;
const isRunningFromConsole = Boolean(process.stdout.isTTY);
export async function initialize(): Promise<bunyan> {
if (globalLogger) {
throw new Error('Already called initialize!');
}
const basePath = app.getPath('userData');
const logPath = path.join(basePath, 'logs');
mkdirp.sync(logPath);
try {
await cleanupLogs(logPath);
} catch (error) {
const errorString = `Failed to clean logs; deleting all. Error: ${error.stack}`;
console.error(errorString);
await deleteAllLogs(logPath);
mkdirp.sync(logPath);
// If we want this log entry to persist on disk, we need to wait until we've
// set up our logging infrastructure.
setTimeout(() => {
console.error(errorString);
}, 500);
}
const logFile = path.join(logPath, 'log.log');
const loggerOptions: bunyan.LoggerOptions = {
name: 'log',
streams: [
{
type: 'rotating-file',
path: logFile,
period: '1d',
count: 3,
},
],
};
if (isRunningFromConsole) {
loggerOptions.streams?.push({
level: 'debug',
stream: process.stdout,
});
}
const logger = bunyan.createLogger(loggerOptions);
ipc.on('batch-log', (_first, batch: unknown) => {
if (!Array.isArray(batch)) {
logger.error(
'batch-log IPC event was called with a non-array; dropping logs'
);
return;
}
batch.forEach(item => {
if (isLogEntry(item)) {
const levelString = getLogLevelString(item.level);
logger[levelString](
{
time: item.time,
},
item.msg
);
} else {
logger.error(
'batch-log IPC event was called with an invalid log entry; dropping entry'
);
}
});
});
ipc.on('fetch-log', event => {
fetch(logPath).then(
data => {
event.sender.send('fetched-log', data);
},
error => {
logger.error(`Problem loading log from disk: ${error.stack}`);
}
);
});
ipc.on('delete-all-logs', async event => {
try {
await deleteAllLogs(logPath);
} catch (error) {
logger.error(`Problem deleting all logs: ${error.stack}`);
}
event.sender.send('delete-all-logs-complete');
});
globalLogger = logger;
return logger;
}
async function deleteAllLogs(logPath: string): Promise<void> {
return new Promise((resolve, reject) => {
rimraf(
logPath,
{
disableGlob: true,
},
error => {
if (error) {
return reject(error);
}
return resolve();
}
);
});
}
async function cleanupLogs(logPath: string) {
const now = new Date();
const earliestDate = new Date(
Date.UTC(now.getUTCFullYear(), now.getUTCMonth(), now.getUTCDate() - 3)
);
try {
const remaining = await eliminateOutOfDateFiles(logPath, earliestDate);
const files = _.filter(remaining, file => !file.start && file.end);
if (!files.length) {
return;
}
await eliminateOldEntries(files, earliestDate);
} catch (error) {
console.error(
'Error cleaning logs; deleting and starting over from scratch.',
error.stack
);
// delete and re-create the log directory
await deleteAllLogs(logPath);
mkdirp.sync(logPath);
}
}
// Exported for testing only.
export function isLineAfterDate(line: string, date: Readonly<Date>): boolean {
if (!line) {
return false;
}
try {
const data = JSON.parse(line);
return new Date(data.time).getTime() > date.getTime();
} catch (e) {
console.log('error parsing log line', e.stack, line);
return false;
}
}
// Exported for testing only.
export function eliminateOutOfDateFiles(
logPath: string,
date: Readonly<Date>
): Promise<
Array<{
path: string;
start: boolean;
end: boolean;
}>
> {
const files = fs.readdirSync(logPath);
const paths = files.map(file => path.join(logPath, file));
return Promise.all(
_.map(paths, target =>
Promise.all([readFirstLine(target), readLastLines(target, 2)]).then(
results => {
const start = results[0];
const end = results[1].split('\n');
const file = {
path: target,
start: isLineAfterDate(start, date),
end:
isLineAfterDate(end[end.length - 1], date) ||
isLineAfterDate(end[end.length - 2], date),
};
if (!file.start && !file.end) {
fs.unlinkSync(file.path);
}
return file;
}
)
)
);
}
// Exported for testing only.
export async function eliminateOldEntries(
files: ReadonlyArray<{ path: string }>,
date: Readonly<Date>
): Promise<void> {
await Promise.all(
_.map(files, file =>
fetchLog(file.path).then(lines => {
const recent = _.filter(lines, line => new Date(line.time) >= date);
const text = _.map(recent, line => JSON.stringify(line)).join('\n');
return fs.writeFileSync(file.path, `${text}\n`);
})
)
);
}
// Exported for testing only.
export function fetchLog(logFile: string): Promise<Array<LogEntryType>> {
return new Promise((resolve, reject) => {
fs.readFile(logFile, { encoding: 'utf8' }, (err, text) => {
if (err) {
return reject(err);
}
const lines = _.compact(text.split('\n'));
const data = _.compact(
lines.map(line => {
try {
const result = _.pick(JSON.parse(line), ['level', 'time', 'msg']);
return isLogEntry(result) ? result : null;
} catch (e) {
return null;
}
})
);
return resolve(data);
});
});
}
// Exported for testing only.
export function fetch(logPath: string): Promise<Array<LogEntryType>> {
const files = fs.readdirSync(logPath);
const paths = files.map(file => path.join(logPath, file));
// creating a manual log entry for the final log result
const fileListEntry: LogEntryType = {
level: LogLevel.Info,
time: new Date().toISOString(),
msg: `Loaded this list of log files from logPath: ${files.join(', ')}`,
};
return Promise.all(paths.map(fetchLog)).then(results => {
const data = _.flatten(results);
data.push(fileListEntry);
return _.sortBy(data, logEntry => logEntry.time);
});
}
function logAtLevel(level: LogLevel, ...args: ReadonlyArray<unknown>) {
if (globalLogger) {
const levelString = getLogLevelString(level);
globalLogger[levelString](cleanArgs(args));
} else if (isRunningFromConsole) {
console._log(...args);
}
}
// This blows up using mocha --watch, so we ensure it is run just once
if (!console._log) {
console._log = console.log;
console.log = _.partial(logAtLevel, LogLevel.Info);
console._error = console.error;
console.error = _.partial(logAtLevel, LogLevel.Error);
console._warn = console.warn;
console.warn = _.partial(logAtLevel, LogLevel.Warn);
}
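
A minimal sketch of how the Electron main process might consume this module; the entry-point wiring and import path are assumptions, not part of this commit:

import { app } from 'electron';
import { initialize as initializeLogging } from './ts/logging/main_process_logging';

app.on('ready', async () => {
  // initialize() must run exactly once; a second call throws 'Already called initialize!'.
  const logger = await initializeLogging();
  logger.info('main process logging ready');
});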


@@ -0,0 +1,150 @@
// Copyright 2017-2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
/* eslint-env node */
/* eslint-disable no-console */
import { ipcRenderer as ipc } from 'electron';
import _ from 'lodash';
import { levelFromName } from 'bunyan';
import { uploadDebugLogs } from './debuglogs';
import { redactAll } from '../../js/modules/privacy';
import { createBatcher } from '../util/batcher';
import {
LogEntryType,
LogLevel,
cleanArgs,
getLogLevelString,
isLogEntry,
} from './shared';
import { reallyJsonStringify } from '../util/reallyJsonStringify';
// To make it easier to visually scan logs, we make all levels the same length
const levelMaxLength: number = Object.keys(levelFromName).reduce(
(maxLength, level) => Math.max(maxLength, level.length),
0
);
// Backwards-compatible logging, simple strings and no level (defaulted to INFO)
function now() {
const date = new Date();
return date.toJSON();
}
function log(...args: ReadonlyArray<unknown>) {
logAtLevel(LogLevel.Info, ...args);
}
if (window.console) {
console._log = console.log;
console.log = log;
}
// The mechanics of preparing a log for publish
function getHeader() {
let header = window.navigator.userAgent;
header += ` node/${window.getNodeVersion()}`;
header += ` env/${window.getEnvironment()}`;
return header;
}
const getLevel = _.memoize((level: LogLevel): string => {
const text = getLogLevelString(level);
return text.toUpperCase().padEnd(levelMaxLength, ' ');
});
function formatLine(mightBeEntry: Readonly<unknown>): string {
const entry: LogEntryType = isLogEntry(mightBeEntry)
? mightBeEntry
: {
level: LogLevel.Error,
msg: `Invalid IPC data when fetching logs. Here's what we could recover: ${reallyJsonStringify(
mightBeEntry
)}`,
time: new Date().toISOString(),
};
return `${getLevel(entry.level)} ${entry.time} ${entry.msg}`;
}
function fetch(): Promise<string> {
return new Promise(resolve => {
ipc.send('fetch-log');
ipc.on('fetched-log', (_event, logEntries: unknown) => {
let body: string;
if (Array.isArray(logEntries)) {
body = logEntries.map(formatLine).join('\n');
} else {
const entry: LogEntryType = {
level: LogLevel.Error,
msg: 'Invalid IPC data when fetching logs; dropping all logs',
time: new Date().toISOString(),
};
body = formatLine(entry);
}
const result = `${getHeader()}\n${redactAll(body)}`;
resolve(result);
});
});
}
const publish = uploadDebugLogs;
// A modern logging interface for the browser
const env = window.getEnvironment();
const IS_PRODUCTION = env === 'production';
const ipcBatcher = createBatcher({
wait: 500,
maxSize: 500,
processBatch: (items: Array<LogEntryType>) => {
ipc.send('batch-log', items);
},
});
// The Bunyan API: https://github.com/trentm/node-bunyan#log-method-api
function logAtLevel(level: LogLevel, ...args: ReadonlyArray<unknown>): void {
if (!IS_PRODUCTION) {
const prefix = getLogLevelString(level)
.toUpperCase()
.padEnd(levelMaxLength, ' ');
console._log(prefix, now(), ...args);
}
ipcBatcher.add({
level,
msg: cleanArgs(args),
time: new Date().toISOString(),
});
}
window.log = {
fatal: _.partial(logAtLevel, LogLevel.Fatal),
error: _.partial(logAtLevel, LogLevel.Error),
warn: _.partial(logAtLevel, LogLevel.Warn),
info: _.partial(logAtLevel, LogLevel.Info),
debug: _.partial(logAtLevel, LogLevel.Debug),
trace: _.partial(logAtLevel, LogLevel.Trace),
fetch,
publish,
};
window.onerror = (_message, _script, _line, _col, error) => {
const errorInfo = error && error.stack ? error.stack : JSON.stringify(error);
window.log.error(`Top-level unhandled error: ${errorInfo}`);
};
window.addEventListener('unhandledrejection', rejectionEvent => {
const error = rejectionEvent.reason;
const errorString =
error && error.stack ? error.stack : JSON.stringify(error);
window.log.error(`Top-level unhandled promise rejection: ${errorString}`);
});
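
Illustrative renderer-side usage of the window.log interface set up above (call sites are hypothetical):

window.log.info('message queue flushed', { count: 3 });  // batched to the main process over IPC
window.log.error('send failed:', new Error('timeout'));  // non-strings are serialized by reallyJsonStringify
window.log.fetch().then(text => console._log(text));     // header plus redacted log lines as one string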

81
ts/logging/shared.ts Normal file

@@ -0,0 +1,81 @@
// Copyright 2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import * as bunyan from 'bunyan';
import { redactAll } from '../../js/modules/privacy';
import { missingCaseError } from '../util/missingCaseError';
import { reallyJsonStringify } from '../util/reallyJsonStringify';
// These match [Bunyan's recommendations][0].
// [0]: https://www.npmjs.com/package/bunyan#levels
export enum LogLevel {
Fatal = 60,
Error = 50,
Warn = 40,
Info = 30,
Debug = 20,
Trace = 10,
}
// These match [Bunyan's core fields][1].
// [1]: https://www.npmjs.com/package/bunyan#core-fields
export type LogEntryType = {
level: LogLevel;
msg: string;
time: string;
};
const logLevels = new Set<LogLevel>([
LogLevel.Fatal,
LogLevel.Error,
LogLevel.Warn,
LogLevel.Info,
LogLevel.Debug,
LogLevel.Trace,
]);
function isLogLevel(value: unknown): value is LogLevel {
return typeof value === 'number' && logLevels.has(value);
}
function isValidTime(value: unknown): value is string {
return typeof value === 'string' && !Number.isNaN(new Date(value).getTime());
}
export function isLogEntry(value: unknown): value is LogEntryType {
if (!value || typeof value !== 'object' || Array.isArray(value)) {
return false;
}
const { level, time, msg } = value as Record<string, unknown>;
return typeof msg === 'string' && isLogLevel(level) && isValidTime(time);
}
export function getLogLevelString(value: LogLevel): bunyan.LogLevelString {
switch (value) {
case LogLevel.Fatal:
return 'fatal';
case LogLevel.Error:
return 'error';
case LogLevel.Warn:
return 'warn';
case LogLevel.Info:
return 'info';
case LogLevel.Debug:
return 'debug';
case LogLevel.Trace:
return 'trace';
default:
throw missingCaseError(value);
}
}
export function cleanArgs(args: ReadonlyArray<unknown>): string {
return redactAll(
args
.map(item =>
typeof item === 'string' ? item : reallyJsonStringify(item)
)
.join(' ')
);
}
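
A quick sketch of these helpers in use (values are illustrative):

isLogEntry({ level: 30, time: new Date().toISOString(), msg: 'hi' }); // => true
isLogEntry({ level: 31, time: 'not a date', msg: 'hi' });             // => false (unknown level, invalid time)
getLogLevelString(LogLevel.Warn);                                     // => 'warn'
cleanArgs(['sync took', 1234, 'ms']); // => 'sync took 1234 ms' (stringified, then passed through redactAll)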


@@ -0,0 +1,88 @@
// Copyright 2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import { reallyJsonStringify } from '../../util/reallyJsonStringify';
describe('reallyJsonStringify', () => {
it('returns the same thing as JSON.stringify when JSON.stringify returns a string', () => {
[
null,
true,
false,
0,
-0,
123,
-Infinity,
Infinity,
NaN,
'',
'foo',
[],
[1],
{},
{ hi: 5 },
new Date(),
new Set([1, 2, 3]),
new Map([['foo', 'bar']]),
Promise.resolve(123),
{
toJSON() {
return 'foo';
},
},
].forEach(value => {
const expected = JSON.stringify(value);
const actual = reallyJsonStringify(value);
assert.strictEqual(actual, expected);
assert.isString(actual);
});
});
it('returns a string when JSON.stringify returns undefined', () => {
const check = (value: unknown, expected: string): void => {
const actual = reallyJsonStringify(value);
assert.strictEqual(actual, expected);
// This ensures that our test is set up correctly, not the code under test.
assert.isUndefined(JSON.stringify(value));
};
check(undefined, '[object Undefined]');
check(Symbol('foo'), '[object Symbol]');
check(
{
toJSON() {
return undefined;
},
},
'[object Object]'
);
});
it('returns a string when JSON.stringify would error', () => {
const check = (value: unknown, expected: string): void => {
const actual = reallyJsonStringify(value);
assert.strictEqual(actual, expected);
// This ensures that our test is set up correctly, not the code under test.
assert.throws(() => JSON.stringify(value));
};
check(BigInt(123), '[object BigInt]');
const a: Record<string, unknown> = {};
const b = { a };
a.b = b;
check(a, '[object Object]');
check([a], '[object Array]');
const bad = {
toJSON() {
throw new Error("don't even try to stringify me");
},
};
check(bad, '[object Object]');
check([bad], '[object Array]');
});
});


@@ -0,0 +1,307 @@
// Copyright 2018-2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
// NOTE: Temporarily allow `then` until we convert the entire file to `async` / `await`:
/* eslint-disable more/no-then */
import * as fs from 'fs';
import * as fse from 'fs-extra';
import * as os from 'os';
import * as path from 'path';
import { expect } from 'chai';
import {
eliminateOutOfDateFiles,
eliminateOldEntries,
isLineAfterDate,
fetchLog,
fetch,
} from '../logging/main_process_logging';
describe('logging', () => {
const fakeLogEntry = ({
level = 30,
msg = 'hello world',
time = new Date().toISOString(),
}: {
level?: number;
msg?: string;
time?: string;
}): Record<string, unknown> => ({
level,
msg,
time,
});
const fakeLogLine = (...args: Parameters<typeof fakeLogEntry>): string =>
JSON.stringify(fakeLogEntry(...args));
let tmpDir: string;
beforeEach(async () => {
tmpDir = await fs.promises.mkdtemp(path.join(os.tmpdir(), 'signal-test-'));
});
afterEach(async () => {
await fse.remove(tmpDir);
});
describe('#isLineAfterDate', () => {
it('returns false if falsy', () => {
const actual = isLineAfterDate('', new Date());
expect(actual).to.equal(false);
});
it('returns false if invalid JSON', () => {
const actual = isLineAfterDate('{{}', new Date());
expect(actual).to.equal(false);
});
it('returns false if date is invalid', () => {
const line = JSON.stringify({ time: '2018-01-04T19:17:05.014Z' });
const actual = isLineAfterDate(line, new Date('try6'));
expect(actual).to.equal(false);
});
it('returns false if log time is invalid', () => {
const line = JSON.stringify({ time: 'try7' });
const date = new Date('2018-01-04T19:17:00.000Z');
const actual = isLineAfterDate(line, date);
expect(actual).to.equal(false);
});
it('returns false if date before provided date', () => {
const line = JSON.stringify({ time: '2018-01-04T19:17:00.000Z' });
const date = new Date('2018-01-04T19:17:05.014Z');
const actual = isLineAfterDate(line, date);
expect(actual).to.equal(false);
});
it('returns true if date is after provided date', () => {
const line = JSON.stringify({ time: '2018-01-04T19:17:05.014Z' });
const date = new Date('2018-01-04T19:17:00.000Z');
const actual = isLineAfterDate(line, date);
expect(actual).to.equal(true);
});
});
describe('#eliminateOutOfDateFiles', () => {
it('deletes an empty file', () => {
const date = new Date();
const log = '\n';
const target = path.join(tmpDir, 'log.log');
fs.writeFileSync(target, log);
return eliminateOutOfDateFiles(tmpDir, date).then(() => {
expect(fs.existsSync(target)).to.equal(false);
});
});
it('deletes a file with invalid JSON lines', () => {
const date = new Date();
const log = '{{}\n';
const target = path.join(tmpDir, 'log.log');
fs.writeFileSync(target, log);
return eliminateOutOfDateFiles(tmpDir, date).then(() => {
expect(fs.existsSync(target)).to.equal(false);
});
});
it('deletes a file with all dates before provided date', () => {
const date = new Date('2018-01-04T19:17:05.014Z');
const contents = [
JSON.stringify({ time: '2018-01-04T19:17:00.014Z' }),
JSON.stringify({ time: '2018-01-04T19:17:01.014Z' }),
JSON.stringify({ time: '2018-01-04T19:17:02.014Z' }),
JSON.stringify({ time: '2018-01-04T19:17:03.014Z' }),
].join('\n');
const target = path.join(tmpDir, 'log.log');
fs.writeFileSync(target, contents);
return eliminateOutOfDateFiles(tmpDir, date).then(() => {
expect(fs.existsSync(target)).to.equal(false);
});
});
it('keeps a file with first line date before provided date', () => {
const date = new Date('2018-01-04T19:16:00.000Z');
const contents = [
JSON.stringify({ time: '2018-01-04T19:17:00.014Z' }),
JSON.stringify({ time: '2018-01-04T19:17:01.014Z' }),
JSON.stringify({ time: '2018-01-04T19:17:02.014Z' }),
JSON.stringify({ time: '2018-01-04T19:17:03.014Z' }),
].join('\n');
const target = path.join(tmpDir, 'log.log');
fs.writeFileSync(target, contents);
return eliminateOutOfDateFiles(tmpDir, date).then(() => {
expect(fs.existsSync(target)).to.equal(true);
});
});
it('keeps a file with last line date before provided date', () => {
const date = new Date('2018-01-04T19:17:01.000Z');
const contents = [
JSON.stringify({ time: '2018-01-04T19:17:00.014Z' }),
JSON.stringify({ time: '2018-01-04T19:17:01.014Z' }),
JSON.stringify({ time: '2018-01-04T19:17:02.014Z' }),
JSON.stringify({ time: '2018-01-04T19:17:03.014Z' }),
].join('\n');
const target = path.join(tmpDir, 'log.log');
fs.writeFileSync(target, contents);
return eliminateOutOfDateFiles(tmpDir, date).then(() => {
expect(fs.existsSync(target)).to.equal(true);
});
});
});
describe('#eliminateOldEntries', () => {
it('eliminates all non-parsing entries', () => {
const date = new Date('2018-01-04T19:17:01.000Z');
const contents = [
'random line',
fakeLogLine({ time: '2018-01-04T19:17:01.014Z' }),
fakeLogLine({ time: '2018-01-04T19:17:02.014Z' }),
fakeLogLine({ time: '2018-01-04T19:17:03.014Z' }),
].join('\n');
const expected = [
fakeLogEntry({ time: '2018-01-04T19:17:01.014Z' }),
fakeLogEntry({ time: '2018-01-04T19:17:02.014Z' }),
fakeLogEntry({ time: '2018-01-04T19:17:03.014Z' }),
];
const target = path.join(tmpDir, 'log.log');
const files = [
{
path: target,
},
];
fs.writeFileSync(target, contents);
return eliminateOldEntries(files, date).then(() => {
const actualEntries = fs
.readFileSync(target, 'utf8')
.split('\n')
.map(line => line.trim())
.filter(Boolean)
.map(line => JSON.parse(line));
expect(actualEntries).to.deep.equal(expected);
});
});
it('preserves all lines if before target date', () => {
const date = new Date('2018-01-04T19:17:03.000Z');
const contents = [
'random line',
fakeLogLine({ time: '2018-01-04T19:17:01.014Z' }),
fakeLogLine({ time: '2018-01-04T19:17:02.014Z' }),
fakeLogLine({ time: '2018-01-04T19:17:03.014Z' }),
].join('\n');
const expected = fakeLogEntry({ time: '2018-01-04T19:17:03.014Z' });
const target = path.join(tmpDir, 'log.log');
const files = [
{
path: target,
},
];
fs.writeFileSync(target, contents);
return eliminateOldEntries(files, date).then(() => {
// There should only be 1 line, so we can parse it safely.
expect(JSON.parse(fs.readFileSync(target, 'utf8'))).to.deep.equal(
expected
);
});
});
});
describe('#fetchLog', () => {
it('returns error if file does not exist', () => {
const target = 'random_file';
return fetchLog(target).then(
() => {
throw new Error('Expected an error!');
},
error => {
expect(error)
.to.have.property('message')
.that.match(/random_file/);
}
);
});
it('returns empty array if file has no valid JSON lines', () => {
const contents = 'line 1\nline2\n';
const target = path.join(tmpDir, 'test.log');
fs.writeFileSync(target, contents);
return fetchLog(target).then(result => {
expect(result).to.deep.equal([]);
});
});
it('returns just three fields in each returned line', () => {
const contents = [
JSON.stringify({
one: 1,
two: 2,
level: 30,
time: '2020-04-20T06:09:08.000Z',
msg: 'message 1',
}),
JSON.stringify({
one: 1,
two: 2,
level: 40,
time: '2021-04-20T06:09:08.000Z',
msg: 'message 2',
}),
'',
].join('\n');
const expected = [
{
level: 30,
time: '2020-04-20T06:09:08.000Z',
msg: 'message 1',
},
{
level: 40,
time: '2021-04-20T06:09:08.000Z',
msg: 'message 2',
},
];
const target = path.join(tmpDir, 'test.log');
fs.writeFileSync(target, contents);
return fetchLog(target).then(result => {
expect(result).to.deep.equal(expected);
});
});
});
describe('#fetch', () => {
it('returns single entry if no files', () => {
return fetch(tmpDir).then(results => {
expect(results).to.have.length(1);
expect(results[0].msg).to.match(/Loaded this list/);
});
});
it('returns sorted entries from all files', () => {
const first = [
fakeLogLine({ msg: '2', time: '2018-01-04T19:17:05.014Z' }),
'',
].join('\n');
const second = [
fakeLogLine({ msg: '1', time: '2018-01-04T19:17:00.014Z' }),
fakeLogLine({ msg: '3', time: '2018-01-04T19:18:00.014Z' }),
'',
].join('\n');
fs.writeFileSync(path.join(tmpDir, 'first.log'), first);
fs.writeFileSync(path.join(tmpDir, 'second.log'), second);
return fetch(tmpDir).then(results => {
expect(results).to.have.length(4);
expect(results[0].msg).to.equal('1');
expect(results[1].msg).to.equal('2');
expect(results[2].msg).to.equal('3');
});
});
});
});

ts/util/batcher.ts

@@ -24,7 +24,7 @@ window.waitForAllBatchers = async () => {
export type BatcherOptionsType<ItemType> = {
wait: number;
maxSize: number;
processBatch: (items: Array<ItemType>) => Promise<void>;
processBatch: (items: Array<ItemType>) => void | Promise<void>;
};
export type BatcherType<ItemType> = {

43
ts/util/reallyJsonStringify.ts Normal file

@@ -0,0 +1,43 @@
// Copyright 2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
/**
* Returns `JSON.stringify(value)` if that returns a string, otherwise returns a value
* like `[object Object]` or `[object Undefined]`.
*
* `JSON.stringify` doesn't always return a string. Some examples:
*
* JSON.stringify(undefined) === undefined
*
* JSON.stringify(Symbol()) === undefined
*
* JSON.stringify({ toJSON() {} }) === undefined
*
* const a = {};
* const b = { a };
* a.b = a;
* JSON.stringify(a); // => Throws a TypeError
*
* JSON.stringify(123n); // => Throws a TypeError
*
* const scary = {
* toJSON() {
* throw new Error('uh oh');
* }
* };
* JSON.stringify(scary); // => Throws "uh oh"
*
* This makes sure we return a string and don't throw.
*/
export function reallyJsonStringify(value: unknown): string {
let result: unknown;
try {
result = JSON.stringify(value);
} catch (_err) {
result = undefined;
}
return typeof result === 'string'
? result
: Object.prototype.toString.call(value);
}

9
ts/window.d.ts vendored

@@ -24,6 +24,7 @@ import {
} from './libsignal.d';
import { ContactRecordIdentityState, TextSecureType } from './textsecure.d';
import { WebAPIConnectType } from './textsecure/WebAPI';
import { uploadDebugLogs } from './logging/debuglogs';
import { CallingClass } from './services/calling';
import * as Groups from './groups';
import * as Crypto from './Crypto';
@@ -142,6 +143,7 @@ declare global {
getInteractionMode: () => 'mouse' | 'keyboard';
getMediaCameraPermissions: () => Promise<boolean>;
getMediaPermissions: () => Promise<boolean>;
getNodeVersion: () => string;
getServerPublicParams: () => string;
getSfuUrl: () => string;
getSocketStatus: () => number;
@@ -170,9 +172,14 @@
};
libsignal: LibSignalType;
log: {
fatal: LoggerType;
info: LoggerType;
warn: LoggerType;
error: LoggerType;
debug: LoggerType;
trace: LoggerType;
fetch: () => Promise<string>;
publish: typeof uploadDebugLogs;
};
nodeSetImmediate: typeof setImmediate;
normalizeUuids: (obj: any, paths: Array<string>, context: string) => void;
@@ -592,7 +599,7 @@ export class CanvasVideoRenderer {
constructor(canvas: Ref<HTMLCanvasElement>);
}
export type LoggerType = (...args: Array<any>) => void;
export type LoggerType = (...args: Array<unknown>) => void;
export type WhisperType = {
events: {