Convert logging infrastructure to TypeScript

Author: Evan Hahn, 2021-01-27 15:13:33 -06:00 (committed via GitHub)
Parent: 10ace53845
Commit: a8787e7c9e
19 changed files with 595 additions and 331 deletions


@ -1,4 +1,4 @@
// Copyright 2018-2020 Signal Messenger, LLC
// Copyright 2018-2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
/* global window */
@ -19,4 +19,4 @@ window.closeAbout = () => ipcRenderer.send('close-about');
window.i18n = i18n.setup(locale, localeMessages);
require('./js/logging');
require('./ts/logging/set_up_renderer_logging');


@ -1,4 +1,4 @@
// Copyright 2018-2020 Signal Messenger, LLC
// Copyright 2018-2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
/* global window */
@ -23,7 +23,7 @@ window.nodeSetImmediate = setImmediate;
window.getNodeVersion = () => config.node_version;
window.getEnvironment = () => config.environment;
require('./js/logging');
require('./ts/logging/set_up_renderer_logging');
window.closeDebugLog = () => ipcRenderer.send('close-debug-log');
window.Backbone = require('backbone');


@ -1,152 +0,0 @@
// Copyright 2017-2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
/* eslint-env node */
/* eslint-disable no-console */
const electron = require('electron');
const _ = require('lodash');
const { uploadDebugLogs } = require('../ts/logging/debuglogs');
const Privacy = require('./modules/privacy');
const { createBatcher } = require('../ts/util/batcher');
const ipc = electron.ipcRenderer;
// Default Bunyan levels: https://github.com/trentm/node-bunyan#levels
// To make it easier to visually scan logs, we make all levels the same length
const BLANK_LEVEL = ' ';
const LEVELS = {
60: 'fatal',
50: 'error',
40: 'warn ',
30: 'info ',
20: 'debug',
10: 'trace',
};
// Backwards-compatible logging, simple strings and no level (defaulted to INFO)
function now() {
const date = new Date();
return date.toJSON();
}
// To avoid [Object object] in our log since console.log handles non-strings smoothly
function cleanArgsForIPC(args) {
const str = args.map(item => {
if (typeof item !== 'string') {
try {
return JSON.stringify(item);
} catch (error) {
return item;
}
}
return item;
});
return str.join(' ');
}
function log(...args) {
logAtLevel('info', 'INFO ', ...args);
}
if (window.console) {
console._log = console.log;
console.log = log;
}
// The mechanics of preparing a log for publish
function getHeader() {
let header = window.navigator.userAgent;
header += ` node/${window.getNodeVersion()}`;
header += ` env/${window.getEnvironment()}`;
return header;
}
function getLevel(level) {
const text = LEVELS[level];
if (!text) {
return BLANK_LEVEL;
}
return text.toUpperCase();
}
function formatLine(entry) {
return `${getLevel(entry.level)} ${entry.time} ${entry.msg}`;
}
function format(entries) {
return Privacy.redactAll(entries.map(formatLine).join('\n'));
}
function fetch() {
return new Promise(resolve => {
ipc.send('fetch-log');
ipc.on('fetched-log', (event, text) => {
const result = `${getHeader()}\n${format(text)}`;
resolve(result);
});
});
}
const publish = uploadDebugLogs;
// A modern logging interface for the browser
const env = window.getEnvironment();
const IS_PRODUCTION = env === 'production';
const ipcBatcher = createBatcher({
wait: 500,
maxSize: 500,
processBatch: items => {
ipc.send('batch-log', items);
},
});
// The Bunyan API: https://github.com/trentm/node-bunyan#log-method-api
function logAtLevel(level, prefix, ...args) {
if (!IS_PRODUCTION) {
console._log(prefix, now(), ...args);
}
const str = cleanArgsForIPC(args);
const logText = Privacy.redactAll(str);
ipcBatcher.add({
timestamp: Date.now(),
level,
logText,
});
}
window.log = {
fatal: _.partial(logAtLevel, 'fatal', 'FATAL'),
error: _.partial(logAtLevel, 'error', 'ERROR'),
warn: _.partial(logAtLevel, 'warn', 'WARN '),
info: _.partial(logAtLevel, 'info', 'INFO '),
debug: _.partial(logAtLevel, 'debug', 'DEBUG'),
trace: _.partial(logAtLevel, 'trace', 'TRACE'),
fetch,
publish,
};
window.onerror = (message, script, line, col, error) => {
const errorInfo = error && error.stack ? error.stack : JSON.stringify(error);
window.log.error(`Top-level unhandled error: ${errorInfo}`);
};
window.addEventListener('unhandledrejection', rejectionEvent => {
const error = rejectionEvent.reason;
const errorString =
error && error.stack ? error.stack : JSON.stringify(error);
window.log.error(`Top-level unhandled promise rejection: ${errorString}`);
});


@ -87,7 +87,7 @@ const updater = require('./ts/updater/index');
const createTrayIcon = require('./app/tray_icon');
const dockIcon = require('./ts/dock_icon');
const ephemeralConfig = require('./app/ephemeral_config');
const logging = require('./app/logging');
const logging = require('./ts/logging/main_process_logging');
const sql = require('./ts/sql/Server').default;
const sqlChannels = require('./app/sql_channel');
const windowState = require('./app/window_state');
@ -897,8 +897,7 @@ app.on('ready', async () => {
installPermissionsHandler({ session, userConfig });
await logging.initialize();
logger = logging.getLogger();
logger = await logging.initialize();
logger.info('app ready');
logger.info(`starting version ${packageJson.version}`);


@ -163,6 +163,7 @@
"@storybook/react": "5.1.11",
"@types/backbone": "1.4.3",
"@types/blueimp-load-image": "2.23.6",
"@types/bunyan": "1.8.6",
"@types/chai": "4.1.2",
"@types/classnames": "2.2.3",
"@types/config": "0.0.34",


@ -1,4 +1,4 @@
// Copyright 2018-2020 Signal Messenger, LLC
// Copyright 2018-2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
/* global window */
@ -43,7 +43,7 @@ window.subscribeToSystemThemeChange = fn => {
});
};
require('./js/logging');
require('./ts/logging/set_up_renderer_logging');
window.closePermissionsPopup = () =>
ipcRenderer.send('close-permissions-popup');


@ -1,4 +1,4 @@
// Copyright 2017-2020 Signal Messenger, LLC
// Copyright 2017-2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
/* global Whisper, window */
@ -359,7 +359,7 @@ try {
// We pull these dependencies in now, from here, because they have Node.js dependencies
require('./js/logging');
require('./ts/logging/set_up_renderer_logging');
if (config.proxyUrl) {
window.log.info('Using provided proxy url');


@ -1,4 +1,4 @@
// Copyright 2018-2020 Signal Messenger, LLC
// Copyright 2018-2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
/* global window */
@ -120,6 +120,6 @@ function makeSetter(name) {
});
}
require('./js/logging');
require('./ts/logging/set_up_renderer_logging');
window.Backbone = require('backbone');


@ -1,4 +1,4 @@
// Copyright 2019-2020 Signal Messenger, LLC
// Copyright 2019-2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
/* global window */
@ -31,7 +31,7 @@ window.Backbone = require('backbone');
window.localeMessages = ipc.sendSync('locale-data');
require('../js/logging');
require('../ts/logging/set_up_renderer_logging');
window.log.info('sticker-creator starting up...');

ts/firstline.d.ts (vendored, new file, 14 lines)

@ -0,0 +1,14 @@
// Copyright 2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
declare module 'firstline' {
interface FirstLineOpts {
encoding?: BufferEncoding;
lineEnding?: '\n';
}
export default function firstLine(
filePath: string,
opts?: FirstLineOpts
): Promise<string>;
}
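This declaration covers only the slice of the `firstline` package that the logging code needs: a default export that resolves with the first line of a file as a string. The main-process logger uses it (together with isLineAfterDate) when deciding whether a log file's oldest entry is newer than a cutoff. A rough sketch of that kind of check; the helper below is illustrative, not code from this commit:

import readFirstLine from 'firstline';

// Illustrative helper: is the first JSON entry in a log file newer than a cutoff date?
async function firstEntryIsAfter(logFile: string, cutoff: Date): Promise<boolean> {
  const line = await readFirstLine(logFile);
  try {
    const { time } = JSON.parse(line);
    return new Date(time).getTime() >= cutoff.getTime();
  } catch (_err) {
    // An unparseable first line is treated as out of date.
    return false;
  }
}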


@ -1,41 +1,44 @@
// Copyright 2017-2020 Signal Messenger, LLC
// Copyright 2017-2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
// NOTE: Temporarily allow `then` until we convert the entire file to `async` / `await`:
/* eslint-disable more/no-then */
/* eslint-disable no-console */
const path = require('path');
const fs = require('fs');
import * as path from 'path';
import * as fs from 'fs';
import { app, ipcMain as ipc } from 'electron';
import * as bunyan from 'bunyan';
import * as mkdirp from 'mkdirp';
import * as _ from 'lodash';
import readFirstLine from 'firstline';
import { read as readLastLines } from 'read-last-lines';
import rimraf from 'rimraf';
const electron = require('electron');
const bunyan = require('bunyan');
const mkdirp = require('mkdirp');
const _ = require('lodash');
const readFirstLine = require('firstline');
const readLastLines = require('read-last-lines').read;
const rimraf = require('rimraf');
import {
LogEntryType,
LogLevel,
cleanArgs,
getLogLevelString,
isLogEntry,
} from './shared';
const { redactAll } = require('../js/modules/privacy');
declare global {
// We want to extend `Console`, so we need an interface.
// eslint-disable-next-line no-restricted-syntax
interface Console {
_log: typeof console.log;
_warn: typeof console.warn;
_error: typeof console.error;
}
}
const { app, ipcMain: ipc } = electron;
const LEVELS = ['fatal', 'error', 'warn', 'info', 'debug', 'trace'];
let logger;
let globalLogger: undefined | bunyan;
const isRunningFromConsole = Boolean(process.stdout.isTTY);
module.exports = {
initialize,
getLogger,
// for tests only:
isLineAfterDate,
eliminateOutOfDateFiles,
eliminateOldEntries,
fetchLog,
fetch,
};
async function initialize() {
if (logger) {
export async function initialize(): Promise<bunyan> {
if (globalLogger) {
throw new Error('Already called initialize!');
}
@ -59,7 +62,7 @@ async function initialize() {
}
const logFile = path.join(logPath, 'log.log');
const loggerOptions = {
const loggerOptions: bunyan.LoggerOptions = {
name: 'log',
streams: [
{
@ -72,28 +75,36 @@ async function initialize() {
};
if (isRunningFromConsole) {
loggerOptions.streams.push({
loggerOptions.streams?.push({
level: 'debug',
stream: process.stdout,
});
}
logger = bunyan.createLogger(loggerOptions);
const logger = bunyan.createLogger(loggerOptions);
LEVELS.forEach(level => {
ipc.on(`log-${level}`, (first, ...rest) => {
logger[level](...rest);
});
});
ipc.on('batch-log', (first, batch) => {
batch.forEach(item => {
logger[item.level](
{
time: new Date(item.timestamp),
},
item.logText
ipc.on('batch-log', (_first, batch: unknown) => {
if (!Array.isArray(batch)) {
logger.error(
'batch-log IPC event was called with a non-array; dropping logs'
);
return;
}
batch.forEach(item => {
if (isLogEntry(item)) {
const levelString = getLogLevelString(item.level);
logger[levelString](
{
time: item.time,
},
item.msg
);
} else {
logger.error(
'batch-log IPC event was called with an invalid log entry; dropping entry'
);
}
});
});
@ -117,9 +128,13 @@ async function initialize() {
event.sender.send('delete-all-logs-complete');
});
globalLogger = logger;
return logger;
}
async function deleteAllLogs(logPath) {
async function deleteAllLogs(logPath: string): Promise<void> {
return new Promise((resolve, reject) => {
rimraf(
logPath,
@ -137,7 +152,7 @@ async function deleteAllLogs(logPath) {
});
}
async function cleanupLogs(logPath) {
async function cleanupLogs(logPath: string) {
const now = new Date();
const earliestDate = new Date(
Date.UTC(now.getUTCFullYear(), now.getUTCMonth(), now.getUTCDate() - 3)
@ -164,7 +179,8 @@ async function cleanupLogs(logPath) {
}
}
function isLineAfterDate(line, date) {
// Exported for testing only.
export function isLineAfterDate(line: string, date: Readonly<Date>): boolean {
if (!line) {
return false;
}
@ -178,7 +194,17 @@ function isLineAfterDate(line, date) {
}
}
function eliminateOutOfDateFiles(logPath, date) {
// Exported for testing only.
export function eliminateOutOfDateFiles(
logPath: string,
date: Readonly<Date>
): Promise<
Array<{
path: string;
start: boolean;
end: boolean;
}>
> {
const files = fs.readdirSync(logPath);
const paths = files.map(file => path.join(logPath, file));
@ -208,16 +234,15 @@ function eliminateOutOfDateFiles(logPath, date) {
);
}
function eliminateOldEntries(files, date) {
const earliest = date.getTime();
return Promise.all(
// Exported for testing only.
export async function eliminateOldEntries(
files: ReadonlyArray<{ path: string }>,
date: Readonly<Date>
): Promise<void> {
await Promise.all(
_.map(files, file =>
fetchLog(file.path).then(lines => {
const recent = _.filter(
lines,
line => new Date(line.time).getTime() >= earliest
);
const recent = _.filter(lines, line => new Date(line.time) >= date);
const text = _.map(recent, line => JSON.stringify(line)).join('\n');
return fs.writeFileSync(file.path, `${text}\n`);
@ -226,15 +251,8 @@ function eliminateOldEntries(files, date) {
);
}
function getLogger() {
if (!logger) {
throw new Error("Logger hasn't been initialized yet!");
}
return logger;
}
function fetchLog(logFile) {
// Exported for testing only.
export function fetchLog(logFile: string): Promise<Array<LogEntryType>> {
return new Promise((resolve, reject) => {
fs.readFile(logFile, { encoding: 'utf8' }, (err, text) => {
if (err) {
@ -245,7 +263,8 @@ function fetchLog(logFile) {
const data = _.compact(
lines.map(line => {
try {
return _.pick(JSON.parse(line), ['level', 'time', 'msg']);
const result = _.pick(JSON.parse(line), ['level', 'time', 'msg']);
return isLogEntry(result) ? result : null;
} catch (e) {
return null;
}
@ -257,15 +276,15 @@ function fetchLog(logFile) {
});
}
function fetch(logPath) {
// Exported for testing only.
export function fetch(logPath: string): Promise<Array<LogEntryType>> {
const files = fs.readdirSync(logPath);
const paths = files.map(file => path.join(logPath, file));
// creating a manual log entry for the final log result
const now = new Date();
const fileListEntry = {
level: 30, // INFO
time: now.toJSON(),
const fileListEntry: LogEntryType = {
level: LogLevel.Info,
time: new Date().toISOString(),
msg: `Loaded this list of log files from logPath: ${files.join(', ')}`,
};
@ -274,25 +293,14 @@ function fetch(logPath) {
data.push(fileListEntry);
return _.sortBy(data, 'time');
return _.sortBy(data, logEntry => logEntry.time);
});
}
function logAtLevel(level, ...args) {
if (logger) {
// To avoid [Object object] in our log since console.log handles non-strings smoothly
const str = args.map(item => {
if (typeof item !== 'string') {
try {
return JSON.stringify(item);
} catch (e) {
return item;
}
}
return item;
});
logger[level](redactAll(str.join(' ')));
function logAtLevel(level: LogLevel, ...args: ReadonlyArray<unknown>) {
if (globalLogger) {
const levelString = getLogLevelString(level);
globalLogger[levelString](cleanArgs(args));
} else if (isRunningFromConsole) {
console._log(...args);
}
@ -301,9 +309,9 @@ function logAtLevel(level, ...args) {
// This blows up using mocha --watch, so we ensure it is run just once
if (!console._log) {
console._log = console.log;
console.log = _.partial(logAtLevel, 'info');
console.log = _.partial(logAtLevel, LogLevel.Info);
console._error = console.error;
console.error = _.partial(logAtLevel, 'error');
console.error = _.partial(logAtLevel, LogLevel.Error);
console._warn = console.warn;
console.warn = _.partial(logAtLevel, 'warn');
console.warn = _.partial(logAtLevel, LogLevel.Warn);
}


@ -0,0 +1,150 @@
// Copyright 2017-2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
/* eslint-env node */
/* eslint-disable no-console */
import { ipcRenderer as ipc } from 'electron';
import _ from 'lodash';
import { levelFromName } from 'bunyan';
import { uploadDebugLogs } from './debuglogs';
import { redactAll } from '../../js/modules/privacy';
import { createBatcher } from '../util/batcher';
import {
LogEntryType,
LogLevel,
cleanArgs,
getLogLevelString,
isLogEntry,
} from './shared';
import { reallyJsonStringify } from '../util/reallyJsonStringify';
// To make it easier to visually scan logs, we make all levels the same length
const levelMaxLength: number = Object.keys(levelFromName).reduce(
(maxLength, level) => Math.max(maxLength, level.length),
0
);
// Backwards-compatible logging, simple strings and no level (defaulted to INFO)
function now() {
const date = new Date();
return date.toJSON();
}
function log(...args: ReadonlyArray<unknown>) {
logAtLevel(LogLevel.Info, ...args);
}
if (window.console) {
console._log = console.log;
console.log = log;
}
// The mechanics of preparing a log for publish
function getHeader() {
let header = window.navigator.userAgent;
header += ` node/${window.getNodeVersion()}`;
header += ` env/${window.getEnvironment()}`;
return header;
}
const getLevel = _.memoize((level: LogLevel): string => {
const text = getLogLevelString(level);
return text.toUpperCase().padEnd(levelMaxLength, ' ');
});
function formatLine(mightBeEntry: Readonly<unknown>): string {
const entry: LogEntryType = isLogEntry(mightBeEntry)
? mightBeEntry
: {
level: LogLevel.Error,
msg: `Invalid IPC data when fetching logs. Here's what we could recover: ${reallyJsonStringify(
mightBeEntry
)}`,
time: new Date().toISOString(),
};
return `${getLevel(entry.level)} ${entry.time} ${entry.msg}`;
}
function fetch(): Promise<string> {
return new Promise(resolve => {
ipc.send('fetch-log');
ipc.on('fetched-log', (_event, logEntries: unknown) => {
let body: string;
if (Array.isArray(logEntries)) {
body = logEntries.map(formatLine).join('\n');
} else {
const entry: LogEntryType = {
level: LogLevel.Error,
msg: 'Invalid IPC data when fetching logs; dropping all logs',
time: new Date().toISOString(),
};
body = formatLine(entry);
}
const result = `${getHeader()}\n${redactAll(body)}`;
resolve(result);
});
});
}
const publish = uploadDebugLogs;
// A modern logging interface for the browser
const env = window.getEnvironment();
const IS_PRODUCTION = env === 'production';
const ipcBatcher = createBatcher({
wait: 500,
maxSize: 500,
processBatch: (items: Array<LogEntryType>) => {
ipc.send('batch-log', items);
},
});
// The Bunyan API: https://github.com/trentm/node-bunyan#log-method-api
function logAtLevel(level: LogLevel, ...args: ReadonlyArray<unknown>): void {
if (!IS_PRODUCTION) {
const prefix = getLogLevelString(level)
.toUpperCase()
.padEnd(levelMaxLength, ' ');
console._log(prefix, now(), ...args);
}
ipcBatcher.add({
level,
msg: cleanArgs(args),
time: new Date().toISOString(),
});
}
window.log = {
fatal: _.partial(logAtLevel, LogLevel.Fatal),
error: _.partial(logAtLevel, LogLevel.Error),
warn: _.partial(logAtLevel, LogLevel.Warn),
info: _.partial(logAtLevel, LogLevel.Info),
debug: _.partial(logAtLevel, LogLevel.Debug),
trace: _.partial(logAtLevel, LogLevel.Trace),
fetch,
publish,
};
window.onerror = (_message, _script, _line, _col, error) => {
const errorInfo = error && error.stack ? error.stack : JSON.stringify(error);
window.log.error(`Top-level unhandled error: ${errorInfo}`);
};
window.addEventListener('unhandledrejection', rejectionEvent => {
const error = rejectionEvent.reason;
const errorString =
error && error.stack ? error.stack : JSON.stringify(error);
window.log.error(`Top-level unhandled promise rejection: ${errorString}`);
});
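Once this module runs inside a preload script, renderer code logs through the `window.log` object it installs. A small usage sketch; the messages and the `.then` handler are arbitrary examples, not code from this commit:

// Anywhere in renderer code, after the preload has run:
window.log.info('main window rendered');
window.log.warn('low disk space', { availableBytes: 1024 });

// The debug-log UI can pull the redacted, formatted log text:
void window.log.fetch().then(text => {
  window.log.debug(`collected ${text.length} characters of logs`);
});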

ts/logging/shared.ts (new file, 81 lines)

@ -0,0 +1,81 @@
// Copyright 2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import * as bunyan from 'bunyan';
import { redactAll } from '../../js/modules/privacy';
import { missingCaseError } from '../util/missingCaseError';
import { reallyJsonStringify } from '../util/reallyJsonStringify';
// These match [Bunyan's recommendations][0].
// [0]: https://www.npmjs.com/package/bunyan#levels
export enum LogLevel {
Fatal = 60,
Error = 50,
Warn = 40,
Info = 30,
Debug = 20,
Trace = 10,
}
// These match [Bunyan's core fields][1].
// [1]: https://www.npmjs.com/package/bunyan#core-fields
export type LogEntryType = {
level: LogLevel;
msg: string;
time: string;
};
const logLevels = new Set<LogLevel>([
LogLevel.Fatal,
LogLevel.Error,
LogLevel.Warn,
LogLevel.Info,
LogLevel.Debug,
LogLevel.Trace,
]);
function isLogLevel(value: unknown): value is LogLevel {
return typeof value === 'number' && logLevels.has(value);
}
function isValidTime(value: unknown): value is string {
return typeof value === 'string' && !Number.isNaN(new Date(value).getTime());
}
export function isLogEntry(value: unknown): value is LogEntryType {
if (!value || typeof value !== 'object' || Array.isArray(value)) {
return false;
}
const { level, time, msg } = value as Record<string, unknown>;
return typeof msg === 'string' && isLogLevel(level) && isValidTime(time);
}
export function getLogLevelString(value: LogLevel): bunyan.LogLevelString {
switch (value) {
case LogLevel.Fatal:
return 'fatal';
case LogLevel.Error:
return 'error';
case LogLevel.Warn:
return 'warn';
case LogLevel.Info:
return 'info';
case LogLevel.Debug:
return 'debug';
case LogLevel.Trace:
return 'trace';
default:
throw missingCaseError(value);
}
}
export function cleanArgs(args: ReadonlyArray<unknown>): string {
return redactAll(
args
.map(item =>
typeof item === 'string' ? item : reallyJsonStringify(item)
)
.join(' ')
);
}
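Together these helpers let both processes treat log entries arriving over IPC as untrusted data: isLogEntry narrows an `unknown` value to LogEntryType, and getLogLevelString maps the numeric Bunyan level back to a method name. A brief sketch; the sample object is made up for illustration:

import { LogLevel, getLogLevelString, isLogEntry } from './shared';

const incoming: unknown = {
  level: LogLevel.Warn,
  msg: 'disk space is running low',
  time: new Date().toISOString(),
};

if (isLogEntry(incoming)) {
  // Narrowed to LogEntryType: level is a known Bunyan number and time parses as a date.
  const method = getLogLevelString(incoming.level); // 'warn'
  console.log(method, incoming.time, incoming.msg);
} else {
  console.error('dropping malformed log entry');
}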


@ -0,0 +1,88 @@
// Copyright 2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import { reallyJsonStringify } from '../../util/reallyJsonStringify';
describe('reallyJsonStringify', () => {
it('returns the same thing as JSON.stringify when JSON.stringify returns a string', () => {
[
null,
true,
false,
0,
-0,
123,
-Infinity,
Infinity,
NaN,
'',
'foo',
[],
[1],
{},
{ hi: 5 },
new Date(),
new Set([1, 2, 3]),
new Map([['foo', 'bar']]),
Promise.resolve(123),
{
toJSON() {
return 'foo';
},
},
].forEach(value => {
const expected = JSON.stringify(value);
const actual = reallyJsonStringify(value);
assert.strictEqual(actual, expected);
assert.isString(actual);
});
});
it('returns a string when JSON.stringify returns undefined', () => {
const check = (value: unknown, expected: string): void => {
const actual = reallyJsonStringify(value);
assert.strictEqual(actual, expected);
// This ensures that our test is set up correctly, not the code under test.
assert.isUndefined(JSON.stringify(value));
};
check(undefined, '[object Undefined]');
check(Symbol('foo'), '[object Symbol]');
check(
{
toJSON() {
return undefined;
},
},
'[object Object]'
);
});
it('returns a string when JSON.stringify would error', () => {
const check = (value: unknown, expected: string): void => {
const actual = reallyJsonStringify(value);
assert.strictEqual(actual, expected);
// This ensures that our test is set up correctly, not the code under test.
assert.throws(() => JSON.stringify(value));
};
check(BigInt(123), '[object BigInt]');
const a: Record<string, unknown> = {};
const b = { a };
a.b = b;
check(a, '[object Object]');
check([a], '[object Array]');
const bad = {
toJSON() {
throw new Error("don't even try to stringify me");
},
};
check(bad, '[object Object]');
check([bad], '[object Array]');
});
});


@ -1,37 +1,49 @@
// Copyright 2018-2020 Signal Messenger, LLC
// Copyright 2018-2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
// NOTE: Temporarily allow `then` until we convert the entire file to `async` / `await`:
/* eslint-disable more/no-then */
const fs = require('fs');
const path = require('path');
import * as fs from 'fs';
import * as fse from 'fs-extra';
import * as os from 'os';
import * as path from 'path';
import { expect } from 'chai';
const tmp = require('tmp');
const { expect } = require('chai');
const {
import {
eliminateOutOfDateFiles,
eliminateOldEntries,
isLineAfterDate,
fetchLog,
fetch,
} = require('../../app/logging');
} from '../logging/main_process_logging';
describe('app/logging', () => {
let basePath;
let tmpDir;
beforeEach(() => {
tmpDir = tmp.dirSync({
unsafeCleanup: true,
});
basePath = tmpDir.name;
describe('logging', () => {
const fakeLogEntry = ({
level = 30,
msg = 'hello world',
time = new Date().toISOString(),
}: {
level?: number;
msg?: string;
time?: string;
}): Record<string, unknown> => ({
level,
msg,
time,
});
afterEach(done => {
// we need the unsafe option to recursively remove the directory
tmpDir.removeCallback(done);
const fakeLogLine = (...args: Parameters<typeof fakeLogEntry>): string =>
JSON.stringify(fakeLogEntry(...args));
let tmpDir: string;
beforeEach(async () => {
tmpDir = await fs.promises.mkdtemp(path.join(os.tmpdir(), 'signal-test-'));
});
afterEach(async () => {
await fse.remove(tmpDir);
});
describe('#isLineAfterDate', () => {
@ -72,20 +84,20 @@ describe('app/logging', () => {
it('deletes an empty file', () => {
const date = new Date();
const log = '\n';
const target = path.join(basePath, 'log.log');
const target = path.join(tmpDir, 'log.log');
fs.writeFileSync(target, log);
return eliminateOutOfDateFiles(basePath, date).then(() => {
return eliminateOutOfDateFiles(tmpDir, date).then(() => {
expect(fs.existsSync(target)).to.equal(false);
});
});
it('deletes a file with invalid JSON lines', () => {
const date = new Date();
const log = '{{}\n';
const target = path.join(basePath, 'log.log');
const target = path.join(tmpDir, 'log.log');
fs.writeFileSync(target, log);
return eliminateOutOfDateFiles(basePath, date).then(() => {
return eliminateOutOfDateFiles(tmpDir, date).then(() => {
expect(fs.existsSync(target)).to.equal(false);
});
});
@ -97,10 +109,10 @@ describe('app/logging', () => {
JSON.stringify({ time: '2018-01-04T19:17:02.014Z' }),
JSON.stringify({ time: '2018-01-04T19:17:03.014Z' }),
].join('\n');
const target = path.join(basePath, 'log.log');
const target = path.join(tmpDir, 'log.log');
fs.writeFileSync(target, contents);
return eliminateOutOfDateFiles(basePath, date).then(() => {
return eliminateOutOfDateFiles(tmpDir, date).then(() => {
expect(fs.existsSync(target)).to.equal(false);
});
});
@ -112,10 +124,10 @@ describe('app/logging', () => {
JSON.stringify({ time: '2018-01-04T19:17:02.014Z' }),
JSON.stringify({ time: '2018-01-04T19:17:03.014Z' }),
].join('\n');
const target = path.join(basePath, 'log.log');
const target = path.join(tmpDir, 'log.log');
fs.writeFileSync(target, contents);
return eliminateOutOfDateFiles(basePath, date).then(() => {
return eliminateOutOfDateFiles(tmpDir, date).then(() => {
expect(fs.existsSync(target)).to.equal(true);
});
});
@ -127,10 +139,10 @@ describe('app/logging', () => {
JSON.stringify({ time: '2018-01-04T19:17:02.014Z' }),
JSON.stringify({ time: '2018-01-04T19:17:03.014Z' }),
].join('\n');
const target = path.join(basePath, 'log.log');
const target = path.join(tmpDir, 'log.log');
fs.writeFileSync(target, contents);
return eliminateOutOfDateFiles(basePath, date).then(() => {
return eliminateOutOfDateFiles(tmpDir, date).then(() => {
expect(fs.existsSync(target)).to.equal(true);
});
});
@ -141,17 +153,17 @@ describe('app/logging', () => {
const date = new Date('2018-01-04T19:17:01.000Z');
const contents = [
'random line',
JSON.stringify({ time: '2018-01-04T19:17:01.014Z' }),
JSON.stringify({ time: '2018-01-04T19:17:02.014Z' }),
JSON.stringify({ time: '2018-01-04T19:17:03.014Z' }),
fakeLogLine({ time: '2018-01-04T19:17:01.014Z' }),
fakeLogLine({ time: '2018-01-04T19:17:02.014Z' }),
fakeLogLine({ time: '2018-01-04T19:17:03.014Z' }),
].join('\n');
const expected = [
JSON.stringify({ time: '2018-01-04T19:17:01.014Z' }),
JSON.stringify({ time: '2018-01-04T19:17:02.014Z' }),
JSON.stringify({ time: '2018-01-04T19:17:03.014Z' }),
].join('\n');
fakeLogEntry({ time: '2018-01-04T19:17:01.014Z' }),
fakeLogEntry({ time: '2018-01-04T19:17:02.014Z' }),
fakeLogEntry({ time: '2018-01-04T19:17:03.014Z' }),
];
const target = path.join(basePath, 'log.log');
const target = path.join(tmpDir, 'log.log');
const files = [
{
path: target,
@ -161,22 +173,26 @@ describe('app/logging', () => {
fs.writeFileSync(target, contents);
return eliminateOldEntries(files, date).then(() => {
expect(fs.readFileSync(target, 'utf8')).to.equal(`${expected}\n`);
const actualEntries = fs
.readFileSync(target, 'utf8')
.split('\n')
.map(line => line.trim())
.filter(Boolean)
.map(line => JSON.parse(line));
expect(actualEntries).to.deep.equal(expected);
});
});
it('preserves all lines if before target date', () => {
const date = new Date('2018-01-04T19:17:03.000Z');
const contents = [
'random line',
JSON.stringify({ time: '2018-01-04T19:17:01.014Z' }),
JSON.stringify({ time: '2018-01-04T19:17:02.014Z' }),
JSON.stringify({ time: '2018-01-04T19:17:03.014Z' }),
].join('\n');
const expected = [
JSON.stringify({ time: '2018-01-04T19:17:03.014Z' }),
fakeLogLine({ time: '2018-01-04T19:17:01.014Z' }),
fakeLogLine({ time: '2018-01-04T19:17:02.014Z' }),
fakeLogLine({ time: '2018-01-04T19:17:03.014Z' }),
].join('\n');
const expected = fakeLogEntry({ time: '2018-01-04T19:17:03.014Z' });
const target = path.join(basePath, 'log.log');
const target = path.join(tmpDir, 'log.log');
const files = [
{
path: target,
@ -186,7 +202,10 @@ describe('app/logging', () => {
fs.writeFileSync(target, contents);
return eliminateOldEntries(files, date).then(() => {
expect(fs.readFileSync(target, 'utf8')).to.equal(`${expected}\n`);
// There should only be 1 line, so we can parse it safely.
expect(JSON.parse(fs.readFileSync(target, 'utf8'))).to.deep.equal(
expected
);
});
});
});
@ -207,13 +226,12 @@ describe('app/logging', () => {
});
it('returns empty array if file has no valid JSON lines', () => {
const contents = 'line 1\nline2\n';
const expected = [];
const target = path.join(basePath, 'test.log');
const target = path.join(tmpDir, 'test.log');
fs.writeFileSync(target, contents);
return fetchLog(target).then(result => {
expect(result).to.deep.equal(expected);
expect(result).to.deep.equal([]);
});
});
it('returns just three fields in each returned line', () => {
@ -221,33 +239,33 @@ describe('app/logging', () => {
JSON.stringify({
one: 1,
two: 2,
level: 1,
time: 2,
msg: 3,
level: 30,
time: '2020-04-20T06:09:08.000Z',
msg: 'message 1',
}),
JSON.stringify({
one: 1,
two: 2,
level: 2,
time: 3,
msg: 4,
level: 40,
time: '2021-04-20T06:09:08.000Z',
msg: 'message 2',
}),
'',
].join('\n');
const expected = [
{
level: 1,
time: 2,
msg: 3,
level: 30,
time: '2020-04-20T06:09:08.000Z',
msg: 'message 1',
},
{
level: 2,
time: 3,
msg: 4,
level: 40,
time: '2021-04-20T06:09:08.000Z',
msg: 'message 2',
},
];
const target = path.join(basePath, 'test.log');
const target = path.join(tmpDir, 'test.log');
fs.writeFileSync(target, contents);
@ -259,30 +277,30 @@ describe('app/logging', () => {
describe('#fetch', () => {
it('returns single entry if no files', () => {
return fetch(basePath).then(results => {
return fetch(tmpDir).then(results => {
expect(results).to.have.length(1);
expect(results[0].msg).to.match(/Loaded this list/);
});
});
it('returns sorted entries from all files', () => {
const first = [
JSON.stringify({ msg: 2, time: '2018-01-04T19:17:05.014Z' }),
fakeLogLine({ msg: '2', time: '2018-01-04T19:17:05.014Z' }),
'',
].join('\n');
const second = [
JSON.stringify({ msg: 1, time: '2018-01-04T19:17:00.014Z' }),
JSON.stringify({ msg: 3, time: '2018-01-04T19:18:00.014Z' }),
fakeLogLine({ msg: '1', time: '2018-01-04T19:17:00.014Z' }),
fakeLogLine({ msg: '3', time: '2018-01-04T19:18:00.014Z' }),
'',
].join('\n');
fs.writeFileSync(path.join(basePath, 'first.log'), first);
fs.writeFileSync(path.join(basePath, 'second.log'), second);
fs.writeFileSync(path.join(tmpDir, 'first.log'), first);
fs.writeFileSync(path.join(tmpDir, 'second.log'), second);
return fetch(basePath).then(results => {
return fetch(tmpDir).then(results => {
expect(results).to.have.length(4);
expect(results[0].msg).to.equal(1);
expect(results[1].msg).to.equal(2);
expect(results[2].msg).to.equal(3);
expect(results[0].msg).to.equal('1');
expect(results[1].msg).to.equal('2');
expect(results[2].msg).to.equal('3');
});
});
});


@ -24,7 +24,7 @@ window.waitForAllBatchers = async () => {
export type BatcherOptionsType<ItemType> = {
wait: number;
maxSize: number;
processBatch: (items: Array<ItemType>) => Promise<void>;
processBatch: (items: Array<ItemType>) => void | Promise<void>;
};
export type BatcherType<ItemType> = {
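Widening `processBatch` to `void | Promise<void>` is what lets the renderer logger above hand batches directly to the synchronous `ipc.send` without wrapping it in an async function. A hedged sketch of the same pattern; the `Entry` type and the console sink are stand-ins, not code from this commit:

import { createBatcher } from '../util/batcher';

type Entry = { level: number; msg: string; time: string };

const batcher = createBatcher<Entry>({
  wait: 500,    // flush after 500ms of inactivity...
  maxSize: 500, // ...or as soon as 500 entries have accumulated
  // Returning void (rather than a Promise) is now accepted.
  processBatch: (items: Array<Entry>) => {
    console.log(`flushing ${items.length} log entries`);
  },
});

batcher.add({ level: 30, msg: 'hello', time: new Date().toISOString() });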


@ -0,0 +1,43 @@
// Copyright 2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
/**
* Returns `JSON.stringify(value)` if that returns a string, otherwise returns a value
* like `[object Object]` or `[object Undefined]`.
*
* `JSON.stringify` doesn't always return a string. Some examples:
*
* JSON.stringify(undefined) === undefined
*
* JSON.stringify(Symbol()) === undefined
*
* JSON.stringify({ toJSON() {} }) === undefined
*
* const a = {};
* const b = { a };
* a.b = a;
* JSON.stringify(a); // => Throws a TypeError
*
* JSON.stringify(123n); // => Throws a TypeError
*
* const scary = {
* toJSON() {
* throw new Error('uh oh');
* }
* };
* JSON.stringify(scary); // => Throws "uh oh"
*
* This makes sure we return a string and don't throw.
*/
export function reallyJsonStringify(value: unknown): string {
let result: unknown;
try {
result = JSON.stringify(value);
} catch (_err) {
result = undefined;
}
return typeof result === 'string'
? result
: Object.prototype.toString.call(value);
}
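The behavior difference in one quick sketch (values chosen purely for illustration; the import path assumes a sibling file in ts/util):

import { reallyJsonStringify } from './reallyJsonStringify';

reallyJsonStringify({ ok: true });  // '{"ok":true}', same as JSON.stringify
reallyJsonStringify(undefined);     // '[object Undefined]' instead of undefined
reallyJsonStringify(Symbol('x'));   // '[object Symbol]' instead of undefined

const a: Record<string, unknown> = {};
a.self = a;                         // circular reference
reallyJsonStringify(a);             // '[object Object]' instead of throwing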

ts/window.d.ts (vendored, 9 lines changed)

@ -24,6 +24,7 @@ import {
} from './libsignal.d';
import { ContactRecordIdentityState, TextSecureType } from './textsecure.d';
import { WebAPIConnectType } from './textsecure/WebAPI';
import { uploadDebugLogs } from './logging/debuglogs';
import { CallingClass } from './services/calling';
import * as Groups from './groups';
import * as Crypto from './Crypto';
@ -142,6 +143,7 @@ declare global {
getInteractionMode: () => 'mouse' | 'keyboard';
getMediaCameraPermissions: () => Promise<boolean>;
getMediaPermissions: () => Promise<boolean>;
getNodeVersion: () => string;
getServerPublicParams: () => string;
getSfuUrl: () => string;
getSocketStatus: () => number;
@ -170,9 +172,14 @@ declare global {
};
libsignal: LibSignalType;
log: {
fatal: LoggerType;
info: LoggerType;
warn: LoggerType;
error: LoggerType;
debug: LoggerType;
trace: LoggerType;
fetch: () => Promise<string>;
publish: typeof uploadDebugLogs;
};
nodeSetImmediate: typeof setImmediate;
normalizeUuids: (obj: any, paths: Array<string>, context: string) => void;
@ -592,7 +599,7 @@ export class CanvasVideoRenderer {
constructor(canvas: Ref<HTMLCanvasElement>);
}
export type LoggerType = (...args: Array<any>) => void;
export type LoggerType = (...args: Array<unknown>) => void;
export type WhisperType = {
events: {


@ -2112,6 +2112,13 @@
"@types/connect" "*"
"@types/node" "*"
"@types/bunyan@1.8.6":
version "1.8.6"
resolved "https://registry.yarnpkg.com/@types/bunyan/-/bunyan-1.8.6.tgz#6527641cca30bedec5feb9ab527b7803b8000582"
integrity sha512-YiozPOOsS6bIuz31ilYqR5SlLif4TBWsousN2aCWLi5233nZSX19tFbcQUPdR7xJ8ypPyxkCGNxg0CIV5n9qxQ==
dependencies:
"@types/node" "*"
"@types/chai@4.1.2":
version "4.1.2"
resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.1.2.tgz#f1af664769cfb50af805431c407425ed619daa21"