Convert logging infrastructure to TypeScript
parent 10ace53845
commit a8787e7c9e
19 changed files with 595 additions and 331 deletions
317
ts/logging/main_process_logging.ts
Normal file
@@ -0,0 +1,317 @@
// Copyright 2017-2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

// NOTE: Temporarily allow `then` until we convert the entire file to `async` / `await`:
/* eslint-disable more/no-then */
/* eslint-disable no-console */

import * as path from 'path';
import * as fs from 'fs';
import { app, ipcMain as ipc } from 'electron';
import * as bunyan from 'bunyan';
import * as mkdirp from 'mkdirp';
import * as _ from 'lodash';
import readFirstLine from 'firstline';
import { read as readLastLines } from 'read-last-lines';
import rimraf from 'rimraf';

import {
  LogEntryType,
  LogLevel,
  cleanArgs,
  getLogLevelString,
  isLogEntry,
} from './shared';

declare global {
  // We want to extend `Console`, so we need an interface.
  // eslint-disable-next-line no-restricted-syntax
  interface Console {
    _log: typeof console.log;
    _warn: typeof console.warn;
    _error: typeof console.error;
  }
}

let globalLogger: undefined | bunyan;

const isRunningFromConsole = Boolean(process.stdout.isTTY);

export async function initialize(): Promise<bunyan> {
  if (globalLogger) {
    throw new Error('Already called initialize!');
  }

  const basePath = app.getPath('userData');
  const logPath = path.join(basePath, 'logs');
  mkdirp.sync(logPath);

  try {
    await cleanupLogs(logPath);
  } catch (error) {
    const errorString = `Failed to clean logs; deleting all. Error: ${error.stack}`;
    console.error(errorString);
    await deleteAllLogs(logPath);
    mkdirp.sync(logPath);

    // If we want this log entry to persist on disk, we need to wait until we've
    // set up our logging infrastructure.
    setTimeout(() => {
      console.error(errorString);
    }, 500);
  }

  const logFile = path.join(logPath, 'log.log');
  const loggerOptions: bunyan.LoggerOptions = {
    name: 'log',
    streams: [
      {
        type: 'rotating-file',
        path: logFile,
        period: '1d',
        count: 3,
      },
    ],
  };

  if (isRunningFromConsole) {
    loggerOptions.streams?.push({
      level: 'debug',
      stream: process.stdout,
    });
  }

  const logger = bunyan.createLogger(loggerOptions);

  ipc.on('batch-log', (_first, batch: unknown) => {
    if (!Array.isArray(batch)) {
      logger.error(
        'batch-log IPC event was called with a non-array; dropping logs'
      );
      return;
    }

    batch.forEach(item => {
      if (isLogEntry(item)) {
        const levelString = getLogLevelString(item.level);
        logger[levelString](
          {
            time: item.time,
          },
          item.msg
        );
      } else {
        logger.error(
          'batch-log IPC event was called with an invalid log entry; dropping entry'
        );
      }
    });
  });

  ipc.on('fetch-log', event => {
    fetch(logPath).then(
      data => {
        event.sender.send('fetched-log', data);
      },
      error => {
        logger.error(`Problem loading log from disk: ${error.stack}`);
      }
    );
  });

  ipc.on('delete-all-logs', async event => {
    try {
      await deleteAllLogs(logPath);
    } catch (error) {
      logger.error(`Problem deleting all logs: ${error.stack}`);
    }

    event.sender.send('delete-all-logs-complete');
  });

  globalLogger = logger;

  return logger;
}

async function deleteAllLogs(logPath: string): Promise<void> {
  return new Promise((resolve, reject) => {
    rimraf(
      logPath,
      {
        disableGlob: true,
      },
      error => {
        if (error) {
          return reject(error);
        }

        return resolve();
      }
    );
  });
}

async function cleanupLogs(logPath: string) {
  const now = new Date();
  const earliestDate = new Date(
    Date.UTC(now.getUTCFullYear(), now.getUTCMonth(), now.getUTCDate() - 3)
  );

  try {
    const remaining = await eliminateOutOfDateFiles(logPath, earliestDate);
    const files = _.filter(remaining, file => !file.start && file.end);

    if (!files.length) {
      return;
    }

    await eliminateOldEntries(files, earliestDate);
  } catch (error) {
    console.error(
      'Error cleaning logs; deleting and starting over from scratch.',
      error.stack
    );

    // delete and re-create the log directory
    await deleteAllLogs(logPath);
    mkdirp.sync(logPath);
  }
}

// Exported for testing only.
export function isLineAfterDate(line: string, date: Readonly<Date>): boolean {
  if (!line) {
    return false;
  }

  try {
    const data = JSON.parse(line);
    return new Date(data.time).getTime() > date.getTime();
  } catch (e) {
    console.log('error parsing log line', e.stack, line);
    return false;
  }
}

// Exported for testing only.
export function eliminateOutOfDateFiles(
  logPath: string,
  date: Readonly<Date>
): Promise<
  Array<{
    path: string;
    start: boolean;
    end: boolean;
  }>
> {
  const files = fs.readdirSync(logPath);
  const paths = files.map(file => path.join(logPath, file));

  return Promise.all(
    _.map(paths, target =>
      Promise.all([readFirstLine(target), readLastLines(target, 2)]).then(
        results => {
          const start = results[0];
          const end = results[1].split('\n');

          const file = {
            path: target,
            start: isLineAfterDate(start, date),
            end:
              isLineAfterDate(end[end.length - 1], date) ||
              isLineAfterDate(end[end.length - 2], date),
          };

          if (!file.start && !file.end) {
            fs.unlinkSync(file.path);
          }

          return file;
        }
      )
    )
  );
}

// Exported for testing only.
export async function eliminateOldEntries(
  files: ReadonlyArray<{ path: string }>,
  date: Readonly<Date>
): Promise<void> {
  await Promise.all(
    _.map(files, file =>
      fetchLog(file.path).then(lines => {
        const recent = _.filter(lines, line => new Date(line.time) >= date);
        const text = _.map(recent, line => JSON.stringify(line)).join('\n');

        return fs.writeFileSync(file.path, `${text}\n`);
      })
    )
  );
}

// Exported for testing only.
export function fetchLog(logFile: string): Promise<Array<LogEntryType>> {
  return new Promise((resolve, reject) => {
    fs.readFile(logFile, { encoding: 'utf8' }, (err, text) => {
      if (err) {
        return reject(err);
      }

      const lines = _.compact(text.split('\n'));
      const data = _.compact(
        lines.map(line => {
          try {
            const result = _.pick(JSON.parse(line), ['level', 'time', 'msg']);
            return isLogEntry(result) ? result : null;
          } catch (e) {
            return null;
          }
        })
      );

      return resolve(data);
    });
  });
}

// Exported for testing only.
export function fetch(logPath: string): Promise<Array<LogEntryType>> {
  const files = fs.readdirSync(logPath);
  const paths = files.map(file => path.join(logPath, file));

  // creating a manual log entry for the final log result
  const fileListEntry: LogEntryType = {
    level: LogLevel.Info,
    time: new Date().toISOString(),
    msg: `Loaded this list of log files from logPath: ${files.join(', ')}`,
  };

  return Promise.all(paths.map(fetchLog)).then(results => {
    const data = _.flatten(results);

    data.push(fileListEntry);

    return _.sortBy(data, logEntry => logEntry.time);
  });
}

function logAtLevel(level: LogLevel, ...args: ReadonlyArray<unknown>) {
  if (globalLogger) {
    const levelString = getLogLevelString(level);
    globalLogger[levelString](cleanArgs(args));
  } else if (isRunningFromConsole) {
    console._log(...args);
  }
}

// This blows up using mocha --watch, so we ensure it is run just once
if (!console._log) {
  console._log = console.log;
  console.log = _.partial(logAtLevel, LogLevel.Info);
  console._error = console.error;
  console.error = _.partial(logAtLevel, LogLevel.Error);
  console._warn = console.warn;
  console.warn = _.partial(logAtLevel, LogLevel.Warn);
}
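Reviewer note: for context, a minimal sketch of how the main process might consume this module. This snippet is not part of the diff; the call site and file placement are assumptions.

// Hypothetical main-process call site (not in this commit).
import { app } from 'electron';
import * as logging from './ts/logging/main_process_logging';

app.on('ready', async () => {
  // Sets up the rotating bunyan logger plus the batch-log / fetch-log /
  // delete-all-logs IPC handlers, and patches console.* in this process.
  const logger = await logging.initialize();
  logger.info('main process logging initialized');
});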
150
ts/logging/set_up_renderer_logging.ts
Normal file
@@ -0,0 +1,150 @@
// Copyright 2017-2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

/* eslint-env node */

/* eslint-disable no-console */

import { ipcRenderer as ipc } from 'electron';
import _ from 'lodash';
import { levelFromName } from 'bunyan';

import { uploadDebugLogs } from './debuglogs';
import { redactAll } from '../../js/modules/privacy';
import { createBatcher } from '../util/batcher';
import {
  LogEntryType,
  LogLevel,
  cleanArgs,
  getLogLevelString,
  isLogEntry,
} from './shared';
import { reallyJsonStringify } from '../util/reallyJsonStringify';

// To make it easier to visually scan logs, we make all levels the same length
const levelMaxLength: number = Object.keys(levelFromName).reduce(
  (maxLength, level) => Math.max(maxLength, level.length),
  0
);

// Backwards-compatible logging, simple strings and no level (defaulted to INFO)
function now() {
  const date = new Date();
  return date.toJSON();
}

function log(...args: ReadonlyArray<unknown>) {
  logAtLevel(LogLevel.Info, ...args);
}

if (window.console) {
  console._log = console.log;
  console.log = log;
}

// The mechanics of preparing a log for publish

function getHeader() {
  let header = window.navigator.userAgent;

  header += ` node/${window.getNodeVersion()}`;
  header += ` env/${window.getEnvironment()}`;

  return header;
}

const getLevel = _.memoize((level: LogLevel): string => {
  const text = getLogLevelString(level);
  return text.toUpperCase().padEnd(levelMaxLength, ' ');
});

function formatLine(mightBeEntry: Readonly<unknown>): string {
  const entry: LogEntryType = isLogEntry(mightBeEntry)
    ? mightBeEntry
    : {
        level: LogLevel.Error,
        msg: `Invalid IPC data when fetching logs. Here's what we could recover: ${reallyJsonStringify(
          mightBeEntry
        )}`,
        time: new Date().toISOString(),
      };

  return `${getLevel(entry.level)} ${entry.time} ${entry.msg}`;
}

function fetch(): Promise<string> {
  return new Promise(resolve => {
    ipc.send('fetch-log');

    ipc.on('fetched-log', (_event, logEntries: unknown) => {
      let body: string;
      if (Array.isArray(logEntries)) {
        body = logEntries.map(formatLine).join('\n');
      } else {
        const entry: LogEntryType = {
          level: LogLevel.Error,
          msg: 'Invalid IPC data when fetching logs; dropping all logs',
          time: new Date().toISOString(),
        };
        body = formatLine(entry);
      }

      const result = `${getHeader()}\n${redactAll(body)}`;
      resolve(result);
    });
  });
}

const publish = uploadDebugLogs;

// A modern logging interface for the browser

const env = window.getEnvironment();
const IS_PRODUCTION = env === 'production';

const ipcBatcher = createBatcher({
  wait: 500,
  maxSize: 500,
  processBatch: (items: Array<LogEntryType>) => {
    ipc.send('batch-log', items);
  },
});

// The Bunyan API: https://github.com/trentm/node-bunyan#log-method-api
function logAtLevel(level: LogLevel, ...args: ReadonlyArray<unknown>): void {
  if (!IS_PRODUCTION) {
    const prefix = getLogLevelString(level)
      .toUpperCase()
      .padEnd(levelMaxLength, ' ');
    console._log(prefix, now(), ...args);
  }

  ipcBatcher.add({
    level,
    msg: cleanArgs(args),
    time: new Date().toISOString(),
  });
}

window.log = {
  fatal: _.partial(logAtLevel, LogLevel.Fatal),
  error: _.partial(logAtLevel, LogLevel.Error),
  warn: _.partial(logAtLevel, LogLevel.Warn),
  info: _.partial(logAtLevel, LogLevel.Info),
  debug: _.partial(logAtLevel, LogLevel.Debug),
  trace: _.partial(logAtLevel, LogLevel.Trace),
  fetch,
  publish,
};

window.onerror = (_message, _script, _line, _col, error) => {
  const errorInfo = error && error.stack ? error.stack : JSON.stringify(error);
  window.log.error(`Top-level unhandled error: ${errorInfo}`);
};

window.addEventListener('unhandledrejection', rejectionEvent => {
  const error = rejectionEvent.reason;
  const errorString =
    error && error.stack ? error.stack : JSON.stringify(error);
  window.log.error(`Top-level unhandled promise rejection: ${errorString}`);
});
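Reviewer note: a small renderer-side usage sketch, not part of the diff. The `window.log` methods and `fetch` are defined above; the specific messages and the call site are made up for illustration.

// Hypothetical renderer code exercising the new API (not in this commit).
window.log.info('message list rendered', { count: 42 });
window.log.error('failed to decrypt attachment');

// Fetching the redacted, formatted log text, e.g. before a debug-log upload:
window.log.fetch().then(text => {
  // `text` begins with the user-agent/node/env header, followed by one
  // "LEVEL time msg" line per persisted entry.
  console._log(text.split('\n')[0]);
});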
81
ts/logging/shared.ts
Normal file
@@ -0,0 +1,81 @@
// Copyright 2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

import * as bunyan from 'bunyan';
import { redactAll } from '../../js/modules/privacy';
import { missingCaseError } from '../util/missingCaseError';
import { reallyJsonStringify } from '../util/reallyJsonStringify';

// These match [Bunyan's recommendations][0].
// [0]: https://www.npmjs.com/package/bunyan#levels
export enum LogLevel {
  Fatal = 60,
  Error = 50,
  Warn = 40,
  Info = 30,
  Debug = 20,
  Trace = 10,
}

// These match [Bunyan's core fields][1].
// [1]: https://www.npmjs.com/package/bunyan#core-fields
export type LogEntryType = {
  level: LogLevel;
  msg: string;
  time: string;
};

const logLevels = new Set<LogLevel>([
  LogLevel.Fatal,
  LogLevel.Error,
  LogLevel.Warn,
  LogLevel.Info,
  LogLevel.Debug,
  LogLevel.Trace,
]);
function isLogLevel(value: unknown): value is LogLevel {
  return typeof value === 'number' && logLevels.has(value);
}

function isValidTime(value: unknown): value is string {
  return typeof value === 'string' && !Number.isNaN(new Date(value).getTime());
}

export function isLogEntry(value: unknown): value is LogEntryType {
  if (!value || typeof value !== 'object' || Array.isArray(value)) {
    return false;
  }

  const { level, time, msg } = value as Record<string, unknown>;

  return typeof msg === 'string' && isLogLevel(level) && isValidTime(time);
}

export function getLogLevelString(value: LogLevel): bunyan.LogLevelString {
  switch (value) {
    case LogLevel.Fatal:
      return 'fatal';
    case LogLevel.Error:
      return 'error';
    case LogLevel.Warn:
      return 'warn';
    case LogLevel.Info:
      return 'info';
    case LogLevel.Debug:
      return 'debug';
    case LogLevel.Trace:
      return 'trace';
    default:
      throw missingCaseError(value);
  }
}

export function cleanArgs(args: ReadonlyArray<unknown>): string {
  return redactAll(
    args
      .map(item =>
        typeof item === 'string' ? item : reallyJsonStringify(item)
      )
      .join(' ')
  );
}
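Reviewer note: a short sketch of how the shared helpers compose when validating an IPC payload. Not part of the diff; the sample payload and import path are illustrative assumptions.

// Hypothetical consumer of the shared helpers (not in this commit).
import {
  LogLevel,
  cleanArgs,
  getLogLevelString,
  isLogEntry,
} from './ts/logging/shared';

const payload: unknown = {
  level: LogLevel.Warn,
  // cleanArgs stringifies non-string args and redacts sensitive data.
  msg: cleanArgs(['low disk space', { free: '1.2 GB' }]),
  time: new Date().toISOString(),
};

if (isLogEntry(payload)) {
  // getLogLevelString maps the numeric bunyan level back to its name.
  console.log(getLogLevelString(payload.level), payload.time, payload.msg);
}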