Use window.log in browser context, turn on console eslint rule

commit 5933a34a18 (parent 4320b125dd)
71 changed files with 816 additions and 559 deletions
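The renderer-side half of this change swaps console calls for window.log / log; the other half is enforcing that going forward via ESLint. A minimal sketch of what turning on that rule looks like (illustrative only; the repository's actual ESLint config and chosen severity are assumptions, not copied from this commit):

/* .eslintrc.js (sketch, assumed config shape) */
module.exports = {
  rules: {
    // flag any remaining console usage so new browser-context code
    // goes through window.log instead
    'no-console': 'error',
  },
};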
@@ -134,7 +134,7 @@ function exportContactsAndGroups(db, fileWriter) {
 if (storeNames.length === 0) {
 throw new Error('No stores to export');
 }
-console.log('Exporting from these stores:', storeNames.join(', '));
+window.log.info('Exporting from these stores:', storeNames.join(', '));

 const stream = createOutputStream(fileWriter);

@@ -153,7 +153,7 @@ function exportContactsAndGroups(db, fileWriter) {
 );
 };
 transaction.oncomplete = () => {
-console.log('transaction complete');
+window.log.info('transaction complete');
 };

 const store = transaction.objectStore(storeName);

@@ -168,7 +168,7 @@ function exportContactsAndGroups(db, fileWriter) {
 };
 request.onsuccess = async event => {
 if (count === 0) {
-console.log('cursor opened');
+window.log.info('cursor opened');
 stream.write(`"${storeName}": [`);
 }

@@ -188,17 +188,17 @@ function exportContactsAndGroups(db, fileWriter) {
 } else {
 // no more
 stream.write(']');
-console.log('Exported', count, 'items from store', storeName);
+window.log.info('Exported', count, 'items from store', storeName);

 exportedStoreNames.push(storeName);
 if (exportedStoreNames.length < storeNames.length) {
 stream.write(',');
 } else {
-console.log('Exported all stores');
+window.log.info('Exported all stores');
 stream.write('}');

 await stream.close();
-console.log('Finished writing all stores to disk');
+window.log.info('Finished writing all stores to disk');
 resolve();
 }
 }

@@ -215,11 +215,11 @@ async function importNonMessages(db, parent, options) {

 function eliminateClientConfigInBackup(data, targetPath) {
 const cleaned = _.pick(data, 'conversations', 'groups');
-console.log('Writing configuration-free backup file back to disk');
+window.log.info('Writing configuration-free backup file back to disk');
 try {
 fs.writeFileSync(targetPath, JSON.stringify(cleaned));
 } catch (error) {
-console.log('Error writing cleaned-up backup to disk: ', error.stack);
+window.log.error('Error writing cleaned-up backup to disk: ', error.stack);
 }
 }

@@ -250,7 +250,9 @@ function importFromJsonString(db, jsonString, targetPath, options) {
 delete importObject.sessions;
 delete importObject.unprocessed;

-console.log('This is a light import; contacts, groups and messages only');
+window.log.info(
+'This is a light import; contacts, groups and messages only'
+);
 }

 // We mutate the on-disk backup to prevent the user from importing client

@@ -259,11 +261,11 @@ function importFromJsonString(db, jsonString, targetPath, options) {
 eliminateClientConfigInBackup(importObject, targetPath);

 const storeNames = _.keys(importObject);
-console.log('Importing to these stores:', storeNames.join(', '));
+window.log.info('Importing to these stores:', storeNames.join(', '));

 let finished = false;
 const finish = via => {
-console.log('non-messages import done via', via);
+window.log.info('non-messages import done via', via);
 if (finished) {
 resolve(result);
 }

@@ -281,7 +283,7 @@ function importFromJsonString(db, jsonString, targetPath, options) {
 transaction.oncomplete = finish.bind(null, 'transaction complete');

 _.each(storeNames, storeName => {
-console.log('Importing items for store', storeName);
+window.log.info('Importing items for store', storeName);

 if (!importObject[storeName].length) {
 delete importObject[storeName];

@@ -294,7 +296,7 @@ function importFromJsonString(db, jsonString, targetPath, options) {
 const finishStore = () => {
 // added all objects for this store
 delete importObject[storeName];
-console.log(
+window.log.info(
 'Done importing to store',
 storeName,
 'Total count:',

@@ -304,7 +306,7 @@ function importFromJsonString(db, jsonString, targetPath, options) {
 );
 if (_.keys(importObject).length === 0) {
 // added all object stores
-console.log('DB import complete');
+window.log.info('DB import complete');
 finish('puts scheduled');
 }
 };

@@ -455,7 +457,7 @@ async function readAttachment(dir, attachment, name, options) {
 const targetPath = path.join(dir, sanitizedName);

 if (!fs.existsSync(targetPath)) {
-console.log(`Warning: attachment ${sanitizedName} not found`);
+window.log.warn(`Warning: attachment ${sanitizedName} not found`);
 return;
 }

@@ -518,7 +520,7 @@ async function writeThumbnails(rawQuotedAttachments, options) {
 )
 );
 } catch (error) {
-console.log(
+window.log.error(
 'writeThumbnails: error exporting conversation',
 name,
 ':',

@@ -560,7 +562,7 @@ async function writeAttachments(rawAttachments, options) {
 try {
 await Promise.all(promises);
 } catch (error) {
-console.log(
+window.log.error(
 'writeAttachments: error exporting conversation',
 name,
 ':',

@@ -571,7 +573,6 @@ async function writeAttachments(rawAttachments, options) {
 }

 async function writeAvatar(avatar, options) {
-console.log('writeAvatar', { avatar, options });
 const { dir, message, index, key, newKey } = options;
 const name = _getAnonymousAttachmentFileName(message, index);
 const filename = `${name}-contact-avatar`;

@@ -618,7 +619,7 @@ async function writeContactAvatars(contact, options) {
 )
 );
 } catch (error) {
-console.log(
+window.log.error(
 'writeContactAvatars: error exporting conversation',
 name,
 ':',

@@ -633,10 +634,10 @@ async function writeEncryptedAttachment(target, data, options = {}) {

 if (fs.existsSync(target)) {
 if (newKey) {
-console.log(`Deleting attachment ${filename}; key has changed`);
+window.log.info(`Deleting attachment ${filename}; key has changed`);
 fs.unlinkSync(target);
 } else {
-console.log(`Skipping attachment ${filename}; already exists`);
+window.log.info(`Skipping attachment ${filename}; already exists`);
 return;
 }
 }

@@ -669,7 +670,7 @@ async function exportConversation(db, conversation, options) {
 throw new Error('Need a key to encrypt with!');
 }

-console.log('exporting conversation', name);
+window.log.info('exporting conversation', name);
 const writer = await createFileAndWriter(dir, 'messages.json');

 return new Promise(async (resolve, reject) => {

@@ -792,7 +793,7 @@ async function exportConversation(db, conversation, options) {
 try {
 await Promise.all([stream.write(']}'), promiseChain, stream.close()]);
 } catch (error) {
-console.log(
+window.log.error(
 'exportConversation: error exporting conversation',
 name,
 ':',

@@ -802,7 +803,7 @@ async function exportConversation(db, conversation, options) {
 return;
 }

-console.log('done exporting conversation', name);
+window.log.info('done exporting conversation', name);
 resolve();
 }
 };

@@ -888,12 +889,12 @@ function exportConversations(db, options) {
 });
 };

-console.log('scheduling export for conversation', name);
+window.log.info('scheduling export for conversation', name);
 // eslint-disable-next-line more/no-then
 promiseChain = promiseChain.then(process);
 cursor.continue();
 } else {
-console.log('Done scheduling conversation exports');
+window.log.info('Done scheduling conversation exports');
 try {
 await promiseChain;
 } catch (error) {

@@ -979,7 +980,7 @@ async function loadAttachments(dir, getName, options) {
 })
 );

-console.log('loadAttachments', { message });
+window.log.info('loadAttachments', { message });
 }

 function saveMessage(db, message) {

@@ -1000,7 +1001,7 @@ async function saveAllMessages(db, rawMessages) {
 return new Promise((resolve, reject) => {
 let finished = false;
 const finish = via => {
-console.log('messages done saving via', via);
+window.log.info('messages done saving via', via);
 if (finished) {
 resolve();
 }

@@ -1026,7 +1027,7 @@ async function saveAllMessages(db, rawMessages) {
 request.onsuccess = () => {
 count += 1;
 if (count === messages.length) {
-console.log(
+window.log.info(
 'Saved',
 messages.length,
 'messages for conversation',

@@ -1066,7 +1067,9 @@ async function importConversation(db, dir, options) {
 try {
 contents = await readFileAsText(dir, 'messages.json');
 } catch (error) {
-console.log(`Warning: could not access messages.json in directory: ${dir}`);
+window.log.error(
+`Warning: could not access messages.json in directory: ${dir}`
+);
 }

 let promiseChain = Promise.resolve();

@@ -1120,7 +1123,7 @@ async function importConversation(db, dir, options) {
 await saveAllMessages(db, messages);

 await promiseChain;
-console.log(
+window.log.info(
 'Finished importing conversation',
 conversationId,
 'Total:',

@@ -1208,7 +1211,7 @@ function assembleLookup(db, storeName, keyFunction) {
 lookup[keyFunction(cursor.value)] = true;
 cursor.continue();
 } else {
-console.log(`Done creating ${storeName} lookup`);
+window.log.info(`Done creating ${storeName} lookup`);
 resolve(lookup);
 }
 };

@@ -1236,7 +1239,7 @@ function createZip(zipDir, targetDir) {
 });

 archive.on('warning', error => {
-console.log(`Archive generation warning: ${error.stack}`);
+window.log.warn(`Archive generation warning: ${error.stack}`);
 });
 archive.on('error', reject);

@@ -1286,7 +1289,7 @@ function createTempDir() {
 }

 function deleteAll(pattern) {
-console.log(`Deleting ${pattern}`);
+window.log.info(`Deleting ${pattern}`);
 return pify(rimraf)(pattern);
 }

@@ -1320,10 +1323,10 @@ async function exportToDirectory(directory, options) {
 const zip = await createZip(encryptionDir, stagingDir);
 await encryptFile(zip, path.join(directory, 'messages.zip'), options);

-console.log('done backing up!');
+window.log.info('done backing up!');
 return directory;
 } catch (error) {
-console.log(
+window.log.error(
 'The backup went wrong!',
 error && error.stack ? error.stack : error
 );

@@ -1392,7 +1395,7 @@ async function importFromDirectory(directory, options) {
 const result = await importNonMessages(db, stagingDir, options);
 await importConversations(db, stagingDir, Object.assign({}, options));

-console.log('Done importing from backup!');
+window.log.info('Done importing from backup!');
 return result;
 } finally {
 if (stagingDir) {

@@ -1407,10 +1410,10 @@ async function importFromDirectory(directory, options) {
 const result = await importNonMessages(db, directory, options);
 await importConversations(db, directory, options);

-console.log('Done importing!');
+window.log.info('Done importing!');
 return result;
 } catch (error) {
-console.log(
+window.log.error(
 'The import went wrong!',
 error && error.stack ? error.stack : error
 );
@@ -1,4 +1,5 @@
 /* eslint-env node */
+/* global log */

 const fs = require('fs-extra');
 const path = require('path');

@@ -56,7 +57,7 @@ exports.createConversation = async ({
 await Promise.all(
 range(0, numMessages).map(async index => {
 await sleep(index * 100);
-console.log(`Create message ${index + 1}`);
+log.info(`Create message ${index + 1}`);
 const messageAttributes = await createRandomMessage({ conversationId });
 const message = new WhisperMessage(messageAttributes);
 return deferredToPromise(message.save());

@@ -107,7 +108,7 @@ const createRandomMessage = async ({ conversationId } = {}) => {
 };

 const message = _createMessage({ commonProperties, conversationId, type });
-return Message.initializeSchemaVersion(message);
+return Message.initializeSchemaVersion({ message, logger: log });
 };

 const _createMessage = ({ commonProperties, conversationId, type } = {}) => {
@@ -1,16 +0,0 @@
-const addUnhandledErrorHandler = require('electron-unhandled');
-
-const Errors = require('./types/errors');
-
-// addHandler :: Unit -> Unit
-exports.addHandler = () => {
-addUnhandledErrorHandler({
-logger: error => {
-console.error(
-'Uncaught error or unhandled promise rejection:',
-Errors.toLogFormat(error)
-);
-},
-showDialog: false,
-});
-};
@@ -1,4 +1,5 @@
 /* eslint-env node */
+/* global log */

 exports.setup = (locale, messages) => {
 if (!locale) {

@@ -11,7 +12,7 @@ exports.setup = (locale, messages) => {
 function getMessage(key, substitutions) {
 const entry = messages[key];
 if (!entry) {
-console.error(
+log.error(
 `i18n: Attempted to get translation for nonexistent key '${key}'`
 );
 return '';
@@ -13,7 +13,7 @@ class IdleDetector extends EventEmitter {
 }

 start() {
-console.log('Start idle detector');
+window.log.info('Start idle detector');
 this._scheduleNextCallback();
 }

@@ -22,7 +22,7 @@ class IdleDetector extends EventEmitter {
 return;
 }

-console.log('Stop idle detector');
+window.log.info('Stop idle detector');
 this._clearScheduledCallbacks();
 }
@@ -82,6 +82,7 @@ exports.dangerouslyProcessAllWithoutIndex = async ({
 minDatabaseVersion,
 numMessagesPerBatch,
 upgradeMessageSchema,
+logger,
 } = {}) => {
 if (!isString(databaseName)) {
 throw new TypeError("'databaseName' must be a string");

@@ -102,7 +103,7 @@ exports.dangerouslyProcessAllWithoutIndex = async ({
 const connection = await database.open(databaseName);
 const databaseVersion = connection.version;
 const isValidDatabaseVersion = databaseVersion >= minDatabaseVersion;
-console.log('Database status', {
+logger.info('Database status', {
 databaseVersion,
 isValidDatabaseVersion,
 minDatabaseVersion,

@@ -133,7 +134,7 @@ exports.dangerouslyProcessAllWithoutIndex = async ({
 break;
 }
 numCumulativeMessagesProcessed += status.numMessagesProcessed;
-console.log(
+logger.info(
 'Upgrade message schema:',
 Object.assign({}, status, {
 numTotalMessages,

@@ -142,11 +143,11 @@ exports.dangerouslyProcessAllWithoutIndex = async ({
 );
 }

-console.log('Close database connection');
+logger.info('Close database connection');
 connection.close();

 const totalDuration = Date.now() - migrationStartTime;
-console.log('Attachment migration complete:', {
+logger.info('Attachment migration complete:', {
 totalDuration,
 totalMessagesProcessed: numCumulativeMessagesProcessed,
 });
@@ -1,7 +1,7 @@
-exports.run = transaction => {
+exports.run = ({ transaction, logger }) => {
 const messagesStore = transaction.objectStore('messages');

-console.log("Create message attachment metadata index: 'hasAttachments'");
+logger.info("Create message attachment metadata index: 'hasAttachments'");
 messagesStore.createIndex(
 'hasAttachments',
 ['conversationId', 'hasAttachments', 'received_at'],

@@ -9,7 +9,7 @@ exports.run = transaction => {
 );

 ['hasVisualMediaAttachments', 'hasFileAttachments'].forEach(name => {
-console.log(`Create message attachment metadata index: '${name}'`);
+logger.info(`Create message attachment metadata index: '${name}'`);
 messagesStore.createIndex(name, ['conversationId', 'received_at', name], {
 unique: false,
 });
@@ -1,3 +1,5 @@
+/* global window */
+
 const { isString, last } = require('lodash');

 const { runMigrations } = require('./run_migrations');

@@ -12,8 +14,8 @@ const migrations = [
 {
 version: '12.0',
 migrate(transaction, next) {
-console.log('Migration 12');
-console.log('creating object stores');
+window.log.info('Migration 12');
+window.log.info('creating object stores');
 const messages = transaction.db.createObjectStore('messages');
 messages.createIndex('conversation', ['conversationId', 'received_at'], {
 unique: false,

@@ -46,7 +48,7 @@ const migrations = [
 transaction.db.createObjectStore('signedPreKeys');
 transaction.db.createObjectStore('items');

-console.log('creating debug log');
+window.log.info('creating debug log');
 transaction.db.createObjectStore('debug');

 next();

@@ -55,8 +57,8 @@ const migrations = [
 {
 version: '13.0',
 migrate(transaction, next) {
-console.log('Migration 13');
-console.log('Adding fields to identity keys');
+window.log.info('Migration 13');
+window.log.info('Adding fields to identity keys');
 const identityKeys = transaction.objectStore('identityKeys');
 const request = identityKeys.openCursor();
 const promises = [];

@@ -72,9 +74,9 @@ const migrations = [
 new Promise((resolve, reject) => {
 const putRequest = identityKeys.put(attributes, attributes.id);
 putRequest.onsuccess = resolve;
-putRequest.onerror = e => {
-console.log(e);
-reject(e);
+putRequest.onerror = error => {
+window.log.error(error && error.stack ? error.stack : error);
+reject(error);
 };
 })
 );

@@ -88,15 +90,15 @@ const migrations = [
 }
 };
 request.onerror = event => {
-console.log(event);
+window.log.error(event);
 };
 },
 },
 {
 version: '14.0',
 migrate(transaction, next) {
-console.log('Migration 14');
-console.log('Adding unprocessed message store');
+window.log.info('Migration 14');
+window.log.info('Adding unprocessed message store');
 const unprocessed = transaction.db.createObjectStore('unprocessed');
 unprocessed.createIndex('received', 'timestamp', { unique: false });
 next();

@@ -105,8 +107,8 @@ const migrations = [
 {
 version: '15.0',
 migrate(transaction, next) {
-console.log('Migration 15');
-console.log('Adding messages index for de-duplication');
+window.log.info('Migration 15');
+window.log.info('Adding messages index for de-duplication');
 const messages = transaction.objectStore('messages');
 messages.createIndex('unique', ['source', 'sourceDevice', 'sent_at'], {
 unique: true,

@@ -117,8 +119,8 @@ const migrations = [
 {
 version: '16.0',
 migrate(transaction, next) {
-console.log('Migration 16');
-console.log('Dropping log table, since we now log to disk');
+window.log.info('Migration 16');
+window.log.info('Dropping log table, since we now log to disk');
 transaction.db.deleteObjectStore('debug');
 next();
 },

@@ -126,19 +128,21 @@ const migrations = [
 {
 version: 17,
 async migrate(transaction, next) {
-console.log('Migration 17');
+window.log.info('Migration 17');

 const start = Date.now();

 const messagesStore = transaction.objectStore('messages');
-console.log('Create index from attachment schema version to attachment');
+window.log.info(
+'Create index from attachment schema version to attachment'
+);
 messagesStore.createIndex('schemaVersion', 'schemaVersion', {
 unique: false,
 });

 const duration = Date.now() - start;

-console.log(
+window.log.info(
 'Complete migration to database version 17',
 `Duration: ${duration}ms`
 );

@@ -148,13 +152,13 @@ const migrations = [
 {
 version: 18,
 migrate(transaction, next) {
-console.log('Migration 18');
+window.log.info('Migration 18');

 const start = Date.now();
-Migration18.run(transaction);
+Migration18.run({ transaction, logger: window.log });
 const duration = Date.now() - start;

-console.log(
+window.log.info(
 'Complete migration to database version 18',
 `Duration: ${duration}ms`
 );

@@ -169,9 +173,10 @@ const database = {
 migrations,
 };

-exports.run = ({ Backbone, databaseName } = {}) =>
+exports.run = ({ Backbone, databaseName, logger } = {}) =>
 runMigrations({
 Backbone,
+logger,
 database: Object.assign(
 {},
 database,
@@ -16,7 +16,7 @@ const migrations = [
 // },
 ];

-exports.run = async ({ Backbone, database } = {}) => {
+exports.run = async ({ Backbone, database, logger } = {}) => {
 const { canRun } = await exports.getStatus({ database });
 if (!canRun) {
 throw new Error(

@@ -24,7 +24,7 @@ exports.run = async ({ Backbone, database } = {}) => {
 );
 }

-await runMigrations({ Backbone, database });
+await runMigrations({ Backbone, database, logger });
 };

 exports.getStatus = async ({ database } = {}) => {
@@ -8,13 +8,13 @@ const { deferredToPromise } = require('../deferred_to_promise');
 const closeDatabaseConnection = ({ Backbone } = {}) =>
 deferredToPromise(Backbone.sync('closeall'));

-exports.runMigrations = async ({ Backbone, database } = {}) => {
+exports.runMigrations = async ({ Backbone, database, logger } = {}) => {
 if (
 !isObject(Backbone) ||
 !isObject(Backbone.Collection) ||
 !isFunction(Backbone.Collection.extend)
 ) {
-throw new TypeError("'Backbone' is required");
+throw new TypeError('runMigrations: Backbone is required');
 }

 if (

@@ -22,7 +22,10 @@ exports.runMigrations = async ({ Backbone, database } = {}) => {
 !isString(database.id) ||
 !Array.isArray(database.migrations)
 ) {
-throw new TypeError("'database' is required");
+throw new TypeError('runMigrations: database is required');
 }
+if (!isObject(logger)) {
+throw new TypeError('runMigrations: logger is required');
+}

 const {

@@ -33,7 +36,7 @@ exports.runMigrations = async ({ Backbone, database } = {}) => {
 const databaseVersion = await db.getVersion(database.id);
 const isAlreadyUpgraded = databaseVersion >= lastMigrationVersion;

-console.log('Database status', {
+logger.info('Database status', {
 firstMigrationVersion,
 lastMigrationVersion,
 databaseVersion,

@@ -50,7 +53,7 @@ exports.runMigrations = async ({ Backbone, database } = {}) => {
 }))();

 await deferredToPromise(migrationCollection.fetch({ limit: 1 }));
-console.log('Close database connection');
+logger.info('Close database connection');
 await closeDatabaseConnection({ Backbone });
 };
@@ -86,6 +86,7 @@ function initializeMigrations({
 Attachments,
 Type,
 VisualType,
+logger,
 }) {
 if (!Attachments) {
 return null;

@@ -131,15 +132,17 @@ function initializeMigrations({
 getImageDimensions,
 makeImageThumbnail,
 makeVideoScreenshot,
+logger,
 }),
-writeMessageAttachments: MessageType.createAttachmentDataWriter(
-createWriterForExisting(attachmentsPath)
-),
+writeMessageAttachments: MessageType.createAttachmentDataWriter({
+writeExistingAttachmentData: createWriterForExisting(attachmentsPath),
+logger,
+}),
 };
 }

 exports.setup = (options = {}) => {
-const { Attachments, userDataPath, getRegionCode } = options;
+const { Attachments, userDataPath, getRegionCode, logger } = options;

 const Migrations = initializeMigrations({
 userDataPath,

@@ -147,6 +150,7 @@ exports.setup = (options = {}) => {
 Attachments,
 Type: AttachmentType,
 VisualType: VisualAttachment,
+logger,
 });

 const Components = {
@@ -108,9 +108,9 @@ exports._replaceUnicodeOrderOverridesSync = attachment => {
 exports.replaceUnicodeOrderOverrides = async attachment =>
 exports._replaceUnicodeOrderOverridesSync(attachment);

-exports.removeSchemaVersion = attachment => {
+exports.removeSchemaVersion = ({ attachment, logger }) => {
 if (!exports.isValid(attachment)) {
-console.log(
+logger.error(
 'Attachment.removeSchemaVersion: Invalid input attachment:',
 attachment
 );

@@ -197,6 +197,7 @@ exports.captureDimensionsAndScreenshot = async (
 getImageDimensions,
 makeImageThumbnail,
 makeVideoScreenshot,
+logger,
 }
 ) => {
 const { contentType } = attachment;

@@ -212,13 +213,17 @@ exports.captureDimensionsAndScreenshot = async (

 if (GoogleChrome.isImageTypeSupported(contentType)) {
 try {
-const { width, height } = await getImageDimensions(absolutePath);
+const { width, height } = await getImageDimensions({
+objectUrl: absolutePath,
+logger,
+});
 const thumbnailBuffer = await blobToArrayBuffer(
-await makeImageThumbnail(
-THUMBNAIL_SIZE,
-absolutePath,
-THUMBNAIL_CONTENT_TYPE
-)
+await makeImageThumbnail({
+size: THUMBNAIL_SIZE,
+objectUrl: absolutePath,
+contentType: THUMBNAIL_CONTENT_TYPE,
+logger,
+})
 );

 const thumbnailPath = await writeNewAttachmentData(thumbnailBuffer);

@@ -234,7 +239,7 @@ exports.captureDimensionsAndScreenshot = async (
 },
 };
 } catch (error) {
-console.log(
+logger.error(
 'captureDimensionsAndScreenshot:',
 'error processing image; skipping screenshot generation',
 toLogFormat(error)

@@ -246,21 +251,29 @@ exports.captureDimensionsAndScreenshot = async (
 let screenshotObjectUrl;
 try {
 const screenshotBuffer = await blobToArrayBuffer(
-await makeVideoScreenshot(absolutePath, THUMBNAIL_CONTENT_TYPE)
+await makeVideoScreenshot({
+objectUrl: absolutePath,
+contentType: THUMBNAIL_CONTENT_TYPE,
+logger,
+})
 );
 screenshotObjectUrl = makeObjectUrl(
 screenshotBuffer,
 THUMBNAIL_CONTENT_TYPE
 );
-const { width, height } = await getImageDimensions(screenshotObjectUrl);
+const { width, height } = await getImageDimensions({
+objectUrl: screenshotObjectUrl,
+logger,
+});
 const screenshotPath = await writeNewAttachmentData(screenshotBuffer);

 const thumbnailBuffer = await blobToArrayBuffer(
-await makeImageThumbnail(
-THUMBNAIL_SIZE,
-screenshotObjectUrl,
-THUMBNAIL_CONTENT_TYPE
-)
+await makeImageThumbnail({
+size: THUMBNAIL_SIZE,
+objectUrl: screenshotObjectUrl,
+contentType: THUMBNAIL_CONTENT_TYPE,
+logger,
+})
 );

 const thumbnailPath = await writeNewAttachmentData(thumbnailBuffer);

@@ -283,7 +296,7 @@ exports.captureDimensionsAndScreenshot = async (
 height,
 };
 } catch (error) {
-console.log(
+logger.error(
 'captureDimensionsAndScreenshot: error processing video; skipping screenshot generation',
 toLogFormat(error)
 );
@@ -9,7 +9,7 @@ const { isArrayBuffer, isFunction, isUndefined, omit } = require('lodash');
 // Promise Attachment
 exports.migrateDataToFileSystem = async (
 attachment,
-{ writeNewAttachmentData } = {}
+{ writeNewAttachmentData, logger } = {}
 ) => {
 if (!isFunction(writeNewAttachmentData)) {
 throw new TypeError("'writeNewAttachmentData' must be a function");

@@ -19,7 +19,7 @@ exports.migrateDataToFileSystem = async (
 const hasData = !isUndefined(data);
 const shouldSkipSchemaUpgrade = !hasData;
 if (shouldSkipSchemaUpgrade) {
-console.log('WARNING: `attachment.data` is `undefined`');
+logger.warn('WARNING: `attachment.data` is `undefined`');
 return attachment;
 }
@@ -13,7 +13,7 @@ exports.parseAndWriteAvatar = upgradeAttachment => async (
 contact,
 context = {}
 ) => {
-const { message, regionCode } = context;
+const { message, regionCode, logger } = context;
 const { avatar } = contact;

 // This is to ensure that an omit() call doesn't pull in prototype props/methods

@@ -35,7 +35,7 @@ exports.parseAndWriteAvatar = upgradeAttachment => async (
 messageId: idForLogging(message),
 });
 if (error) {
-console.log(
+logger.error(
 'Contact.parseAndWriteAvatar: contact was malformed.',
 toLogFormat(error)
 );
@@ -1,4 +1,4 @@
-const { isFunction, isString, omit } = require('lodash');
+const { isFunction, isObject, isString, omit } = require('lodash');

 const Contact = require('./contact');
 const Attachment = require('./attachment');

@@ -55,7 +55,7 @@ exports.PRIVATE = PRIVATE;
 exports.isValid = () => true;

 // Schema
-exports.initializeSchemaVersion = message => {
+exports.initializeSchemaVersion = ({ message, logger }) => {
 const isInitialized =
 SchemaVersion.isValid(message.schemaVersion) && message.schemaVersion >= 1;
 if (isInitialized) {

@@ -82,7 +82,9 @@ exports.initializeSchemaVersion = message => {
 : INITIAL_SCHEMA_VERSION;
 const messageWithInitialSchema = Object.assign({}, message, {
 schemaVersion: inheritedSchemaVersion,
-attachments: message.attachments.map(Attachment.removeSchemaVersion),
+attachments: message.attachments.map(attachment =>
+Attachment.removeSchemaVersion({ attachment, logger })
+),
 });

 return messageWithInitialSchema;

@@ -92,17 +94,24 @@ exports.initializeSchemaVersion = message => {
 // type UpgradeStep = (Message, Context) -> Promise Message

 // SchemaVersion -> UpgradeStep -> UpgradeStep
-exports._withSchemaVersion = (schemaVersion, upgrade) => {
+exports._withSchemaVersion = ({ schemaVersion, upgrade }) => {
 if (!SchemaVersion.isValid(schemaVersion)) {
-throw new TypeError("'schemaVersion' is invalid");
+throw new TypeError('_withSchemaVersion: schemaVersion is invalid');
 }
 if (!isFunction(upgrade)) {
-throw new TypeError("'upgrade' must be a function");
+throw new TypeError('_withSchemaVersion: upgrade must be a function');
 }

 return async (message, context) => {
+if (!context || !isObject(context.logger)) {
+throw new TypeError(
+'_withSchemaVersion: context must have logger object'
+);
+}
+const { logger } = context;
+
 if (!exports.isValid(message)) {
-console.log(
+logger.error(
 'Message._withSchemaVersion: Invalid input message:',
 message
 );

@@ -117,7 +126,7 @@ exports._withSchemaVersion = (schemaVersion, upgrade) => {
 const expectedVersion = schemaVersion - 1;
 const hasExpectedVersion = message.schemaVersion === expectedVersion;
 if (!hasExpectedVersion) {
-console.log(
+logger.warn(
 'WARNING: Message._withSchemaVersion: Unexpected version:',
 `Expected message to have version ${expectedVersion},`,
 `but got ${message.schemaVersion}.`,

@@ -130,7 +139,7 @@ exports._withSchemaVersion = (schemaVersion, upgrade) => {
 try {
 upgradedMessage = await upgrade(message, context);
 } catch (error) {
-console.log(
+logger.error(
 `Message._withSchemaVersion: error updating message ${message.id}:`,
 Errors.toLogFormat(error)
 );

@@ -138,7 +147,7 @@ exports._withSchemaVersion = (schemaVersion, upgrade) => {
 }

 if (!exports.isValid(upgradedMessage)) {
-console.log(
+logger.error(
 'Message._withSchemaVersion: Invalid upgraded message:',
 upgradedMessage
 );

@@ -186,6 +195,10 @@ exports._mapQuotedAttachments = upgradeAttachment => async (
 if (!message.quote) {
 return message;
 }
+if (!context || !isObject(context.logger)) {
+throw new Error('_mapQuotedAttachments: context must have logger object');
+}
+const { logger } = context;

 const upgradeWithContext = async attachment => {
 const { thumbnail } = attachment;

@@ -194,7 +207,7 @@ exports._mapQuotedAttachments = upgradeAttachment => async (
 }

 if (!thumbnail.data) {
-console.log('Quoted attachment did not have thumbnail data; removing it');
+logger.warn('Quoted attachment did not have thumbnail data; removing it');
 return omit(attachment, ['thumbnail']);
 }

@@ -216,39 +229,46 @@ exports._mapQuotedAttachments = upgradeAttachment => async (
 });
 };

-const toVersion0 = async message => exports.initializeSchemaVersion(message);
-const toVersion1 = exports._withSchemaVersion(
-1,
-exports._mapAttachments(Attachment.autoOrientJPEG)
-);
-const toVersion2 = exports._withSchemaVersion(
-2,
-exports._mapAttachments(Attachment.replaceUnicodeOrderOverrides)
-);
-const toVersion3 = exports._withSchemaVersion(
-3,
-exports._mapAttachments(Attachment.migrateDataToFileSystem)
-);
-const toVersion4 = exports._withSchemaVersion(
-4,
-exports._mapQuotedAttachments(Attachment.migrateDataToFileSystem)
-);
-const toVersion5 = exports._withSchemaVersion(5, initializeAttachmentMetadata);
-const toVersion6 = exports._withSchemaVersion(
-6,
-exports._mapContact(
+const toVersion0 = async (message, context) =>
+exports.initializeSchemaVersion({ message, logger: context.logger });
+const toVersion1 = exports._withSchemaVersion({
+schemaVersion: 1,
+upgrade: exports._mapAttachments(Attachment.autoOrientJPEG),
+});
+const toVersion2 = exports._withSchemaVersion({
+schemaVersion: 2,
+upgrade: exports._mapAttachments(Attachment.replaceUnicodeOrderOverrides),
+});
+const toVersion3 = exports._withSchemaVersion({
+schemaVersion: 3,
+upgrade: exports._mapAttachments(Attachment.migrateDataToFileSystem),
+});
+const toVersion4 = exports._withSchemaVersion({
+schemaVersion: 4,
+upgrade: exports._mapQuotedAttachments(Attachment.migrateDataToFileSystem),
+});
+const toVersion5 = exports._withSchemaVersion({
+schemaVersion: 5,
+upgrade: initializeAttachmentMetadata,
+});
+const toVersion6 = exports._withSchemaVersion({
+schemaVersion: 6,
+upgrade: exports._mapContact(
 Contact.parseAndWriteAvatar(Attachment.migrateDataToFileSystem)
-)
-);
+),
+});
 // IMPORTANT: We’ve updated our definition of `initializeAttachmentMetadata`, so
 // we need to run it again on existing items that have previously been incorrectly
 // classified:
-const toVersion7 = exports._withSchemaVersion(7, initializeAttachmentMetadata);
+const toVersion7 = exports._withSchemaVersion({
+schemaVersion: 7,
+upgrade: initializeAttachmentMetadata,
+});

-const toVersion8 = exports._withSchemaVersion(
-8,
-exports._mapAttachments(Attachment.captureDimensionsAndScreenshot)
-);
+const toVersion8 = exports._withSchemaVersion({
+schemaVersion: 8,
+upgrade: exports._mapAttachments(Attachment.captureDimensionsAndScreenshot),
+});

 const VERSIONS = [
 toVersion0,

@@ -275,6 +295,7 @@ exports.upgradeSchema = async (
 getImageDimensions,
 makeImageThumbnail,
 makeVideoScreenshot,
+logger,
 } = {}
 ) => {
 if (!isFunction(writeNewAttachmentData)) {

@@ -301,6 +322,9 @@ exports.upgradeSchema = async (
 if (!isFunction(makeVideoScreenshot)) {
 throw new TypeError('context.makeVideoScreenshot is required');
 }
+if (!isObject(logger)) {
+throw new TypeError('context.logger is required');
+}

 let message = rawMessage;
 // eslint-disable-next-line no-restricted-syntax

@@ -317,6 +341,7 @@ exports.upgradeSchema = async (
 getImageDimensions,
 makeImageThumbnail,
 makeVideoScreenshot,
+logger,
 });
 }

@@ -339,9 +364,17 @@ exports.createAttachmentLoader = loadAttachmentData => {
 // createAttachmentDataWriter :: (RelativePath -> IO Unit)
 // Message ->
 // IO (Promise Message)
-exports.createAttachmentDataWriter = writeExistingAttachmentData => {
+exports.createAttachmentDataWriter = ({
+writeExistingAttachmentData,
+logger,
+}) => {
 if (!isFunction(writeExistingAttachmentData)) {
-throw new TypeError("'writeExistingAttachmentData' must be a function");
+throw new TypeError(
+'createAttachmentDataWriter: writeExistingAttachmentData must be a function'
+);
 }
+if (!isObject(logger)) {
+throw new TypeError('createAttachmentDataWriter: logger must be an object');
+}

 return async rawMessage => {

@@ -349,7 +382,10 @@ exports.createAttachmentDataWriter = writeExistingAttachmentData => {
 throw new TypeError("'rawMessage' is not valid");
 }

-const message = exports.initializeSchemaVersion(rawMessage);
+const message = exports.initializeSchemaVersion({
+message: rawMessage,
+logger,
+});

 const { attachments, quote, contact } = message;
 const hasFilesToWrite =

@@ -387,7 +423,7 @@ exports.createAttachmentDataWriter = writeExistingAttachmentData => {

 // we want to be bulletproof to thumbnails without data
 if (!data || !path) {
-console.log(
+logger.warn(
 'Thumbnail had neither data nor path.',
 'id:',
 message.id,

@@ -418,7 +454,7 @@ exports.createAttachmentDataWriter = writeExistingAttachmentData => {

 const messageWithoutAttachmentData = Object.assign(
 {},
-await writeThumbnails(message),
+await writeThumbnails(message, { logger }),
 {
 contact: await Promise.all((contact || []).map(writeContactAvatar)),
 attachments: await Promise.all(
@@ -10,7 +10,7 @@ const {

 exports.blobToArrayBuffer = blobToArrayBuffer;

-exports.getImageDimensions = objectUrl =>
+exports.getImageDimensions = ({ objectUrl, logger }) =>
 new Promise((resolve, reject) => {
 const image = document.createElement('img');

@@ -21,14 +21,19 @@ exports.getImageDimensions = objectUrl =>
 });
 });
 image.addEventListener('error', error => {
-console.log('getImageDimensions error', toLogFormat(error));
+logger.error('getImageDimensions error', toLogFormat(error));
 reject(error);
 });

 image.src = objectUrl;
 });

-exports.makeImageThumbnail = (size, objectUrl, contentType = 'image/png') =>
+exports.makeImageThumbnail = ({
+size,
+objectUrl,
+contentType = 'image/png',
+logger,
+}) =>
 new Promise((resolve, reject) => {
 const image = document.createElement('img');

@@ -61,14 +66,18 @@ exports.makeImageThumbnail = (size, objectUrl, contentType = 'image/png') =>
 });

 image.addEventListener('error', error => {
-console.log('makeImageThumbnail error', toLogFormat(error));
+logger.error('makeImageThumbnail error', toLogFormat(error));
 reject(error);
 });

 image.src = objectUrl;
 });

-exports.makeVideoScreenshot = (objectUrl, contentType = 'image/png') =>
+exports.makeVideoScreenshot = ({
+objectUrl,
+contentType = 'image/png',
+logger,
+}) =>
 new Promise((resolve, reject) => {
 const video = document.createElement('video');

@@ -89,25 +98,33 @@ exports.makeVideoScreenshot = (objectUrl, contentType = 'image/png') =>

 video.addEventListener('canplay', capture);
 video.addEventListener('error', error => {
-console.log('makeVideoThumbnail error', toLogFormat(error));
+logger.error('makeVideoThumbnail error', toLogFormat(error));
 reject(error);
 });

 video.src = objectUrl;
 });

-exports.makeVideoThumbnail = async (size, videoObjectUrl) => {
+exports.makeVideoThumbnail = async ({ size, videoObjectUrl, logger }) => {
 let screenshotObjectUrl;
 try {
 const type = 'image/png';
-const blob = await exports.makeVideoScreenshot(videoObjectUrl, type);
+const blob = await exports.makeVideoScreenshot({
+objectUrl: videoObjectUrl,
+contentType: type,
+logger,
+});
 const data = await blobToArrayBuffer(blob);
 screenshotObjectUrl = arrayBufferToObjectURL({
 data,
 type,
 });

-return exports.makeImageThumbnail(size, screenshotObjectUrl);
+return exports.makeImageThumbnail({
+size,
+objectUrl: screenshotObjectUrl,
+logger,
+});
 } finally {
 exports.revokeObjectUrl(screenshotObjectUrl);
 }
@@ -6,6 +6,7 @@ const is = require('@sindresorhus/is');

 /* global Buffer: false */
 /* global setTimeout: false */
+/* global log: false */

 /* eslint-disable more/no-then, no-bitwise, no-nested-ternary */

@@ -159,7 +160,7 @@ function _createSocket(url, { certificateAuthority, proxyUrl }) {
 function _promiseAjax(providedUrl, options) {
 return new Promise((resolve, reject) => {
 const url = providedUrl || `${options.host}/${options.path}`;
-console.log(options.type, url);
+log.info(options.type, url);
 const timeout =
 typeof options.timeout !== 'undefined' ? options.timeout : 10000;

@@ -220,7 +221,7 @@ function _promiseAjax(providedUrl, options) {
 if (options.responseType === 'json') {
 if (options.validateResponse) {
 if (!_validateResponse(result, options.validateResponse)) {
-console.log(options.type, url, response.status, 'Error');
+log.error(options.type, url, response.status, 'Error');
 reject(
 HTTPError(
 'promiseAjax: invalid response',

@@ -233,10 +234,10 @@ function _promiseAjax(providedUrl, options) {
 }
 }
 if (response.status >= 0 && response.status < 400) {
-console.log(options.type, url, response.status, 'Success');
+log.info(options.type, url, response.status, 'Success');
 resolve(result, response.status);
 } else {
-console.log(options.type, url, response.status, 'Error');
+log.error(options.type, url, response.status, 'Error');
 reject(
 HTTPError(
 'promiseAjax: error response',

@@ -249,7 +250,7 @@ function _promiseAjax(providedUrl, options) {
 });
 })
 .catch(e => {
-console.log(options.type, url, 0, 'Error');
+log.error(options.type, url, 0, 'Error');
 const stack = `${e.stack}\nInitial stack:\n${options.stack}`;
 reject(HTTPError('promiseAjax catch', 0, e.toString(), stack));
 });

@@ -650,7 +651,7 @@ function initialize({ url, cdnUrl, certificateAuthority, proxyUrl }) {
 }

 function getMessageSocket() {
-console.log('opening message socket', url);
+log.info('opening message socket', url);
 const fixedScheme = url
 .replace('https://', 'wss://')
 .replace('http://', 'ws://');

@@ -664,7 +665,7 @@ function initialize({ url, cdnUrl, certificateAuthority, proxyUrl }) {
 }

 function getProvisioningSocket() {
-console.log('opening provisioning socket', url);
+log.info('opening provisioning socket', url);
 const fixedScheme = url
 .replace('https://', 'wss://')
 .replace('http://', 'ws://');
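For reference, the window.log and bare log globals used throughout these hunks are expected to be installed on the window before any of this code runs. The sketch below shows the general shape of that wiring with a console-backed logger; it is an illustration under that assumption, not the project's actual logging module:

/* preload sketch (assumed): expose a logger with the method names used above */
window.log = {
  info: (...args) => console.log(...args),   // window.log.info(...)
  warn: (...args) => console.warn(...args),  // window.log.warn(...)
  error: (...args) => console.error(...args) // window.log.error(...)
};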