Updates to backup infrastructure
parent 47f834cf5c · commit e4babdaef0
14 changed files with 599 additions and 710 deletions
@@ -223,8 +223,8 @@ module.exports = grunt => {
grunt.registerTask('getExpireTime', () => {
grunt.task.requires('gitinfo');
const gitinfo = grunt.config.get('gitinfo');
const commited = gitinfo.local.branch.current.lastCommitTime;
const time = Date.parse(commited) + 1000 * 60 * 60 * 24 * 90;
const committed = gitinfo.local.branch.current.lastCommitTime;
const time = Date.parse(committed) + 1000 * 60 * 60 * 24 * 90;
grunt.file.write(
'config/local-production.json',
`${JSON.stringify({ buildExpiration: time })}\n`

@@ -263,7 +263,7 @@ module.exports = grunt => {
app.client
.execute(getMochaResults)
.then(data => Boolean(data.value)),
10000,
25000,
'Expected to find window.mochaResults set!'
)
)
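For reference (not part of the diff), the arithmetic above stamps the build to expire 90 days after the last commit time reported by gitinfo. A tiny worked example with a hypothetical commit date:

// Illustration only; the date below is made up.
const committed = '2018-10-05T23:12:28.961Z';
const NINETY_DAYS = 1000 * 60 * 60 * 24 * 90; // 7,776,000,000 ms
const buildExpiration = Date.parse(committed) + NINETY_DAYS;
// buildExpiration is a Unix timestamp (ms) written to config/local-production.json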
@@ -19,6 +19,7 @@ module.exports = {
createOrUpdateGroup,
getGroupById,
getAllGroupIds,
getAllGroups,
bulkAddGroups,
removeGroupById,
removeAllGroups,

@@ -567,6 +568,10 @@ async function getAllGroupIds() {
const rows = await db.all('SELECT id FROM groups ORDER BY id ASC;');
return map(rows, row => row.id);
}
async function getAllGroups() {
const rows = await db.all('SELECT id FROM groups ORDER BY id ASC;');
return map(rows, row => jsonToObject(row.json));
}
async function bulkAddGroups(array) {
return bulkAdd(GROUPS_TABLE, array);
}
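The new group accessors above are what the backup path uses to move groups out of and back into SQL without touching the other stores. A minimal round-trip sketch, reusing only names that appear in this diff (error handling omitted; illustrative, not part of the commit):

// Sketch only: export every group, then re-add them during import.
async function roundTripGroups() {
  const groups = await window.Signal.Data.getAllGroups();
  window.log.info(`Exporting ${groups.length} groups`);
  // ...on export these objects are written into db.json; on import they come back...
  await window.Signal.Data.bulkAddGroups(groups);
}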
@@ -7,22 +7,20 @@
/* eslint-env browser */
/* eslint-env node */

/* eslint-disable no-param-reassign, guard-for-in, no-unreachable */
/* eslint-disable no-param-reassign, guard-for-in */

const fs = require('fs');
const path = require('path');

const { map, fromPairs } = require('lodash');
const tar = require('tar');
const tmp = require('tmp');
const pify = require('pify');
const archiver = require('archiver');
const rimraf = require('rimraf');
const electronRemote = require('electron').remote;

const Attachment = require('./types/attachment');
const crypto = require('./crypto');

const decompress = () => null;
const { dialog, BrowserWindow } = electronRemote;

module.exports = {
@ -111,100 +109,55 @@ function createOutputStream(writer) {
|
|||
};
|
||||
}
|
||||
|
||||
async function exportContactAndGroupsToFile(db, parent) {
|
||||
async function exportContactAndGroupsToFile(parent) {
|
||||
const writer = await createFileAndWriter(parent, 'db.json');
|
||||
return exportContactsAndGroups(db, writer);
|
||||
return exportContactsAndGroups(writer);
|
||||
}
|
||||
|
||||
function exportContactsAndGroups(db, fileWriter) {
|
||||
return new Promise((resolve, reject) => {
|
||||
let storeNames = db.objectStoreNames;
|
||||
storeNames = _.without(
|
||||
storeNames,
|
||||
'messages',
|
||||
'items',
|
||||
'signedPreKeys',
|
||||
'preKeys',
|
||||
'identityKeys',
|
||||
'sessions',
|
||||
'unprocessed'
|
||||
);
|
||||
function writeArray(stream, array) {
|
||||
stream.write('[');
|
||||
|
||||
const exportedStoreNames = [];
|
||||
if (storeNames.length === 0) {
|
||||
throw new Error('No stores to export');
|
||||
for (let i = 0, max = array.length; i < max; i += 1) {
|
||||
if (i > 0) {
|
||||
stream.write(',');
|
||||
}
|
||||
window.log.info('Exporting from these stores:', storeNames.join(', '));
|
||||
|
||||
const stream = createOutputStream(fileWriter);
|
||||
const item = array[i];
|
||||
|
||||
stream.write('{');
|
||||
// We don't back up avatars; we'll get them in a future contact sync or profile fetch
|
||||
const cleaned = _.omit(item, ['avatar', 'profileAvatar']);
|
||||
|
||||
_.each(storeNames, storeName => {
|
||||
// Both the readwrite permission and the multi-store transaction are required to
|
||||
// keep this function working. They serve to serialize all of these transactions,
|
||||
// one per store to be exported.
|
||||
const transaction = db.transaction(storeNames, 'readwrite');
|
||||
transaction.onerror = () => {
|
||||
Whisper.Database.handleDOMException(
|
||||
`exportToJsonFile transaction error (store: ${storeName})`,
|
||||
transaction.error,
|
||||
reject
|
||||
);
|
||||
};
|
||||
transaction.oncomplete = () => {
|
||||
window.log.info('transaction complete');
|
||||
};
|
||||
stream.write(JSON.stringify(stringify(cleaned)));
|
||||
}
|
||||
|
||||
const store = transaction.objectStore(storeName);
|
||||
const request = store.openCursor();
|
||||
let count = 0;
|
||||
request.onerror = () => {
|
||||
Whisper.Database.handleDOMException(
|
||||
`exportToJsonFile request error (store: ${storeNames})`,
|
||||
request.error,
|
||||
reject
|
||||
);
|
||||
};
|
||||
request.onsuccess = async event => {
|
||||
if (count === 0) {
|
||||
window.log.info('cursor opened');
|
||||
stream.write(`"${storeName}": [`);
|
||||
}
|
||||
stream.write(']');
|
||||
}
|
||||
|
||||
const cursor = event.target.result;
|
||||
if (cursor) {
|
||||
if (count > 0) {
|
||||
stream.write(',');
|
||||
}
|
||||
function getPlainJS(collection) {
|
||||
return collection.map(model => model.attributes);
|
||||
}
|
||||
|
||||
// Preventing base64'd images from reaching the disk, making db.json too big
|
||||
const item = _.omit(cursor.value, ['avatar', 'profileAvatar']);
|
||||
async function exportContactsAndGroups(fileWriter) {
|
||||
const stream = createOutputStream(fileWriter);
|
||||
|
||||
const jsonString = JSON.stringify(stringify(item));
|
||||
stream.write(jsonString);
|
||||
cursor.continue();
|
||||
count += 1;
|
||||
} else {
|
||||
// no more
|
||||
stream.write(']');
|
||||
window.log.info('Exported', count, 'items from store', storeName);
|
||||
stream.write('{');
|
||||
|
||||
exportedStoreNames.push(storeName);
|
||||
if (exportedStoreNames.length < storeNames.length) {
|
||||
stream.write(',');
|
||||
} else {
|
||||
window.log.info('Exported all stores');
|
||||
stream.write('}');
|
||||
|
||||
await stream.close();
|
||||
window.log.info('Finished writing all stores to disk');
|
||||
resolve();
|
||||
}
|
||||
}
|
||||
};
|
||||
});
|
||||
stream.write('"conversations": ');
|
||||
const conversations = await window.Signal.Data.getAllConversations({
|
||||
ConversationCollection: Whisper.ConversationCollection,
|
||||
});
|
||||
window.log.info(`Exporting ${conversations.length} conversations`);
|
||||
writeArray(stream, getPlainJS(conversations));
|
||||
|
||||
stream.write(',');
|
||||
|
||||
stream.write('"groups": ');
|
||||
const groups = await window.Signal.Data.getAllGroups();
|
||||
window.log.info(`Exporting ${groups.length} groups`);
|
||||
writeArray(stream, groups);
|
||||
|
||||
stream.write('}');
|
||||
await stream.close();
|
||||
}
|
||||
|
||||
async function importNonMessages(parent, options) {
|
||||
|
@ -414,6 +367,14 @@ function readFileAsText(parent, name) {
|
|||
});
|
||||
}
|
||||
|
||||
// Buffer instances are also Uint8Array instances, but they might be a view
|
||||
// https://nodejs.org/docs/latest/api/buffer.html#buffer_buffers_and_typedarray
|
||||
const toArrayBuffer = nodeBuffer =>
|
||||
nodeBuffer.buffer.slice(
|
||||
nodeBuffer.byteOffset,
|
||||
nodeBuffer.byteOffset + nodeBuffer.byteLength
|
||||
);
|
||||
|
||||
function readFileAsArrayBuffer(targetPath) {
|
||||
return new Promise((resolve, reject) => {
|
||||
// omitting the encoding to get a buffer back
|
||||
|
@ -422,9 +383,7 @@ function readFileAsArrayBuffer(targetPath) {
|
|||
return reject(error);
|
||||
}
|
||||
|
||||
// Buffer instances are also Uint8Array instances
|
||||
// https://nodejs.org/docs/latest/api/buffer.html#buffer_buffers_and_typedarray
|
||||
return resolve(buffer.buffer);
|
||||
return resolve(toArrayBuffer(buffer));
|
||||
});
|
||||
});
|
||||
}
|
||||
|
@ -468,7 +427,7 @@ function _getAnonymousAttachmentFileName(message, index) {
|
|||
return `${message.id}-${index}`;
|
||||
}
|
||||
|
||||
async function readAttachment(dir, attachment, name, options) {
|
||||
async function readEncryptedAttachment(dir, attachment, name, options) {
|
||||
options = options || {};
|
||||
const { key } = options;
|
||||
|
||||
|
@ -485,26 +444,29 @@ async function readAttachment(dir, attachment, name, options) {
|
|||
const isEncrypted = !_.isUndefined(key);
|
||||
|
||||
if (isEncrypted) {
|
||||
attachment.data = await crypto.decryptSymmetric(key, data);
|
||||
attachment.data = await crypto.decryptAttachment(
|
||||
key,
|
||||
attachment.path,
|
||||
data
|
||||
);
|
||||
} else {
|
||||
attachment.data = data;
|
||||
}
|
||||
}
|
||||
|
||||
async function writeThumbnail(attachment, options) {
|
||||
async function writeQuoteThumbnail(attachment, options) {
|
||||
if (!attachment || !attachment.thumbnail || !attachment.thumbnail.path) {
|
||||
return;
|
||||
}
|
||||
|
||||
const { dir, message, index, key, newKey } = options;
|
||||
const filename = `${_getAnonymousAttachmentFileName(
|
||||
message,
|
||||
index
|
||||
)}-thumbnail`;
|
||||
)}-quote-thumbnail`;
|
||||
const target = path.join(dir, filename);
|
||||
const { thumbnail } = attachment;
|
||||
|
||||
if (!thumbnail || !thumbnail.data) {
|
||||
return;
|
||||
}
|
||||
|
||||
await writeEncryptedAttachment(target, thumbnail.data, {
|
||||
await writeEncryptedAttachment(target, attachment.thumbnail.path, {
|
||||
key,
|
||||
newKey,
|
||||
filename,
|
||||
|
@ -512,25 +474,13 @@ async function writeThumbnail(attachment, options) {
|
|||
});
|
||||
}
|
||||
|
||||
async function writeThumbnails(rawQuotedAttachments, options) {
|
||||
async function writeQuoteThumbnails(quotedAttachments, options) {
|
||||
const { name } = options;
|
||||
|
||||
const { loadAttachmentData } = Signal.Migrations;
|
||||
const promises = rawQuotedAttachments.map(async attachment => {
|
||||
if (!attachment || !attachment.thumbnail || !attachment.thumbnail.path) {
|
||||
return attachment;
|
||||
}
|
||||
|
||||
return Object.assign({}, attachment, {
|
||||
thumbnail: await loadAttachmentData(attachment.thumbnail),
|
||||
});
|
||||
});
|
||||
|
||||
const attachments = await Promise.all(promises);
|
||||
try {
|
||||
await Promise.all(
|
||||
_.map(attachments, (attachment, index) =>
|
||||
writeThumbnail(
|
||||
_.map(quotedAttachments, (attachment, index) =>
|
||||
writeQuoteThumbnail(
|
||||
attachment,
|
||||
Object.assign({}, options, {
|
||||
index,
|
||||
|
@ -550,26 +500,57 @@ async function writeThumbnails(rawQuotedAttachments, options) {
|
|||
}
|
||||
|
||||
async function writeAttachment(attachment, options) {
|
||||
if (!_.isString(attachment.path)) {
|
||||
throw new Error('writeAttachment: attachment.path was not a string!');
|
||||
}
|
||||
|
||||
const { dir, message, index, key, newKey } = options;
|
||||
const filename = _getAnonymousAttachmentFileName(message, index);
|
||||
const target = path.join(dir, filename);
|
||||
if (!Attachment.hasData(attachment)) {
|
||||
throw new TypeError("'attachment.data' is required");
|
||||
}
|
||||
|
||||
await writeEncryptedAttachment(target, attachment.data, {
|
||||
await writeEncryptedAttachment(target, attachment.path, {
|
||||
key,
|
||||
newKey,
|
||||
filename,
|
||||
dir,
|
||||
});
|
||||
|
||||
if (attachment.thumbnail && _.isString(attachment.thumbnail.path)) {
|
||||
const thumbnailName = `${_getAnonymousAttachmentFileName(
|
||||
message,
|
||||
index
|
||||
)}-thumbnail`;
|
||||
const thumbnailTarget = path.join(dir, thumbnailName);
|
||||
await writeEncryptedAttachment(thumbnailTarget, attachment.thumbnail.path, {
|
||||
key,
|
||||
newKey,
|
||||
filename: thumbnailName,
|
||||
dir,
|
||||
});
|
||||
}
|
||||
|
||||
if (attachment.screenshot && _.isString(attachment.screenshot.path)) {
|
||||
const screenshotName = `${_getAnonymousAttachmentFileName(
|
||||
message,
|
||||
index
|
||||
)}-screenshot`;
|
||||
const screenshotTarget = path.join(dir, screenshotName);
|
||||
await writeEncryptedAttachment(
|
||||
screenshotTarget,
|
||||
attachment.screenshot.path,
|
||||
{
|
||||
key,
|
||||
newKey,
|
||||
filename: screenshotName,
|
||||
dir,
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
async function writeAttachments(rawAttachments, options) {
|
||||
async function writeAttachments(attachments, options) {
|
||||
const { name } = options;
|
||||
|
||||
const { loadAttachmentData } = Signal.Migrations;
|
||||
const attachments = await Promise.all(rawAttachments.map(loadAttachmentData));
|
||||
const promises = _.map(attachments, (attachment, index) =>
|
||||
writeAttachment(
|
||||
attachment,
|
||||
|
@ -591,17 +572,18 @@ async function writeAttachments(rawAttachments, options) {
|
|||
}
|
||||
}
|
||||
|
||||
async function writeAvatar(avatar, options) {
|
||||
const { dir, message, index, key, newKey } = options;
|
||||
const name = _getAnonymousAttachmentFileName(message, index);
|
||||
const filename = `${name}-contact-avatar`;
|
||||
|
||||
const target = path.join(dir, filename);
|
||||
if (!avatar || !avatar.path) {
|
||||
async function writeAvatar(contact, options) {
|
||||
const { avatar } = contact || {};
|
||||
if (!avatar || !avatar.avatar || !avatar.avatar.path) {
|
||||
return;
|
||||
}
|
||||
|
||||
await writeEncryptedAttachment(target, avatar.data, {
|
||||
const { dir, message, index, key, newKey } = options;
|
||||
const name = _getAnonymousAttachmentFileName(message, index);
|
||||
const filename = `${name}-contact-avatar`;
|
||||
const target = path.join(dir, filename);
|
||||
|
||||
await writeEncryptedAttachment(target, avatar.avatar.path, {
|
||||
key,
|
||||
newKey,
|
||||
filename,
|
||||
|
@ -612,23 +594,9 @@ async function writeAvatar(avatar, options) {
|
|||
async function writeContactAvatars(contact, options) {
|
||||
const { name } = options;
|
||||
|
||||
const { loadAttachmentData } = Signal.Migrations;
|
||||
const promises = contact.map(async item => {
|
||||
if (
|
||||
!item ||
|
||||
!item.avatar ||
|
||||
!item.avatar.avatar ||
|
||||
!item.avatar.avatar.path
|
||||
) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return loadAttachmentData(item.avatar.avatar);
|
||||
});
|
||||
|
||||
try {
|
||||
await Promise.all(
|
||||
_.map(await Promise.all(promises), (item, index) =>
|
||||
_.map(contact, (item, index) =>
|
||||
writeAvatar(
|
||||
item,
|
||||
Object.assign({}, options, {
|
||||
|
@ -648,7 +616,7 @@ async function writeContactAvatars(contact, options) {
|
|||
}
|
||||
}
|
||||
|
||||
async function writeEncryptedAttachment(target, data, options = {}) {
|
||||
async function writeEncryptedAttachment(target, source, options = {}) {
|
||||
const { key, newKey, filename, dir } = options;
|
||||
|
||||
if (fs.existsSync(target)) {
|
||||
|
@ -661,7 +629,9 @@ async function writeEncryptedAttachment(target, data, options = {}) {
|
|||
}
|
||||
}
|
||||
|
||||
const ciphertext = await crypto.encryptSymmetric(key, data);
|
||||
const { readAttachmentData } = Signal.Migrations;
|
||||
const data = await readAttachmentData(source);
|
||||
const ciphertext = await crypto.encryptAttachment(key, source, data);
|
||||
|
||||
const writer = await createFileAndWriter(dir, filename);
|
||||
const stream = createOutputStream(writer);
|
||||
|
@ -673,9 +643,9 @@ function _sanitizeFileName(filename) {
|
|||
return filename.toString().replace(/[^a-z0-9.,+()'#\- ]/gi, '_');
|
||||
}
|
||||
|
||||
async function exportConversation(db, conversation, options) {
|
||||
options = options || {};
|
||||
async function exportConversation(conversation, options = {}) {
|
||||
const { name, dir, attachmentsDir, key, newKey } = options;
|
||||
|
||||
if (!name) {
|
||||
throw new Error('Need a name!');
|
||||
}
|
||||
|
@ -691,143 +661,111 @@ async function exportConversation(db, conversation, options) {
|
|||
|
||||
window.log.info('exporting conversation', name);
|
||||
const writer = await createFileAndWriter(dir, 'messages.json');
|
||||
const stream = createOutputStream(writer);
|
||||
stream.write('{"messages":[');
|
||||
|
||||
return new Promise(async (resolve, reject) => {
|
||||
// TODO: need to iterate through message ids, export using window.Signal.Data
|
||||
const transaction = db.transaction('messages', 'readwrite');
|
||||
transaction.onerror = () => {
|
||||
Whisper.Database.handleDOMException(
|
||||
`exportConversation transaction error (conversation: ${name})`,
|
||||
transaction.error,
|
||||
reject
|
||||
);
|
||||
};
|
||||
transaction.oncomplete = () => {
|
||||
// this doesn't really mean anything - we may have attachment processing to do
|
||||
};
|
||||
const CHUNK_SIZE = 50;
|
||||
let count = 0;
|
||||
let complete = false;
|
||||
|
||||
const store = transaction.objectStore('messages');
|
||||
const index = store.index('conversation');
|
||||
const range = window.IDBKeyRange.bound(
|
||||
[conversation.id, 0],
|
||||
[conversation.id, Number.MAX_VALUE]
|
||||
);
|
||||
// We're looping from the most recent to the oldest
|
||||
let lastReceivedAt = Number.MAX_VALUE;
|
||||
|
||||
let promiseChain = Promise.resolve();
|
||||
let count = 0;
|
||||
const request = index.openCursor(range);
|
||||
|
||||
const stream = createOutputStream(writer);
|
||||
stream.write('{"messages":[');
|
||||
|
||||
request.onerror = () => {
|
||||
Whisper.Database.handleDOMException(
|
||||
`exportConversation request error (conversation: ${name})`,
|
||||
request.error,
|
||||
reject
|
||||
);
|
||||
};
|
||||
request.onsuccess = async event => {
|
||||
const cursor = event.target.result;
|
||||
if (cursor) {
|
||||
const message = cursor.value;
|
||||
const { attachments } = message;
|
||||
|
||||
// skip message if it is disappearing, no matter the amount of time left
|
||||
if (message.expireTimer) {
|
||||
cursor.continue();
|
||||
return;
|
||||
}
|
||||
|
||||
if (count !== 0) {
|
||||
stream.write(',');
|
||||
}
|
||||
|
||||
// eliminate attachment data from the JSON, since it will go to disk
|
||||
// Note: this is for legacy messages only, which stored attachment data in the db
|
||||
message.attachments = _.map(attachments, attachment =>
|
||||
_.omit(attachment, ['data'])
|
||||
);
|
||||
// completely drop any attachments in messages cached in error objects
|
||||
// TODO: move to lodash. Sadly, a number of the method signatures have changed!
|
||||
message.errors = _.map(message.errors, error => {
|
||||
if (error && error.args) {
|
||||
error.args = [];
|
||||
}
|
||||
if (error && error.stack) {
|
||||
error.stack = '';
|
||||
}
|
||||
return error;
|
||||
});
|
||||
|
||||
const jsonString = JSON.stringify(stringify(message));
|
||||
stream.write(jsonString);
|
||||
|
||||
if (attachments && attachments.length > 0) {
|
||||
const exportAttachments = () =>
|
||||
writeAttachments(attachments, {
|
||||
dir: attachmentsDir,
|
||||
name,
|
||||
message,
|
||||
key,
|
||||
newKey,
|
||||
});
|
||||
|
||||
// eslint-disable-next-line more/no-then
|
||||
promiseChain = promiseChain.then(exportAttachments);
|
||||
}
|
||||
|
||||
const quoteThumbnails = message.quote && message.quote.attachments;
|
||||
if (quoteThumbnails && quoteThumbnails.length > 0) {
|
||||
const exportQuoteThumbnails = () =>
|
||||
writeThumbnails(quoteThumbnails, {
|
||||
dir: attachmentsDir,
|
||||
name,
|
||||
message,
|
||||
key,
|
||||
newKey,
|
||||
});
|
||||
|
||||
// eslint-disable-next-line more/no-then
|
||||
promiseChain = promiseChain.then(exportQuoteThumbnails);
|
||||
}
|
||||
|
||||
const { contact } = message;
|
||||
if (contact && contact.length > 0) {
|
||||
const exportContactAvatars = () =>
|
||||
writeContactAvatars(contact, {
|
||||
dir: attachmentsDir,
|
||||
name,
|
||||
message,
|
||||
key,
|
||||
newKey,
|
||||
});
|
||||
|
||||
// eslint-disable-next-line more/no-then
|
||||
promiseChain = promiseChain.then(exportContactAvatars);
|
||||
}
|
||||
|
||||
count += 1;
|
||||
cursor.continue();
|
||||
} else {
|
||||
try {
|
||||
await Promise.all([stream.write(']}'), promiseChain, stream.close()]);
|
||||
} catch (error) {
|
||||
window.log.error(
|
||||
'exportConversation: error exporting conversation',
|
||||
name,
|
||||
':',
|
||||
error && error.stack ? error.stack : error
|
||||
);
|
||||
reject(error);
|
||||
return;
|
||||
}
|
||||
|
||||
window.log.info('done exporting conversation', name);
|
||||
resolve();
|
||||
while (!complete) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
const collection = await window.Signal.Data.getMessagesByConversation(
|
||||
conversation.id,
|
||||
{
|
||||
limit: CHUNK_SIZE,
|
||||
receivedAt: lastReceivedAt,
|
||||
MessageCollection: Whisper.MessageCollection,
|
||||
}
|
||||
};
|
||||
});
|
||||
);
|
||||
const messages = getPlainJS(collection);
|
||||
|
||||
for (let i = 0, max = messages.length; i < max; i += 1) {
|
||||
const message = messages[i];
|
||||
if (count > 0) {
|
||||
stream.write(',');
|
||||
}
|
||||
|
||||
count += 1;
|
||||
|
||||
// skip message if it is disappearing, no matter the amount of time left
|
||||
if (message.expireTimer) {
|
||||
// eslint-disable-next-line no-continue
|
||||
continue;
|
||||
}
|
||||
|
||||
const { attachments } = message;
|
||||
// eliminate attachment data from the JSON, since it will go to disk
|
||||
// Note: this is for legacy messages only, which stored attachment data in the db
|
||||
message.attachments = _.map(attachments, attachment =>
|
||||
_.omit(attachment, ['data'])
|
||||
);
|
||||
// completely drop any attachments in messages cached in error objects
|
||||
// TODO: move to lodash. Sadly, a number of the method signatures have changed!
|
||||
message.errors = _.map(message.errors, error => {
|
||||
if (error && error.args) {
|
||||
error.args = [];
|
||||
}
|
||||
if (error && error.stack) {
|
||||
error.stack = '';
|
||||
}
|
||||
return error;
|
||||
});
|
||||
|
||||
const jsonString = JSON.stringify(stringify(message));
|
||||
stream.write(jsonString);
|
||||
|
||||
if (attachments && attachments.length > 0) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
await writeAttachments(attachments, {
|
||||
dir: attachmentsDir,
|
||||
name,
|
||||
message,
|
||||
key,
|
||||
newKey,
|
||||
});
|
||||
}
|
||||
|
||||
const quoteThumbnails = message.quote && message.quote.attachments;
|
||||
if (quoteThumbnails && quoteThumbnails.length > 0) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
await writeQuoteThumbnails(quoteThumbnails, {
|
||||
dir: attachmentsDir,
|
||||
name,
|
||||
message,
|
||||
key,
|
||||
newKey,
|
||||
});
|
||||
}
|
||||
|
||||
const { contact } = message;
|
||||
if (contact && contact.length > 0) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
await writeContactAvatars(contact, {
|
||||
dir: attachmentsDir,
|
||||
name,
|
||||
message,
|
||||
key,
|
||||
newKey,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const last = messages.length > 0 ? messages[messages.length - 1] : null;
|
||||
if (last) {
|
||||
lastReceivedAt = last.received_at;
|
||||
}
|
||||
|
||||
if (messages.length < CHUNK_SIZE) {
|
||||
complete = true;
|
||||
}
|
||||
}
|
||||
|
||||
stream.write(']}');
|
||||
await stream.close();
|
||||
}
|
||||
|
||||
// Goals for directory names:
|
||||
|
@ -857,74 +795,40 @@ function _getConversationLoggingName(conversation) {
|
|||
return name;
|
||||
}
|
||||
|
||||
function exportConversations(db, options) {
|
||||
async function exportConversations(options) {
|
||||
options = options || {};
|
||||
const { messagesDir, attachmentsDir, key, newKey } = options;
|
||||
|
||||
if (!messagesDir) {
|
||||
return Promise.reject(new Error('Need a messages directory!'));
|
||||
throw new Error('Need a messages directory!');
|
||||
}
|
||||
if (!attachmentsDir) {
|
||||
return Promise.reject(new Error('Need an attachments directory!'));
|
||||
throw new Error('Need an attachments directory!');
|
||||
}
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const transaction = db.transaction('conversations', 'readwrite');
|
||||
transaction.onerror = () => {
|
||||
Whisper.Database.handleDOMException(
|
||||
'exportConversations transaction error',
|
||||
transaction.error,
|
||||
reject
|
||||
);
|
||||
};
|
||||
transaction.oncomplete = () => {
|
||||
// not really very useful - fires at unexpected times
|
||||
};
|
||||
|
||||
let promiseChain = Promise.resolve();
|
||||
const store = transaction.objectStore('conversations');
|
||||
const request = store.openCursor();
|
||||
request.onerror = () => {
|
||||
Whisper.Database.handleDOMException(
|
||||
'exportConversations request error',
|
||||
request.error,
|
||||
reject
|
||||
);
|
||||
};
|
||||
request.onsuccess = async event => {
|
||||
const cursor = event.target.result;
|
||||
if (cursor && cursor.value) {
|
||||
const conversation = cursor.value;
|
||||
const dirName = _getConversationDirName(conversation);
|
||||
const name = _getConversationLoggingName(conversation);
|
||||
|
||||
const process = async () => {
|
||||
const dir = await createDirectory(messagesDir, dirName);
|
||||
return exportConversation(db, conversation, {
|
||||
name,
|
||||
dir,
|
||||
attachmentsDir,
|
||||
key,
|
||||
newKey,
|
||||
});
|
||||
};
|
||||
|
||||
window.log.info('scheduling export for conversation', name);
|
||||
// eslint-disable-next-line more/no-then
|
||||
promiseChain = promiseChain.then(process);
|
||||
cursor.continue();
|
||||
} else {
|
||||
window.log.info('Done scheduling conversation exports');
|
||||
try {
|
||||
await promiseChain;
|
||||
} catch (error) {
|
||||
reject(error);
|
||||
return;
|
||||
}
|
||||
resolve();
|
||||
}
|
||||
};
|
||||
const collection = await window.Signal.Data.getAllConversations({
|
||||
ConversationCollection: Whisper.ConversationCollection,
|
||||
});
|
||||
const conversations = collection.models;
|
||||
|
||||
for (let i = 0, max = conversations.length; i < max; i += 1) {
|
||||
const conversation = conversations[i];
|
||||
const dirName = _getConversationDirName(conversation);
|
||||
const name = _getConversationLoggingName(conversation);
|
||||
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
const dir = await createDirectory(messagesDir, dirName);
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
await exportConversation(conversation, {
|
||||
name,
|
||||
dir,
|
||||
attachmentsDir,
|
||||
key,
|
||||
newKey,
|
||||
});
|
||||
}
|
||||
|
||||
window.log.info('Done exporting conversations!');
|
||||
}
|
||||
|
||||
function getDirectory(options = {}) {
|
||||
|
@ -968,9 +872,30 @@ async function loadAttachments(dir, getName, options) {
|
|||
const { message } = options;
|
||||
|
||||
await Promise.all(
|
||||
_.map(message.attachments, (attachment, index) => {
|
||||
_.map(message.attachments, async (attachment, index) => {
|
||||
const name = getName(message, index, attachment);
|
||||
return readAttachment(dir, attachment, name, options);
|
||||
|
||||
await readEncryptedAttachment(dir, attachment, name, options);
|
||||
|
||||
if (attachment.thumbnail && _.isString(attachment.thumbnail.path)) {
|
||||
const thumbnailName = `${name}-thumbnail`;
|
||||
await readEncryptedAttachment(
|
||||
dir,
|
||||
attachment.thumbnail,
|
||||
thumbnailName,
|
||||
options
|
||||
);
|
||||
}
|
||||
|
||||
if (attachment.screenshot && _.isString(attachment.screenshot.path)) {
|
||||
const screenshotName = `${name}-screenshot`;
|
||||
await readEncryptedAttachment(
|
||||
dir,
|
||||
attachment.screenshot,
|
||||
screenshotName,
|
||||
options
|
||||
);
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
|
@ -982,8 +907,8 @@ async function loadAttachments(dir, getName, options) {
|
|||
return null;
|
||||
}
|
||||
|
||||
const name = `${getName(message, index)}-thumbnail`;
|
||||
return readAttachment(dir, thumbnail, name, options);
|
||||
const name = `${getName(message, index)}-quote-thumbnail`;
|
||||
return readEncryptedAttachment(dir, thumbnail, name, options);
|
||||
})
|
||||
);
|
||||
|
||||
|
@ -996,7 +921,7 @@ async function loadAttachments(dir, getName, options) {
|
|||
}
|
||||
|
||||
const name = `${getName(message, index)}-contact-avatar`;
|
||||
return readAttachment(dir, avatar, name, options);
|
||||
return readEncryptedAttachment(dir, avatar, name, options);
|
||||
})
|
||||
);
|
||||
|
||||
|
@@ -1179,31 +1104,22 @@ function getDirectoryForExport() {
return getDirectory();
}

function createZip(zipDir, targetDir) {
return new Promise((resolve, reject) => {
const target = path.join(zipDir, 'messages.zip');
const output = fs.createWriteStream(target);
const archive = archiver('zip', {
async function compressArchive(file, targetDir) {
const items = fs.readdirSync(targetDir);
return tar.c(
{
gzip: true,
file,
cwd: targetDir,
});
},
items
);
}

output.on('close', () => {
resolve(target);
});

archive.on('warning', error => {
window.log.warn(`Archive generation warning: ${error.stack}`);
});
archive.on('error', reject);

archive.pipe(output);

// The empty string ensures that the base location of the files added to the zip
// is nothing. If you provide null, you get the absolute path you pulled the files
// from in the first place.
archive.directory(targetDir, '');

archive.finalize();
async function decompressArchive(file, targetDir) {
return tar.x({
file,
cwd: targetDir,
});
}
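To see how the two new helpers fit together, here is a minimal round-trip sketch (not part of the commit; it assumes the tar@4.4.8 API used above and the tmp module already required in this file):

const path = require('path');
const tmp = require('tmp');

async function archiveRoundTrip(stagingDir) {
  // Pack every top-level entry of the staging directory into one gzipped tarball.
  const archivePath = path.join(tmp.dirSync().name, 'messages.tar.gz');
  await compressArchive(archivePath, stagingDir);

  // Later, on import, unpack the (decrypted) archive into a fresh staging directory.
  const restoreDir = tmp.dirSync().name;
  await decompressArchive(archivePath, restoreDir);
  return restoreDir;
}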
@@ -1211,6 +1127,13 @@ function writeFile(targetPath, contents) {
return pify(fs.writeFile)(targetPath, contents);
}

// prettier-ignore
const UNIQUE_ID = new Uint8Array([
1, 3, 4, 5, 6, 7, 8, 11,
23, 34, 1, 34, 3, 5, 45, 45,
1, 3, 4, 5, 6, 7, 8, 11,
23, 34, 1, 34, 3, 5, 45, 45,
]);
async function encryptFile(sourcePath, targetPath, options) {
options = options || {};

@@ -1220,8 +1143,8 @@ async function encryptFile(sourcePath, targetPath, options) {
}

const plaintext = await readFileAsArrayBuffer(sourcePath);
const ciphertext = await crypto.encryptSymmetric(key, plaintext);
return writeFile(targetPath, ciphertext);
const ciphertext = await crypto.encryptFile(key, UNIQUE_ID, plaintext);
return writeFile(targetPath, Buffer.from(ciphertext));
}

async function decryptFile(sourcePath, targetPath, options) {

@@ -1233,7 +1156,7 @@ async function decryptFile(sourcePath, targetPath, options) {
}

const ciphertext = await readFileAsArrayBuffer(sourcePath);
const plaintext = await crypto.decryptSymmetric(key, ciphertext);
const plaintext = await crypto.decryptFile(key, UNIQUE_ID, ciphertext);
return writeFile(targetPath, Buffer.from(plaintext));
}
|
|||
return pify(rimraf)(pattern);
|
||||
}
|
||||
|
||||
async function exportToDirectory(directory, options) {
|
||||
throw new Error('Encrypted export/import is disabled');
|
||||
const ARCHIVE_NAME = 'messages.tar.gz';
|
||||
|
||||
async function exportToDirectory(directory, options) {
|
||||
options = options || {};
|
||||
|
||||
if (!options.key) {
|
||||
|
@ -1261,20 +1184,19 @@ async function exportToDirectory(directory, options) {
|
|||
stagingDir = await createTempDir();
|
||||
encryptionDir = await createTempDir();
|
||||
|
||||
const db = await Whisper.Database.open();
|
||||
const attachmentsDir = await createDirectory(directory, 'attachments');
|
||||
|
||||
await exportContactAndGroupsToFile(db, stagingDir);
|
||||
await exportContactAndGroupsToFile(stagingDir);
|
||||
await exportConversations(
|
||||
db,
|
||||
Object.assign({}, options, {
|
||||
messagesDir: stagingDir,
|
||||
attachmentsDir,
|
||||
})
|
||||
);
|
||||
|
||||
const zip = await createZip(encryptionDir, stagingDir);
|
||||
await encryptFile(zip, path.join(directory, 'messages.zip'), options);
|
||||
const archivePath = path.join(directory, ARCHIVE_NAME);
|
||||
await compressArchive(archivePath, stagingDir);
|
||||
await encryptFile(archivePath, path.join(directory, ARCHIVE_NAME), options);
|
||||
|
||||
window.log.info('done backing up!');
|
||||
return directory;
|
||||
|
@ -1317,10 +1239,8 @@ async function importFromDirectory(directory, options) {
|
|||
groupLookup,
|
||||
});
|
||||
|
||||
const zipPath = path.join(directory, 'messages.zip');
|
||||
if (fs.existsSync(zipPath)) {
|
||||
throw new Error('Encrypted export/import is disabled');
|
||||
|
||||
const archivePath = path.join(directory, ARCHIVE_NAME);
|
||||
if (fs.existsSync(archivePath)) {
|
||||
// we're in the world of an encrypted, zipped backup
|
||||
if (!options.key) {
|
||||
throw new Error(
|
||||
|
@ -1336,9 +1256,9 @@ async function importFromDirectory(directory, options) {
|
|||
|
||||
const attachmentsDir = path.join(directory, 'attachments');
|
||||
|
||||
const decryptedZip = path.join(decryptionDir, 'messages.zip');
|
||||
await decryptFile(zipPath, decryptedZip, options);
|
||||
await decompress(decryptedZip, stagingDir);
|
||||
const decryptedArchivePath = path.join(decryptionDir, ARCHIVE_NAME);
|
||||
await decryptFile(archivePath, decryptedArchivePath, options);
|
||||
await decompressArchive(decryptedArchivePath, stagingDir);
|
||||
|
||||
options = Object.assign({}, options, {
|
||||
attachmentsDir,
|
||||
|
|
|
@@ -11,10 +11,14 @@ module.exports = {
constantTimeEqual,
decryptAesCtr,
decryptDeviceName,
decryptAttachment,
decryptFile,
decryptSymmetric,
deriveAccessKey,
encryptAesCtr,
encryptDeviceName,
encryptAttachment,
encryptFile,
encryptSymmetric,
fromEncodedBinaryToArrayBuffer,
getAccessKeyVerifier,

@@ -30,6 +34,24 @@ module.exports = {
verifyAccessKey,
};

function arrayBufferToBase64(arrayBuffer) {
return dcodeIO.ByteBuffer.wrap(arrayBuffer).toString('base64');
}
function base64ToArrayBuffer(base64string) {
return dcodeIO.ByteBuffer.wrap(base64string, 'base64').toArrayBuffer();
}

function fromEncodedBinaryToArrayBuffer(key) {
return dcodeIO.ByteBuffer.wrap(key, 'binary').toArrayBuffer();
}

function bytesFromString(string) {
return dcodeIO.ByteBuffer.wrap(string, 'utf8').toArrayBuffer();
}
function stringFromBytes(buffer) {
return dcodeIO.ByteBuffer.wrap(buffer).toString('utf8');
}

// High-level Operations

async function encryptDeviceName(deviceName, identityPublic) {

@@ -81,6 +103,48 @@ async function decryptDeviceName(
return stringFromBytes(plaintext);
}

// Path structure: 'fa/facdf99c22945b1c9393345599a276f4b36ad7ccdc8c2467f5441b742c2d11fa'
function getAttachmentLabel(path) {
const filename = path.slice(3);
return base64ToArrayBuffer(filename);
}

const PUB_KEY_LENGTH = 32;
async function encryptAttachment(staticPublicKey, path, plaintext) {
const uniqueId = getAttachmentLabel(path);
return encryptFile(staticPublicKey, uniqueId, plaintext);
}

async function decryptAttachment(staticPrivateKey, path, data) {
const uniqueId = getAttachmentLabel(path);
return decryptFile(staticPrivateKey, uniqueId, data);
}

async function encryptFile(staticPublicKey, uniqueId, plaintext) {
const ephemeralKeyPair = await libsignal.KeyHelper.generateIdentityKeyPair();
const agreement = await libsignal.Curve.async.calculateAgreement(
staticPublicKey,
ephemeralKeyPair.privKey
);
const key = await hmacSha256(agreement, uniqueId);

const prefix = ephemeralKeyPair.pubKey.slice(1);
return concatenateBytes(prefix, await encryptSymmetric(key, plaintext));
}

async function decryptFile(staticPrivateKey, uniqueId, data) {
const ephemeralPublicKey = _getFirstBytes(data, PUB_KEY_LENGTH);
const ciphertext = _getBytes(data, PUB_KEY_LENGTH, data.byteLength);
const agreement = await libsignal.Curve.async.calculateAgreement(
ephemeralPublicKey,
staticPrivateKey
);

const key = await hmacSha256(agreement, uniqueId);

return decryptSymmetric(key, ciphertext);
}

async function deriveAccessKey(profileKey) {
const iv = getZeroes(12);
const plaintext = getZeroes(16);

@@ -318,24 +382,6 @@ function trimBytes(buffer, length) {
return _getFirstBytes(buffer, length);
}

function arrayBufferToBase64(arrayBuffer) {
return dcodeIO.ByteBuffer.wrap(arrayBuffer).toString('base64');
}
function base64ToArrayBuffer(base64string) {
return dcodeIO.ByteBuffer.wrap(base64string, 'base64').toArrayBuffer();
}

function fromEncodedBinaryToArrayBuffer(key) {
return dcodeIO.ByteBuffer.wrap(key, 'binary').toArrayBuffer();
}

function bytesFromString(string) {
return dcodeIO.ByteBuffer.wrap(string, 'utf8').toArrayBuffer();
}
function stringFromBytes(buffer) {
return dcodeIO.ByteBuffer.wrap(buffer).toString('utf8');
}

function getViewOfArrayBuffer(buffer, start, finish) {
const source = new Uint8Array(buffer);
const result = source.slice(start, finish);
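In short, each exported file now gets its own key: an ephemeral keypair is agreed against the backup's static key, and the attachment's on-disk path doubles as the HMAC label. A usage sketch of the round trip (illustrative only, not part of the commit; it assumes libsignal's KeyHelper as in the functions above):

async function attachmentRoundTrip() {
  // Static keypair: its public half is what exportToDirectory receives as `key`.
  const staticKeyPair = await libsignal.KeyHelper.generateIdentityKeyPair();

  const path = 'fa/facdf99c22945b1c9393345599a276f4b36ad7ccdc8c2467f5441b742c2d11fa';
  const plaintext = bytesFromString('attachment bytes');

  // Encrypt with the public key (minus its leading type byte); decrypt with the private key.
  const encrypted = await encryptAttachment(
    staticKeyPair.pubKey.slice(1),
    path,
    plaintext
  );
  const decrypted = await decryptAttachment(staticKeyPair.privKey, path, encrypted);

  return constantTimeEqual(plaintext, decrypted); // expected: true
}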
@@ -50,6 +50,7 @@ module.exports = {
createOrUpdateGroup,
getGroupById,
getAllGroupIds,
getAllGroups,
bulkAddGroups,
removeGroupById,
removeAllGroups,

@@ -395,6 +396,10 @@ async function getAllGroupIds() {
const ids = await channels.getAllGroupIds();
return ids;
}
async function getAllGroups() {
const groups = await channels.getAllGroups();
return groups;
}
async function bulkAddGroups(array) {
await channels.bulkAddGroups(array);
}
@@ -133,6 +133,7 @@ function initializeMigrations({
loadAttachmentData,
loadQuoteData,
loadMessage: MessageType.createAttachmentLoader(loadAttachmentData),
readAttachmentData,
run,
upgradeMessageSchema: (message, options = {}) => {
const { maxVersion } = options;
@@ -545,8 +545,6 @@ exports.createAttachmentDataWriter = ({
});
};

// TODO: need to handle attachment thumbnails and video screenshots

const messageWithoutAttachmentData = Object.assign(
{},
await writeThumbnails(message, { logger }),

@@ -555,7 +553,23 @@ exports.createAttachmentDataWriter = ({
attachments: await Promise.all(
(attachments || []).map(async attachment => {
await writeExistingAttachmentData(attachment);
return omit(attachment, ['data']);

if (attachment.screenshot && attachment.screenshot.data) {
await writeExistingAttachmentData(attachment.screenshot);
}
if (attachment.thumbnail && attachment.thumbnail.data) {
await writeExistingAttachmentData(attachment.thumbnail);
}

return {
...omit(attachment, ['data']),
...(attachment.thumbnail
? { thumbnail: omit(attachment.thumbnail, ['data']) }
: null),
...(attachment.screenshot
? { screenshot: omit(attachment.screenshot, ['data']) }
: null),
};
})
),
}
@@ -43,7 +43,6 @@
"dependencies": {
"@journeyapps/sqlcipher": "https://github.com/scottnonnenberg-signal/node-sqlcipher.git#ed4f4d179ac010c6347b291cbd4c2ebe5c773741",
"@sindresorhus/is": "0.8.0",
"archiver": "2.1.1",
"backbone": "1.3.3",
"blob-util": "1.3.0",
"blueimp-canvas-to-blob": "3.14.0",

@@ -86,6 +85,7 @@
"rimraf": "2.6.2",
"semver": "5.4.1",
"spellchecker": "3.4.4",
"tar": "4.4.8",
"testcheck": "1.0.0-rc.2",
"tmp": "0.0.33",
"to-arraybuffer": "1.0.1",
13 preload.js

@@ -274,3 +274,16 @@ window.Signal.Logs = require('./js/modules/logs');
// We pull this in last, because the native module involved appears to be sensitive to
// /tmp mounted as noexec on Linux.
require('./js/spell_check');

if (config.environment === 'test') {
/* eslint-disable global-require, import/no-extraneous-dependencies */
window.test = {
glob: require('glob'),
fse: require('fs-extra'),
tmp: require('tmp'),
path: require('path'),
basePath: __dirname,
attachmentsPath: window.Signal.Migrations.attachmentsPath,
};
/* eslint-enable global-require, import/no-extraneous-dependencies */
}
@@ -1,10 +1,6 @@
/* global Signal: false */
/* global Whisper: false */
/* global assert: false */
/* global textsecure: false */
/* global _: false */
/* global Signal, Whisper, assert, textsecure, _, libsignal */

/* eslint-disable no-unreachable, no-console */
/* eslint-disable no-console */

'use strict';
@@ -240,8 +236,8 @@ describe('Backup', () => {
});

describe('end-to-end', () => {
it('exports then imports to produce the same data we started with', async () => {
return;
it('exports then imports to produce the same data we started with', async function thisNeeded() {
this.timeout(6000);

const { attachmentsPath, fse, glob, path, tmp } = window.test;
const {
@@ -249,46 +245,32 @@ describe('Backup', () => {
loadAttachmentData,
} = window.Signal.Migrations;

const key = new Uint8Array([
1, 3, 4, 5, 6, 7, 8, 11,
23, 34, 1, 34, 3, 5, 45, 45,
1, 3, 4, 5, 6, 7, 8, 11,
23, 34, 1, 34, 3, 5, 45, 45,
]);
const staticKeyPair = await libsignal.KeyHelper.generateIdentityKeyPair();
const attachmentsPattern = path.join(attachmentsPath, '**');

const OUR_NUMBER = '+12025550000';
const CONTACT_ONE_NUMBER = '+12025550001';
const CONTACT_TWO_NUMBER = '+12025550002';

const toArrayBuffer = nodeBuffer =>
nodeBuffer.buffer.slice(
nodeBuffer.byteOffset,
nodeBuffer.byteOffset + nodeBuffer.byteLength
);

const getFixture = target => toArrayBuffer(fse.readFileSync(target));

const FIXTURES = {
gif: getFixture('fixtures/giphy-7GFfijngKbeNy.gif'),
mp4: getFixture('fixtures/pixabay-Soap-Bubble-7141.mp4'),
jpg: getFixture('fixtures/koushik-chowdavarapu-105425-unsplash.jpg'),
mp3: getFixture('fixtures/incompetech-com-Agnus-Dei-X.mp3'),
txt: getFixture('fixtures/lorem-ipsum.txt'),
png: getFixture(
'fixtures/freepngs-2cd43b_bed7d1327e88454487397574d87b64dc_mv2.png'
),
};

async function wrappedLoadAttachment(attachment) {
return _.omit(await loadAttachmentData(attachment), ['path']);
}
@@ -376,16 +358,30 @@ describe('Backup', () => {
})
),
attachments: await Promise.all(
(message.attachments || []).map(attachment =>
wrappedLoadAttachment(attachment)
)
(message.attachments || []).map(async attachment => {
await wrappedLoadAttachment(attachment);

if (attachment.thumbnail) {
await wrappedLoadAttachment(attachment.thumbnail);
}

if (attachment.screenshot) {
await wrappedLoadAttachment(attachment.screenshot);
}

return attachment;
})
),
});
}

let backupDir;
try {
const ATTACHMENT_COUNT = 3;
// Seven total:
// - Five from image/video attachments
// - One from embedded contact avatar
// - Another from embedded quoted attachment thumbnail
const ATTACHMENT_COUNT = 7;
const MESSAGE_COUNT = 1;
const CONVERSATION_COUNT = 1;
@@ -397,47 +393,20 @@ describe('Backup', () => {
timestamp: 1524185933350,
errors: [],
attachments: [
// Note: generates two more files: screenshot and thumbnail
{
contentType: 'image/gif',
fileName: 'sad_cat.gif',
data: new Uint8Array([
1, 2, 3, 4, 5, 6, 7, 8,
1, 2, 3, 4, 5, 6, 7, 8,
1, 2, 3, 4, 5, 6, 7, 8,
1, 2, 3, 4, 5, 6, 7, 8,
]).buffer,
contentType: 'video/mp4',
fileName: 'video.mp4',
data: FIXTURES.mp4,
},
// Note: generates one more file: thumbnail
{
contentType: 'image/png',
fileName: 'landscape.png',
data: FIXTURES.png,
},
],
hasAttachments: 1,
hasFileAttachments: undefined,
hasVisualMediaAttachments: 1,
quote: {
text: "Isn't it cute?",
@@ -450,43 +419,10 @@ describe('Backup', () => {
},
{
contentType: 'image/gif',
fileName: 'happy_cat.gif',
fileName: 'avatar.gif',
thumbnail: {
contentType: 'image/png',
data: new Uint8Array([
2, 2, 3, 4, 5, 6, 7, 8,
1, 2, 3, 4, 5, 6, 7, 8,
1, 2, 3, 4, 5, 6, 7, 8,
1, 2, 3, 4, 5, 6, 7, 8,
]).buffer,
data: FIXTURES.gif,
},
},
],
@@ -506,40 +442,7 @@ describe('Backup', () => {
isProfile: false,
avatar: {
contentType: 'image/png',
data: new Uint8Array([
3, 2, 3, 4, 5, 6, 7, 8,
1, 2, 3, 4, 5, 6, 7, 8,
1, 2, 3, 4, 5, 6, 7, 8,
1, 2, 3, 4, 5, 6, 7, 8,
]).buffer,
data: FIXTURES.png,
},
},
},
@@ -552,107 +455,30 @@ describe('Backup', () => {
console.log('Backup test: Create models, save to db/disk');
const message = await upgradeMessageSchema(messageWithAttachments);
console.log({ message });
const messageModel = new Whisper.Message(message);
const id = await window.Signal.Data.saveMessage(
messageModel.attributes,
{
Message: Whisper.Message,
}
);
messageModel.set({ id });
await window.Signal.Data.saveMessage(message, {
Message: Whisper.Message,
});

const conversation = {
active_at: 1524185933350,
color: 'orange',
expireTimer: 0,
id: CONTACT_ONE_NUMBER,
lastMessage: 'Heyo!',
name: 'Someone Somewhere',
profileAvatar: {
contentType: 'image/jpeg',
data: new Uint8Array([
4, 2, 3, 4, 5, 6, 7, 8,
1, 2, 3, 4, 5, 6, 7, 8,
1, 2, 3, 4, 5, 6, 7, 8,
1, 2, 3, 4, 5, 6, 7, 8,
]).buffer,
data: FIXTURES.jpeg,
size: 64,
},
profileKey: new Uint8Array([
5, 2, 3, 4, 5, 6, 7, 8,
1, 2, 3, 4, 5, 6, 7, 8,
1, 2, 3, 4, 5, 6, 7, 8,
1, 2, 3, 4, 5, 6, 7, 8,
]).buffer,
profileKey: 'BASE64KEY',
profileName: 'Someone! 🤔',
profileSharing: true,
timestamp: 1524185933350,
tokens: [
'someone somewhere',
'someone',
'somewhere',
'2025550001',
'12025550001',
],
type: 'private',
unreadCount: 0,
verified: 0,
sealedSender: 0,
version: 2,
};
console.log({ conversation });
await window.Signal.Data.saveConversation(conversation, {
@@ -669,11 +495,13 @@ describe('Backup', () => {
console.log('Backup test: Export!');
backupDir = tmp.dirSync().name;
console.log({ backupDir });
await Signal.Backup.exportToDirectory(backupDir, { key });
await Signal.Backup.exportToDirectory(backupDir, {
key: staticKeyPair.pubKey,
});

console.log('Backup test: Ensure that messages.zip exists');
const zipPath = path.join(backupDir, 'messages.zip');
const messageZipExists = fse.existsSync(zipPath);
console.log('Backup test: Ensure that messages.tar.gz exists');
const archivePath = path.join(backupDir, 'messages.tar.gz');
const messageZipExists = fse.existsSync(archivePath);
assert.strictEqual(true, messageZipExists);

console.log(
@ -688,43 +516,9 @@ describe('Backup', () => {
|
|||
await clearAllData();
|
||||
|
||||
console.log('Backup test: Import!');
|
||||
await Signal.Backup.importFromDirectory(backupDir, { key });
|
||||
|
||||
console.log('Backup test: ensure that all attachments were imported');
|
||||
const recreatedAttachmentFiles = removeDirs(
|
||||
glob.sync(attachmentsPattern)
|
||||
);
|
||||
console.log({ recreatedAttachmentFiles });
|
||||
assert.strictEqual(ATTACHMENT_COUNT, recreatedAttachmentFiles.length);
|
||||
assert.deepEqual(attachmentFiles, recreatedAttachmentFiles);
|
||||
|
||||
console.log('Backup test: Check messages');
|
||||
const messageCollection = await window.Signal.Data.getAllMessages({
|
||||
MessageCollection: Whisper.MessageCollection,
|
||||
await Signal.Backup.importFromDirectory(backupDir, {
|
||||
key: staticKeyPair.privKey,
|
||||
});
|
||||
assert.strictEqual(messageCollection.length, MESSAGE_COUNT);
|
||||
const messageFromDB = removeId(messageCollection.at(0).attributes);
|
||||
const expectedMessage = omitUndefinedKeys(message);
|
||||
console.log({ messageFromDB, expectedMessage });
|
||||
assert.deepEqual(messageFromDB, expectedMessage);
|
||||
|
||||
console.log(
|
||||
'Backup test: Check that all attachments were successfully imported'
|
||||
);
|
||||
const messageWithAttachmentsFromDB = await loadAllFilesFromDisk(
|
||||
messageFromDB
|
||||
);
|
||||
const expectedMessageWithAttachments = omitUndefinedKeys(
|
||||
messageWithAttachments
|
||||
);
|
||||
console.log({
|
||||
messageWithAttachmentsFromDB,
|
||||
expectedMessageWithAttachments,
|
||||
});
|
||||
assert.deepEqual(
|
||||
_.omit(messageWithAttachmentsFromDB, ['schemaVersion']),
|
||||
expectedMessageWithAttachments
|
||||
);
|
||||
|
||||
console.log('Backup test: Check conversations');
|
||||
const conversationCollection = await window.Signal.Data.getAllConversations(
|
||||
|
@ -741,6 +535,42 @@ describe('Backup', () => {
|
|||
_.omit(conversation, ['profileAvatar'])
|
||||
);
|
||||
|
||||
console.log('Backup test: Check messages');
|
||||
const messageCollection = await window.Signal.Data.getAllMessages({
|
||||
MessageCollection: Whisper.MessageCollection,
|
||||
});
|
||||
assert.strictEqual(messageCollection.length, MESSAGE_COUNT);
|
||||
const messageFromDB = removeId(messageCollection.at(0).attributes);
|
||||
const expectedMessage = messageFromDB;
|
||||
console.log({ messageFromDB, expectedMessage });
|
||||
assert.deepEqual(messageFromDB, expectedMessage);
|
||||
|
||||
console.log('Backup test: ensure that all attachments were imported');
|
||||
const recreatedAttachmentFiles = removeDirs(
|
||||
glob.sync(attachmentsPattern)
|
||||
);
|
||||
console.log({ recreatedAttachmentFiles });
|
||||
assert.strictEqual(ATTACHMENT_COUNT, recreatedAttachmentFiles.length);
|
||||
assert.deepEqual(attachmentFiles, recreatedAttachmentFiles);
|
||||
|
||||
console.log(
|
||||
'Backup test: Check that all attachments were successfully imported'
|
||||
);
|
||||
const messageWithAttachmentsFromDB = await loadAllFilesFromDisk(
|
||||
messageFromDB
|
||||
);
|
||||
const expectedMessageWithAttachments = await loadAllFilesFromDisk(
|
||||
omitUndefinedKeys(message)
|
||||
);
|
||||
console.log({
|
||||
messageWithAttachmentsFromDB,
|
||||
expectedMessageWithAttachments,
|
||||
});
|
||||
assert.deepEqual(
|
||||
messageWithAttachmentsFromDB,
|
||||
expectedMessageWithAttachments
|
||||
);
|
||||
|
||||
console.log('Backup test: Clear all data');
|
||||
await clearAllData();
|
||||
|
||||
|
|
|
@ -44,7 +44,7 @@ describe('Crypto', () => {
|
|||
|
||||
const encrypted = await Signal.Crypto.encryptSymmetric(key, plaintext);
|
||||
const uintArray = new Uint8Array(encrypted);
|
||||
uintArray[2] = 9;
|
||||
uintArray[2] += 2;
|
||||
|
||||
try {
|
||||
await Signal.Crypto.decryptSymmetric(key, uintArray.buffer);
|
||||
|
@ -69,7 +69,7 @@ describe('Crypto', () => {
|
|||
|
||||
const encrypted = await Signal.Crypto.encryptSymmetric(key, plaintext);
|
||||
const uintArray = new Uint8Array(encrypted);
|
||||
uintArray[uintArray.length - 3] = 9;
|
||||
uintArray[uintArray.length - 3] += 2;
|
||||
|
||||
try {
|
||||
await Signal.Crypto.decryptSymmetric(key, uintArray.buffer);
|
||||
|
@ -94,7 +94,7 @@ describe('Crypto', () => {
|
|||
|
||||
const encrypted = await Signal.Crypto.encryptSymmetric(key, plaintext);
|
||||
const uintArray = new Uint8Array(encrypted);
|
||||
uintArray[35] = 9;
|
||||
uintArray[35] += 9;
|
||||
|
||||
try {
|
||||
await Signal.Crypto.decryptSymmetric(key, uintArray.buffer);
|
||||
|
@ -146,4 +146,30 @@ describe('Crypto', () => {
|
|||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('attachment encryption', () => {
|
||||
it('roundtrips', async () => {
|
||||
const staticKeyPair = await libsignal.KeyHelper.generateIdentityKeyPair();
|
||||
const message = 'this is my message';
|
||||
const plaintext = Signal.Crypto.bytesFromString(message);
|
||||
const path =
|
||||
'fa/facdf99c22945b1c9393345599a276f4b36ad7ccdc8c2467f5441b742c2d11fa';
|
||||
|
||||
const encrypted = await Signal.Crypto.encryptAttachment(
|
||||
staticKeyPair.pubKey.slice(1),
|
||||
path,
|
||||
plaintext
|
||||
);
|
||||
const decrypted = await Signal.Crypto.decryptAttachment(
|
||||
staticKeyPair.privKey,
|
||||
path,
|
||||
encrypted
|
||||
);
|
||||
|
||||
const equal = Signal.Crypto.constantTimeEqual(plaintext, decrypted);
|
||||
if (!equal) {
|
||||
throw new Error('The output and input did not match!');
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -148,7 +148,9 @@ InMemorySignalProtocolStore.prototype = {
|
|||
};
|
||||
|
||||
describe('SecretSessionCipher', () => {
|
||||
it('successfully roundtrips', async () => {
|
||||
it('successfully roundtrips', async function thisNeeded() {
|
||||
this.timeout(4000);
|
||||
|
||||
const aliceStore = new InMemorySignalProtocolStore();
|
||||
const bobStore = new InMemorySignalProtocolStore();
|
||||
|
||||
|
@ -187,7 +189,9 @@ describe('SecretSessionCipher', () => {
|
|||
assert.strictEqual(decryptResult.sender.toString(), '+14151111111.1');
|
||||
});
|
||||
|
||||
it('fails when untrusted', async () => {
|
||||
it('fails when untrusted', async function thisNeeded() {
|
||||
this.timeout(4000);
|
||||
|
||||
const aliceStore = new InMemorySignalProtocolStore();
|
||||
const bobStore = new InMemorySignalProtocolStore();
|
||||
|
||||
|
@ -226,7 +230,9 @@ describe('SecretSessionCipher', () => {
|
|||
}
|
||||
});
|
||||
|
||||
it('fails when expired', async () => {
|
||||
it('fails when expired', async function thisNeeded() {
|
||||
this.timeout(4000);
|
||||
|
||||
const aliceStore = new InMemorySignalProtocolStore();
|
||||
const bobStore = new InMemorySignalProtocolStore();
|
||||
|
||||
|
@ -264,7 +270,9 @@ describe('SecretSessionCipher', () => {
|
|||
}
|
||||
});
|
||||
|
||||
it('fails when wrong identity', async () => {
|
||||
it('fails when wrong identity', async function thisNeeded() {
|
||||
this.timeout(4000);
|
||||
|
||||
const aliceStore = new InMemorySignalProtocolStore();
|
||||
const bobStore = new InMemorySignalProtocolStore();
|
||||
|
||||
|
|
|
@ -319,7 +319,7 @@
|
|||
"rule": "jQuery-wrap(",
|
||||
"path": "js/modules/crypto.js",
|
||||
"line": " return dcodeIO.ByteBuffer.wrap(arrayBuffer).toString('base64');",
|
||||
"lineNumber": 322,
|
||||
"lineNumber": 38,
|
||||
"reasonCategory": "falseMatch",
|
||||
"updated": "2018-10-05T23:12:28.961Z"
|
||||
},
|
||||
|
@ -327,7 +327,7 @@
|
|||
"rule": "jQuery-wrap(",
|
||||
"path": "js/modules/crypto.js",
|
||||
"line": " return dcodeIO.ByteBuffer.wrap(base64string, 'base64').toArrayBuffer();",
|
||||
"lineNumber": 325,
|
||||
"lineNumber": 41,
|
||||
"reasonCategory": "falseMatch",
|
||||
"updated": "2018-10-05T23:12:28.961Z"
|
||||
},
|
||||
|
@ -335,7 +335,7 @@
|
|||
"rule": "jQuery-wrap(",
|
||||
"path": "js/modules/crypto.js",
|
||||
"line": " return dcodeIO.ByteBuffer.wrap(key, 'binary').toArrayBuffer();",
|
||||
"lineNumber": 329,
|
||||
"lineNumber": 45,
|
||||
"reasonCategory": "falseMatch",
|
||||
"updated": "2018-10-05T23:12:28.961Z"
|
||||
},
|
||||
|
@ -343,7 +343,7 @@
|
|||
"rule": "jQuery-wrap(",
|
||||
"path": "js/modules/crypto.js",
|
||||
"line": " return dcodeIO.ByteBuffer.wrap(string, 'utf8').toArrayBuffer();",
|
||||
"lineNumber": 333,
|
||||
"lineNumber": 49,
|
||||
"reasonCategory": "falseMatch",
|
||||
"updated": "2018-10-05T23:12:28.961Z"
|
||||
},
|
||||
|
@ -351,7 +351,7 @@
|
|||
"rule": "jQuery-wrap(",
|
||||
"path": "js/modules/crypto.js",
|
||||
"line": " return dcodeIO.ByteBuffer.wrap(buffer).toString('utf8');",
|
||||
"lineNumber": 336,
|
||||
"lineNumber": 52,
|
||||
"reasonCategory": "falseMatch",
|
||||
"updated": "2018-10-05T23:12:28.961Z"
|
||||
},
|
||||
|
|
49 yarn.lock
@ -391,20 +391,6 @@ archiver-utils@^1.3.0:
|
|||
normalize-path "^2.0.0"
|
||||
readable-stream "^2.0.0"
|
||||
|
||||
archiver@2.1.1:
|
||||
version "2.1.1"
|
||||
resolved "https://registry.yarnpkg.com/archiver/-/archiver-2.1.1.tgz#ff662b4a78201494a3ee544d3a33fe7496509ebc"
|
||||
integrity sha1-/2YrSnggFJSj7lRNOjP+dJZQnrw=
|
||||
dependencies:
|
||||
archiver-utils "^1.3.0"
|
||||
async "^2.0.0"
|
||||
buffer-crc32 "^0.2.1"
|
||||
glob "^7.0.0"
|
||||
lodash "^4.8.0"
|
||||
readable-stream "^2.0.0"
|
||||
tar-stream "^1.5.0"
|
||||
zip-stream "^1.2.0"
|
||||
|
||||
archiver@~2.1.0:
|
||||
version "2.1.0"
|
||||
resolved "https://registry.yarnpkg.com/archiver/-/archiver-2.1.0.tgz#d2df2e8d5773a82c1dcce925ccc41450ea999afd"
|
||||
|
@ -1363,6 +1349,11 @@ chownr@^1.0.1:
|
|||
version "1.0.1"
|
||||
resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.0.1.tgz#e2a75042a9551908bebd25b8523d5f9769d79181"
|
||||
|
||||
chownr@^1.1.1:
|
||||
version "1.1.1"
|
||||
resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.1.tgz#54726b8b8fff4df053c42187e801fb4412df1494"
|
||||
integrity sha512-j38EvO5+LHX84jlo6h4UzmOwi0UgW61WRyPtJz4qaadK5eY3BTS5TY/S1Stc3Uk2lIM6TPevAlULiEJwie860g==
|
||||
|
||||
chrome-trace-event@^0.1.1:
|
||||
version "0.1.2"
|
||||
resolved "https://registry.yarnpkg.com/chrome-trace-event/-/chrome-trace-event-0.1.2.tgz#90f36885d5345a50621332f0717b595883d5d982"
|
||||
|
@ -3104,6 +3095,7 @@ file-sync-cmp@^0.1.0:
|
|||
file-type@^3.1.0:
|
||||
version "3.9.0"
|
||||
resolved "https://registry.yarnpkg.com/file-type/-/file-type-3.9.0.tgz#257a078384d1db8087bc449d107d52a52672b9e9"
|
||||
integrity sha1-JXoHg4TR24CHvESdEH1SpSZyuek=
|
||||
|
||||
file-uri-to-path@1:
|
||||
version "1.0.0"
|
||||
|
@ -5485,12 +5477,27 @@ minipass@^2.2.1, minipass@^2.3.3:
|
|||
safe-buffer "^5.1.2"
|
||||
yallist "^3.0.0"
|
||||
|
||||
minipass@^2.3.4:
|
||||
version "2.3.5"
|
||||
resolved "https://registry.yarnpkg.com/minipass/-/minipass-2.3.5.tgz#cacebe492022497f656b0f0f51e2682a9ed2d848"
|
||||
integrity sha512-Gi1W4k059gyRbyVUZQ4mEqLm0YIUiGYfvxhF6SIlk3ui1WVxMTGfGdQ2SInh3PDrRTVvPKgULkpJtT4RH10+VA==
|
||||
dependencies:
|
||||
safe-buffer "^5.1.2"
|
||||
yallist "^3.0.0"
|
||||
|
||||
minizlib@^1.1.0:
|
||||
version "1.1.0"
|
||||
resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-1.1.0.tgz#11e13658ce46bc3a70a267aac58359d1e0c29ceb"
|
||||
dependencies:
|
||||
minipass "^2.2.1"
|
||||
|
||||
minizlib@^1.1.1:
|
||||
version "1.1.1"
|
||||
resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-1.1.1.tgz#6734acc045a46e61d596a43bb9d9cd326e19cc42"
|
||||
integrity sha512-TrfjCjk4jLhcJyGMYymBH6oTXcWjYbUAXTHDbtnWHjZC25h0cdajHuPE1zxb4DVmu8crfh+HwH/WMuyLG0nHBg==
|
||||
dependencies:
|
||||
minipass "^2.2.1"
|
||||
|
||||
mississippi@^2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/mississippi/-/mississippi-2.0.0.tgz#3442a508fafc28500486feea99409676e4ee5a6f"
|
||||
|
@ -8319,6 +8326,7 @@ string-width@^2.1.0, string-width@^2.1.1:
|
|||
string_decoder@^1.0.0:
|
||||
version "1.1.1"
|
||||
resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8"
|
||||
integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==
|
||||
dependencies:
|
||||
safe-buffer "~5.1.0"
|
||||
|
||||
|
@ -8486,6 +8494,19 @@ tar-stream@^1.5.0:
|
|||
readable-stream "^2.0.0"
|
||||
xtend "^4.0.0"
|
||||
|
||||
tar@4.4.8:
|
||||
version "4.4.8"
|
||||
resolved "https://registry.yarnpkg.com/tar/-/tar-4.4.8.tgz#b19eec3fde2a96e64666df9fdb40c5ca1bc3747d"
|
||||
integrity sha512-LzHF64s5chPQQS0IYBn9IN5h3i98c12bo4NCO7e0sGM2llXQ3p2FGC5sdENN4cTW48O915Sh+x+EXx7XW96xYQ==
|
||||
dependencies:
|
||||
chownr "^1.1.1"
|
||||
fs-minipass "^1.2.5"
|
||||
minipass "^2.3.4"
|
||||
minizlib "^1.1.1"
|
||||
mkdirp "^0.5.0"
|
||||
safe-buffer "^5.1.2"
|
||||
yallist "^3.0.2"
|
||||
|
||||
tar@^2.0.0:
|
||||
version "2.2.1"
|
||||
resolved "https://registry.yarnpkg.com/tar/-/tar-2.2.1.tgz#8e4d2a256c0e2185c6b18ad694aec968b83cb1d1"
|
||||
|