Backup encryption and improvements

Fedor Indutny 2024-04-15 22:54:21 +02:00 committed by GitHub
parent d2850bdbd9
commit 87ea909ae9
15 changed files with 775 additions and 295 deletions


@ -18,8 +18,7 @@ message Frame {
Recipient recipient = 2;
Chat chat = 3;
ChatItem chatItem = 4;
Call call = 5;
StickerPack stickerPack = 6;
StickerPack stickerPack = 5;
}
}
@ -167,32 +166,6 @@ message Identity {
bool nonblockingApproval = 6;
}
message Call {
enum Type {
UNKNOWN_TYPE = 0;
AUDIO_CALL = 1;
VIDEO_CALL = 2;
GROUP_CALL = 3;
AD_HOC_CALL = 4;
}
enum State {
UNKNOWN_EVENT = 0;
COMPLETED = 1; // A call that was successfully completed or was accepted and in-progress at the time of the backup.
DECLINED_BY_USER = 2; // An incoming call that was manually declined by the user.
DECLINED_BY_NOTIFICATION_PROFILE = 3; // An incoming call that was automatically declined by an active notification profile.
MISSED = 4; // An incoming call that either expired, was cancelled by the sender, or was auto-rejected due to already being in a different call.
}
uint64 callId = 1;
uint64 conversationRecipientId = 2;
Type type = 3;
bool outgoing = 4;
uint64 timestamp = 5;
optional uint64 ringerRecipientId = 6;
State state = 7;
}
message ChatItem {
message IncomingMessageDetails {
uint64 dateReceived = 1;
@ -384,6 +357,7 @@ message MessageAttachment {
FilePointer pointer = 1;
Flag flag = 2;
bool wasDownloaded = 3;
}
message FilePointer {
@ -391,6 +365,9 @@ message FilePointer {
message BackupLocator {
string mediaName = 1;
uint32 cdnNumber = 2;
bytes key = 3;
bytes digest = 4;
uint32 size = 5;
}
// References attachments in the transit storage tier.
@ -401,37 +378,33 @@ message FilePointer {
string cdnKey = 1;
uint32 cdnNumber = 2;
uint64 uploadTimestamp = 3;
bytes key = 4;
bytes digest = 5;
uint32 size = 6;
}
// An attachment that was copied from the transit storage tier
// to the backup (media) storage tier without being downloaded.
// Its MediaName should be generated as {sender_aci}_{cdn_attachment_key},
// but should eventually transition to a BackupLocator with mediaName
// being the content hash once it is downloaded.
message UndownloadedBackupLocator {
bytes senderAci = 1;
string cdnKey = 2;
uint32 cdnNumber = 3;
// References attachments that are invalid in such a way that download
// cannot be attempted. Could range from missing digests to missing
// CDN keys or anything else that makes download attempts impossible.
// This serves as a 'tombstone' so that the UX can show that an attachment
// did exist, but for whatever reason it's not retrievable.
message InvalidAttachmentLocator {
}
oneof locator {
BackupLocator backupLocator = 1;
AttachmentLocator attachmentLocator = 2;
UndownloadedBackupLocator undownloadedBackupLocator = 3;
InvalidAttachmentLocator invalidAttachmentLocator = 3;
}
optional bytes key = 5;
optional string contentType = 6;
// Size of fullsize decrypted media blob in bytes.
// Can be ignored if unset/unavailable.
optional uint32 size = 7;
optional bytes incrementalMac = 8;
optional uint32 incrementalMacChunkSize = 9;
optional string fileName = 10;
optional uint32 width = 11;
optional uint32 height = 12;
optional string caption = 13;
optional string blurHash = 14;
optional string contentType = 4;
optional bytes incrementalMac = 5;
optional uint32 incrementalMacChunkSize = 6;
optional string fileName = 7;
optional uint32 width = 8;
optional uint32 height = 9;
optional string caption = 10;
optional string blurHash = 11;
}
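To illustrate how the reworked locator oneof is meant to be used, here is a hypothetical exporter helper; the message and field names come from the definitions above, while the helper itself, the `Backups` generated types, and the attachment shape are assumptions:

// Sketch: pick a FilePointer locator for an attachment being exported.
function toFilePointerLocator(attachment: {
  cdnKey?: string;
  cdnNumber?: number;
  key?: Uint8Array;
  digest?: Uint8Array;
  size?: number;
}): Backups.IFilePointer {
  const { cdnKey, cdnNumber, key, digest, size } = attachment;

  // Enough information to fetch the attachment from the transit tier.
  if (cdnKey && cdnNumber != null && key && digest) {
    return { attachmentLocator: { cdnKey, cdnNumber, key, digest, size } };
  }

  // Otherwise emit the tombstone so the UI can still show that an
  // attachment existed even though it can no longer be downloaded.
  return { invalidAttachmentLocator: {} };
}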
message Quote {
@ -495,13 +468,40 @@ message ChatUpdateMessage {
}
message CallChatUpdate {
oneof call {
uint64 callId = 1; // maps to id of Call from call log
Call call = 1;
oneof chatUpdate {
IndividualCallChatUpdate callMessage = 2;
GroupCallChatUpdate groupCall = 3;
}
}
message Call {
enum Type {
UNKNOWN_TYPE = 0;
AUDIO_CALL = 1;
VIDEO_CALL = 2;
GROUP_CALL = 3;
AD_HOC_CALL = 4;
}
enum State {
UNKNOWN_EVENT = 0;
COMPLETED = 1; // A call that was successfully completed or was accepted and in-progress at the time of the backup.
DECLINED_BY_USER = 2; // An incoming call that was manually declined by the user.
DECLINED_BY_NOTIFICATION_PROFILE = 3; // An incoming call that was automatically declined by an active notification profile.
MISSED = 4; // An incoming call that either expired, was cancelled by the sender, or was auto-rejected due to already being in a different call.
}
uint64 callId = 1;
uint64 conversationRecipientId = 2;
Type type = 3;
bool outgoing = 4;
uint64 timestamp = 5;
optional uint64 ringerRecipientId = 6;
State state = 7;
}
message IndividualCallChatUpdate {
enum Type {
UNKNOWN = 0;


@ -12,13 +12,15 @@ import {
} from 'crypto';
import type { Decipher, Hash, Hmac } from 'crypto';
import { Transform } from 'stream';
import type { Readable } from 'stream';
import { pipeline } from 'stream/promises';
import { ensureFile } from 'fs-extra';
import * as log from './logging/log';
import { HashType, CipherType } from './types/Crypto';
import { createName, getRelativePath } from './windows/attachments';
import { constantTimeEqual, getAttachmentSizeBucket } from './Crypto';
import { constantTimeEqual } from './Crypto';
import { appendPaddingStream, logPadSize } from './util/logPadding';
import { prependStream } from './util/prependStream';
import { appendMacStream } from './util/appendMacStream';
import { Environment } from './environment';
import type { AttachmentType } from './types/Attachment';
import type { ContextType } from './types/Message2';
@ -96,10 +98,10 @@ export async function encryptAttachmentV2({
await pipeline(
readFd.createReadStream(),
peekAndUpdateHash(plaintextHash),
appendPadding(),
appendPaddingStream(),
createCipheriv(CipherType.AES256CBC, aesKey, iv),
prependIv(iv),
appendMac(macKey),
appendMacStream(macKey),
peekAndUpdateHash(digest),
writeFd.createWriteStream()
);
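In terms of bytes on disk, the pipeline above writes the 16-byte IV, then the AES-256-CBC ciphertext of the log-padded plaintext, then a 32-byte HMAC-SHA256 computed over the IV and ciphertext. A small sketch of the resulting size, assuming the helper name is ours and that PKCS#7 always adds 1 to 16 bytes of block padding:

// Sketch: exact on-disk size of the encryptAttachmentV2 output.
function encryptedAttachmentSize(plaintextSize: number): number {
  const padded = logPadSize(plaintextSize); // zero-padded to the size bucket
  const ciphertext = (Math.floor(padded / 16) + 1) * 16; // CBC with PKCS#7
  return IV_LENGTH + ciphertext + ATTACHMENT_MAC_LENGTH; // 16 + ... + 32
}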
@ -266,10 +268,10 @@ function peekAndUpdateHash(hash: Hash) {
* Updates an hmac with the stream except for the last ATTACHMENT_MAC_LENGTH
* bytes. The last ATTACHMENT_MAC_LENGTH bytes are passed to the callback.
*/
function getMacAndUpdateHmac(
export function getMacAndUpdateHmac(
hmac: Hmac,
onTheirMac: (theirMac: Uint8Array) => void
) {
): Transform {
// Because we don't have a view of the entire stream, we don't know when we're
// at the end. We need to omit the last ATTACHMENT_MAC_LENGTH bytes from
// `hmac.update` so we only push what we know is not the mac.
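Now that getMacAndUpdateHmac is exported, a minimal verification sketch may help; the helper name is ours, PassThrough is an assumed import from 'stream', and the shape mirrors the first pass of importBackup later in this diff:

// Sketch: verify the trailing MAC of a stream laid out as payload || MAC.
async function verifyMac(source: Readable, macKey: Uint8Array): Promise<void> {
  const hmac = createHmac(HashType.size256, macKey);
  let theirMac: Uint8Array | undefined;

  const sink = new PassThrough();
  sink.resume(); // discard the payload; only the MAC comparison matters here

  await pipeline(
    source,
    getMacAndUpdateHmac(hmac, mac => {
      theirMac = mac;
    }),
    sink
  );

  if (theirMac == null || !constantTimeEqual(hmac.digest(), theirMac)) {
    throw new Error('verifyMac: Bad MAC');
  }
}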
@ -310,7 +312,7 @@ function getMacAndUpdateHmac(
* Gets the IV from the start of the stream and creates a decipher.
* Then deciphers the rest of the stream.
*/
function getIvAndDecipher(aesKey: Uint8Array) {
export function getIvAndDecipher(aesKey: Uint8Array): Transform {
let maybeIvBytes: Buffer | null = Buffer.alloc(0);
let decipher: Decipher | null = null;
return new Transform({
@ -377,83 +379,10 @@ function trimPadding(size: number) {
export function getAttachmentDownloadSize(size: number): number {
return (
// Multiply this by 1.05 to allow some variance
getAttachmentSizeBucket(size) * 1.05 + IV_LENGTH + ATTACHMENT_MAC_LENGTH
logPadSize(size) * 1.05 + IV_LENGTH + ATTACHMENT_MAC_LENGTH
);
}
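Worked example: a 1,000-byte attachment pads to logPadSize(1000) = 1020 bytes (log(1000) / log(1.05) ≈ 141.6 rounds up to 142, and floor(1.05^142) = 1020), so the estimate is 1020 * 1.05 + 16 + 32 = 1119 bytes, comfortably above the exact encrypted size of 16 + 1024 + 32 = 1072 bytes once CBC block padding is included.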
const PADDING_CHUNK_SIZE = 64 * 1024;
/**
* Creates iterator that yields zero-filled padding chunks.
*/
function* generatePadding(size: number) {
const targetLength = getAttachmentSizeBucket(size);
const paddingSize = targetLength - size;
const paddingChunks = Math.floor(paddingSize / PADDING_CHUNK_SIZE);
const paddingChunk = new Uint8Array(PADDING_CHUNK_SIZE); // zero-filled
const paddingRemainder = new Uint8Array(paddingSize % PADDING_CHUNK_SIZE);
for (let i = 0; i < paddingChunks; i += 1) {
yield paddingChunk;
}
if (paddingRemainder.byteLength > 0) {
yield paddingRemainder;
}
}
// Push as much padding as we can. If we reach the end
// of the padding, return true.
function pushPadding(
paddingIterator: Iterator<Uint8Array>,
readable: Readable
): boolean {
// eslint-disable-next-line no-constant-condition
while (true) {
const result = paddingIterator.next();
if (result.done) {
break;
}
const keepGoing = readable.push(result.value);
if (!keepGoing) {
return false;
}
}
return true;
}
/**
* Appends zero-padding to the stream to a target bucket size.
*/
function appendPadding() {
let onReadableDrained: undefined | (() => void);
let fileSize = 0;
return new Transform({
read(size) {
// When in the process of pushing padding, we pause and wait for
// read to be called again.
if (onReadableDrained != null) {
onReadableDrained();
}
// Always call _read, even if we're done.
Transform.prototype._read.call(this, size);
},
transform(chunk, _encoding, callback) {
fileSize += chunk.byteLength;
callback(null, chunk);
},
flush(callback) {
const iterator = generatePadding(fileSize);
onReadableDrained = () => {
if (pushPadding(iterator, this)) {
callback();
}
};
onReadableDrained();
},
});
}
/**
* Prepends the iv to the stream.
*/
@ -462,43 +391,7 @@ function prependIv(iv: Uint8Array) {
iv.byteLength === IV_LENGTH,
`prependIv: iv should be ${IV_LENGTH} bytes, got ${iv.byteLength} bytes`
);
return new Transform({
construct(callback) {
this.push(iv);
callback();
},
transform(chunk, _encoding, callback) {
callback(null, chunk);
},
});
}
/**
* Appends the mac to the end of the stream.
*/
function appendMac(macKey: Uint8Array) {
strictAssert(
macKey.byteLength === KEY_LENGTH,
`macKey should be ${KEY_LENGTH} bytes, got ${macKey.byteLength} bytes`
);
const hmac = createHmac(HashType.size256, macKey);
return new Transform({
transform(chunk, _encoding, callback) {
try {
hmac.update(chunk);
callback(null, chunk);
} catch (error) {
callback(error);
}
},
flush(callback) {
try {
callback(null, hmac.digest());
} catch (error) {
callback(error);
}
},
});
return prependStream(iv);
}
/**


@ -7,9 +7,10 @@ import { HKDF } from '@signalapp/libsignal-client';
import * as Bytes from './Bytes';
import { calculateAgreement, generateKeyPair } from './Curve';
import { HashType, CipherType } from './types/Crypto';
import { HashType, CipherType, UUID_BYTE_SIZE } from './types/Crypto';
import { ProfileDecryptError } from './types/errors';
import { getBytesSubarray } from './util/uuidToBytes';
import { logPadSize } from './util/logPadding';
import { Environment } from './environment';
export { HashType, CipherType };
@ -140,6 +141,78 @@ export function deriveStorageManifestKey(
return hmacSha256(storageServiceKey, Bytes.fromString(`Manifest_${version}`));
}
const BACKUP_KEY_LEN = 32;
const BACKUP_KEY_INFO = '20231003_Signal_Backups_GenerateBackupKey';
export function deriveBackupKey(masterKey: Uint8Array): Uint8Array {
const hkdf = HKDF.new(3);
return hkdf.deriveSecrets(
BACKUP_KEY_LEN,
Buffer.from(masterKey),
Buffer.from(BACKUP_KEY_INFO),
Buffer.alloc(0)
);
}
const BACKUP_ID_LEN = 16;
const BACKUP_ID_INFO = '20231003_Signal_Backups_GenerateBackupId';
export function deriveBackupId(
backupKey: Uint8Array,
aciBytes: Uint8Array
): Uint8Array {
if (backupKey.byteLength !== BACKUP_KEY_LEN) {
throw new Error('deriveBackupId: invalid backup key length');
}
if (aciBytes.byteLength !== UUID_BYTE_SIZE) {
throw new Error('deriveBackupId: invalid aci length');
}
const hkdf = HKDF.new(3);
return hkdf.deriveSecrets(
BACKUP_ID_LEN,
Buffer.from(backupKey),
Buffer.from(BACKUP_ID_INFO),
Buffer.from(aciBytes)
);
}
export type BackupKeyMaterialType = Readonly<{
macKey: Uint8Array;
aesKey: Uint8Array;
}>;
const BACKUP_AES_KEY_LEN = 32;
const BACKUP_MAC_KEY_LEN = 32;
const BACKUP_MATERIAL_INFO = '20231003_Signal_Backups_EncryptMessageBackup';
export function deriveBackupKeyMaterial(
backupKey: Uint8Array,
backupId: Uint8Array
): BackupKeyMaterialType {
if (backupKey.byteLength !== BACKUP_KEY_LEN) {
throw new Error('deriveBackupKeyMaterial: invalid backup key length');
}
if (backupId.byteLength !== BACKUP_ID_LEN) {
throw new Error('deriveBackupKeyMaterial: invalid backup id length');
}
const hkdf = HKDF.new(3);
const material = hkdf.deriveSecrets(
BACKUP_AES_KEY_LEN + BACKUP_MAC_KEY_LEN,
Buffer.from(backupKey),
Buffer.from(BACKUP_MATERIAL_INFO),
Buffer.from(backupId)
);
return {
macKey: material.slice(0, BACKUP_MAC_KEY_LEN),
aesKey: material.slice(BACKUP_MAC_KEY_LEN),
};
}
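Taken together, these helpers turn the account's master key into all of the key material a backup needs; a minimal sketch of the chain, assuming the master key and ACI bytes are already in hand:

const backupKey = deriveBackupKey(masterKey); // HKDF, 32 bytes
const backupId = deriveBackupId(backupKey, aciBytes); // HKDF bound to the ACI, 16 bytes
const { aesKey, macKey } = deriveBackupKeyMaterial(backupKey, backupId); // 32 + 32 bytes

The getKeyMaterial() helper added later in this diff runs exactly this sequence before exporting or importing a backup.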
export function deriveStorageItemKey(
storageServiceKey: Uint8Array,
itemID: string
@ -462,13 +535,6 @@ export function encryptAttachment({
};
}
export function getAttachmentSizeBucket(size: number): number {
return Math.max(
541,
Math.floor(1.05 ** Math.ceil(Math.log(size) / Math.log(1.05)))
);
}
export function padAndEncryptAttachment({
plaintext,
keys,
@ -479,7 +545,7 @@ export function padAndEncryptAttachment({
dangerousTestOnlyIv?: Readonly<Uint8Array>;
}): EncryptedAttachment {
const size = plaintext.byteLength;
const paddedSize = getAttachmentSizeBucket(size);
const paddedSize = logPadSize(size);
const padding = getZeroes(paddedSize - size);
return {


@ -184,9 +184,17 @@ export class BackupExportStream extends Readable {
stats.distributionLists += 1;
}
const pinnedConversationIds =
window.storage.get('pinnedConversationIds') || [];
for (const { attributes } of window.ConversationController.getAll()) {
const recipientId = this.getRecipientId(attributes);
let pinnedOrder: number | null = null;
if (attributes.isPinned) {
pinnedOrder = Math.max(0, pinnedConversationIds.indexOf(attributes.id));
}
this.pushFrame({
chat: {
// We don't have to use separate identifiers
@ -194,7 +202,7 @@ export class BackupExportStream extends Readable {
recipientId,
archived: attributes.isArchived === true,
pinnedOrder: attributes.isPinned === true ? 1 : null,
pinnedOrder,
expirationTimerMs:
attributes.expireTimer != null
? Long.fromNumber(
@ -588,7 +596,10 @@ export class BackupExportStream extends Readable {
const { sendStateByConversationId = {} } = message;
for (const [id, entry] of Object.entries(sendStateByConversationId)) {
const target = window.ConversationController.get(id);
strictAssert(target != null, 'Send target not found');
if (!target) {
log.warn(`backups: no send target for a message ${message.sent_at}`);
continue;
}
let deliveryStatus: Backups.SendStatus.Status;
switch (entry.status) {


@ -28,6 +28,8 @@ import {
import { incrementMessageCounter } from '../../util/incrementMessageCounter';
import { isAciString } from '../../util/isAciString';
import { createBatcher } from '../../util/batcher';
import { PhoneNumberDiscoverability } from '../../util/phoneNumberDiscoverability';
import { PhoneNumberSharingMode } from '../../util/phoneNumberSharingMode';
import { ReadStatus } from '../../messages/MessageReadStatus';
import { SendStatus } from '../../messages/MessageSendState';
import type { SendStateByConversationId } from '../../messages/MessageSendState';
@ -97,6 +99,7 @@ export class BackupImportStream extends Writable {
},
});
private ourConversation?: ConversationAttributesType;
private pinnedConversations = new Array<[number, string]>();
constructor() {
super({ objectMode: true });
@ -157,6 +160,15 @@ export class BackupImportStream extends Writable {
{ concurrency: MAX_CONCURRENCY }
);
await window.storage.put(
'pinnedConversationIds',
this.pinnedConversations
.sort(([a], [b]) => {
return a - b;
})
.map(([, id]) => id)
);
done();
} catch (error) {
done(error);
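Worked example of the pinned-order round trip: if pinnedConversationIds was ['a', 'b', 'c'] at export time, the corresponding chats are written with pinnedOrder 0, 1 and 2; on import the pairs can arrive in any order, say [[1, 'b'], [0, 'a'], [2, 'c']], and the sort-then-map above restores ['a', 'b', 'c'].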
@ -226,10 +238,145 @@ export class BackupImportStream extends Writable {
this.saveMessageBatcher.add(attributes);
}
private async fromAccount(_account: Backups.IAccountData): Promise<void> {
private async fromAccount({
profileKey,
username,
usernameLink,
givenName,
familyName,
avatarUrlPath,
subscriberId,
subscriberCurrencyCode,
accountSettings,
}: Backups.IAccountData): Promise<void> {
strictAssert(this.ourConversation === undefined, 'Duplicate AccountData');
this.ourConversation =
const me =
window.ConversationController.getOurConversationOrThrow().attributes;
this.ourConversation = me;
const { storage } = window;
strictAssert(Bytes.isNotEmpty(profileKey), 'Missing profile key');
await storage.put('profileKey', profileKey);
if (username != null) {
me.username = username;
}
if (usernameLink != null) {
const { entropy, serverId, color } = usernameLink;
if (Bytes.isNotEmpty(entropy) && Bytes.isNotEmpty(serverId)) {
await storage.put('usernameLink', {
entropy,
serverId,
});
}
// Same numeric value, no conversion needed
await storage.put('usernameLinkColor', color ?? 0);
}
if (givenName != null) {
me.profileName = givenName;
}
if (familyName != null) {
me.profileFamilyName = familyName;
}
if (avatarUrlPath != null) {
await storage.put('avatarUrl', avatarUrlPath);
}
if (subscriberId != null) {
await storage.put('subscriberId', subscriberId);
}
if (subscriberCurrencyCode != null) {
await storage.put('subscriberCurrencyCode', subscriberCurrencyCode);
}
await storage.put(
'read-receipt-setting',
accountSettings?.readReceipts === true
);
await storage.put(
'sealedSenderIndicators',
accountSettings?.sealedSenderIndicators === true
);
await storage.put(
'typingIndicators',
accountSettings?.typingIndicators === true
);
await storage.put('linkPreviews', accountSettings?.linkPreviews === true);
await storage.put(
'preferContactAvatars',
accountSettings?.preferContactAvatars === true
);
await storage.put(
'displayBadgesOnProfile',
accountSettings?.displayBadgesOnProfile === true
);
await storage.put(
'keepMutedChatsArchived',
accountSettings?.keepMutedChatsArchived === true
);
await storage.put(
'hasSetMyStoriesPrivacy',
accountSettings?.hasSetMyStoriesPrivacy === true
);
await storage.put(
'hasViewedOnboardingStory',
accountSettings?.hasViewedOnboardingStory === true
);
await storage.put(
'hasStoriesDisabled',
accountSettings?.storiesDisabled === true
);
await storage.put(
'storyViewReceiptsEnabled',
accountSettings?.storyViewReceiptsEnabled === true
);
await storage.put(
'hasCompletedUsernameOnboarding',
accountSettings?.hasCompletedUsernameOnboarding === true
);
await storage.put(
'preferredReactionEmoji',
accountSettings?.preferredReactionEmoji || []
);
const { PhoneNumberSharingMode: BackupMode } = Backups.AccountData;
switch (accountSettings?.phoneNumberSharingMode) {
case BackupMode.EVERYBODY:
await storage.put(
'phoneNumberSharingMode',
PhoneNumberSharingMode.Everybody
);
break;
case BackupMode.UNKNOWN:
case BackupMode.NOBODY:
default:
await storage.put(
'phoneNumberSharingMode',
PhoneNumberSharingMode.Nobody
);
break;
}
if (accountSettings?.notDiscoverableByPhoneNumber) {
await window.storage.put(
'phoneNumberDiscoverability',
PhoneNumberDiscoverability.NotDiscoverable
);
} else {
await window.storage.put(
'phoneNumberDiscoverability',
PhoneNumberDiscoverability.Discoverable
);
}
this.updateConversation(me);
}
private async fromContact(
@ -342,15 +489,7 @@ export class BackupImportStream extends Writable {
this.updateConversation(conversation);
if (chat.pinnedOrder != null) {
const pinnedConversationIds = new Set(
window.storage.get('pinnedConversationIds', new Array<string>())
);
pinnedConversationIds.add(conversation.id);
await window.storage.put('pinnedConversationIds', [
...pinnedConversationIds,
]);
this.pinnedConversations.push([chat.pinnedOrder, conversation.id]);
}
}


@ -2,20 +2,53 @@
// SPDX-License-Identifier: AGPL-3.0-only
import { pipeline } from 'stream/promises';
import { Readable } from 'stream';
import { PassThrough } from 'stream';
import type { Readable, Writable } from 'stream';
import { createWriteStream } from 'fs';
import { createGzip, createGunzip } from 'zlib';
import { createCipheriv, createHmac, randomBytes } from 'crypto';
import { noop } from 'lodash';
import * as log from '../../logging/log';
import * as Bytes from '../../Bytes';
import { DelimitedStream } from '../../util/DelimitedStream';
import { appendPaddingStream } from '../../util/logPadding';
import { prependStream } from '../../util/prependStream';
import { appendMacStream } from '../../util/appendMacStream';
import { toAciObject } from '../../util/ServiceId';
import { CipherType, HashType } from '../../types/Crypto';
import * as Errors from '../../types/errors';
import {
deriveBackupKey,
deriveBackupId,
deriveBackupKeyMaterial,
constantTimeEqual,
} from '../../Crypto';
import type { BackupKeyMaterialType } from '../../Crypto';
import { getIvAndDecipher, getMacAndUpdateHmac } from '../../AttachmentCrypto';
import { BackupExportStream } from './export';
import { BackupImportStream } from './import';
const IV_LENGTH = 16;
function getKeyMaterial(): BackupKeyMaterialType {
const masterKey = window.storage.get('masterKey');
if (!masterKey) {
throw new Error('Master key not available');
}
const aci = toAciObject(window.storage.user.getCheckedAci());
const aciBytes = aci.getServiceIdBinary();
const backupKey = deriveBackupKey(Bytes.fromBase64(masterKey));
const backupId = deriveBackupId(backupKey, aciBytes);
return deriveBackupKeyMaterial(backupKey, backupId);
}
export class BackupsService {
private isRunning = false;
public exportBackup(): Readable {
public async exportBackup(sink: Writable): Promise<void> {
if (this.isRunning) {
throw new Error('BackupService is already running');
}
@ -23,37 +56,43 @@ export class BackupsService {
log.info('exportBackup: starting...');
this.isRunning = true;
const stream = new BackupExportStream();
const cleanup = () => {
// Don't fire twice
stream.removeListener('end', cleanup);
stream.removeListener('error', cleanup);
try {
const { aesKey, macKey } = getKeyMaterial();
const recordStream = new BackupExportStream();
recordStream.run();
const iv = randomBytes(IV_LENGTH);
await pipeline(
recordStream,
createGzip(),
appendPaddingStream(),
createCipheriv(CipherType.AES256CBC, aesKey, iv),
prependStream(iv),
appendMacStream(macKey),
sink
);
} finally {
log.info('exportBackup: finished...');
this.isRunning = false;
};
stream.once('end', cleanup);
stream.once('error', cleanup);
stream.run();
return stream;
}
}
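The pipeline above produces a backup laid out as a 16-byte IV, the AES-256-CBC ciphertext of the gzipped frames plus their zero padding, and a trailing 32-byte HMAC-SHA256 over everything before it. A non-streaming sketch of opening that envelope, assuming the whole file fits in memory and that createDecipheriv is imported from 'crypto'; importBackup below does the same work in two streaming passes:

// Sketch: verify and strip the backup envelope in one shot.
function openBackupEnvelope(
  data: Buffer,
  { aesKey, macKey }: BackupKeyMaterialType
): Buffer {
  const MAC_SIZE = 32; // HMAC-SHA256 output length
  const iv = data.subarray(0, IV_LENGTH);
  const theirMac = data.subarray(data.length - MAC_SIZE);

  // The MAC covers everything before it: IV plus ciphertext.
  const hmac = createHmac(HashType.size256, macKey);
  hmac.update(data.subarray(0, data.length - MAC_SIZE));
  if (!constantTimeEqual(hmac.digest(), theirMac)) {
    throw new Error('openBackupEnvelope: Bad MAC');
  }

  const decipher = createDecipheriv(CipherType.AES256CBC, aesKey, iv);
  // The result is the gzipped frame stream followed by its zero padding.
  return Buffer.concat([
    decipher.update(data.subarray(IV_LENGTH, data.length - MAC_SIZE)),
    decipher.final(),
  ]);
}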
// Test harness
public async exportBackupData(): Promise<Uint8Array> {
const sink = new PassThrough();
const chunks = new Array<Uint8Array>();
for await (const chunk of this.exportBackup()) {
chunks.push(chunk);
}
sink.on('data', chunk => chunks.push(chunk));
await this.exportBackup(sink);
return Bytes.concatenate(chunks);
}
// Test harness
public async exportToDisk(path: string): Promise<void> {
await pipeline(this.exportBackup(), createWriteStream(path));
await this.exportBackup(createWriteStream(path));
}
// Test harness
@ -68,7 +107,7 @@ export class BackupsService {
});
}
public async importBackup(backup: Uint8Array): Promise<void> {
public async importBackup(createBackupStream: () => Readable): Promise<void> {
if (this.isRunning) {
throw new Error('BackupService is already running');
}
@ -77,11 +116,48 @@ export class BackupsService {
this.isRunning = true;
try {
const { aesKey, macKey } = getKeyMaterial();
// First pass - don't decrypt, only verify mac
let hmac = createHmac(HashType.size256, macKey);
let theirMac: Uint8Array | undefined;
const sink = new PassThrough();
// Discard the data in the first pass
sink.resume();
await pipeline(
Readable.from(backup),
createBackupStream(),
getMacAndUpdateHmac(hmac, theirMacValue => {
theirMac = theirMacValue;
}),
sink
);
if (theirMac == null) {
throw new Error('importBackup: Missing MAC');
}
if (!constantTimeEqual(hmac.digest(), theirMac)) {
throw new Error('importBackup: Bad MAC');
}
// Second pass - decrypt (but still check the mac at the end)
hmac = createHmac(HashType.size256, macKey);
await pipeline(
createBackupStream(),
getMacAndUpdateHmac(hmac, noop),
getIvAndDecipher(aesKey),
createGunzip(),
new DelimitedStream(),
new BackupImportStream()
);
if (!constantTimeEqual(hmac.digest(), theirMac)) {
throw new Error('importBackup: Bad MAC, second pass');
}
log.info('importBackup: finished...');
} catch (error) {
log.info(`importBackup: failed, error: ${Errors.toLogFormat(error)}`);


@ -0,0 +1,47 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import { randomBytes, createHmac } from 'crypto';
import {
appendMacStream,
MAC_KEY_SIZE,
MAC_SIZE,
} from '../../util/appendMacStream';
import { generateConfigMatrix } from '../../util/generateConfigMatrix';
describe('appendMacStream', () => {
generateConfigMatrix({
size: [23, 1024, 1024 * 1024],
}).forEach(({ size }) => {
it(`should append mac to a ${size} byte stream`, async () => {
const macKey = randomBytes(MAC_KEY_SIZE);
const plaintext = randomBytes(size);
const stream = appendMacStream(macKey);
stream.end(plaintext);
const chunks = new Array<Buffer>();
for await (const chunk of stream) {
chunks.push(chunk);
}
const buf = Buffer.concat(chunks);
const hmac = createHmac('sha256', macKey);
hmac.update(plaintext);
const expectedMac = hmac.digest();
assert.strictEqual(
buf.slice(0, -MAC_SIZE).toString('hex'),
plaintext.toString('hex')
);
assert.strictEqual(
buf.slice(-MAC_SIZE).toString('hex'),
expectedMac.toString('hex')
);
});
});
});


@ -0,0 +1,123 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import { Readable } from 'stream';
import { logPadSize, appendPaddingStream } from '../../util/logPadding';
const BUCKET_SIZES = [
541, 568, 596, 626, 657, 690, 725, 761, 799, 839, 881, 925, 972, 1020, 1071,
1125, 1181, 1240, 1302, 1367, 1436, 1507, 1583, 1662, 1745, 1832, 1924, 2020,
2121, 2227, 2339, 2456, 2579, 2708, 2843, 2985, 3134, 3291, 3456, 3629, 3810,
4001, 4201, 4411, 4631, 4863, 5106, 5361, 5629, 5911, 6207, 6517, 6843, 7185,
7544, 7921, 8318, 8733, 9170, 9629, 10110, 10616, 11146, 11704, 12289, 12903,
13549, 14226, 14937, 15684, 16469, 17292, 18157, 19065, 20018, 21019, 22070,
23173, 24332, 25549, 26826, 28167, 29576, 31054, 32607, 34238, 35950, 37747,
39634, 41616, 43697, 45882, 48176, 50585, 53114, 55770, 58558, 61486, 64561,
67789, 71178, 74737, 78474, 82398, 86518, 90843, 95386, 100155, 105163,
110421, 115942, 121739, 127826, 134217, 140928, 147975, 155373, 163142,
171299, 179864, 188858, 198300, 208215, 218626, 229558, 241036, 253087,
265742, 279029, 292980, 307629, 323011, 339161, 356119, 373925, 392622,
412253, 432866, 454509, 477234, 501096, 526151, 552458, 580081, 609086,
639540, 671517, 705093, 740347, 777365, 816233, 857045, 899897, 944892,
992136, 1041743, 1093831, 1148522, 1205948, 1266246, 1329558, 1396036,
1465838, 1539130, 1616086, 1696890, 1781735, 1870822, 1964363, 2062581,
2165710, 2273996, 2387695, 2507080, 2632434, 2764056, 2902259, 3047372,
3199740, 3359727, 3527714, 3704100, 3889305, 4083770, 4287958, 4502356,
4727474, 4963848, 5212040, 5472642, 5746274, 6033588, 6335268, 6652031,
6984633, 7333864, 7700558, 8085585, 8489865, 8914358, 9360076, 9828080,
10319484, 10835458, 11377231, 11946092, 12543397, 13170567, 13829095,
14520550, 15246578, 16008907, 16809352, 17649820, 18532311, 19458926,
20431872, 21453466, 22526139, 23652446, 24835069, 26076822, 27380663,
28749697, 30187181, 31696540, 33281368, 34945436, 36692708, 38527343,
40453710, 42476396, 44600216, 46830227, 49171738, 51630325, 54211841,
56922433, 59768555, 62756983, 65894832, 69189573, 72649052, 76281505,
80095580, 84100359, 88305377, 92720646, 97356678, 102224512, 107335738,
];
describe('logPadSize', () => {
it('properly calculates first bucket', () => {
for (let size = 0, max = BUCKET_SIZES[0]; size < max; size += 1) {
assert.strictEqual(BUCKET_SIZES[0], logPadSize(size));
}
});
it('properly calculates entire table', () => {
let count = 0;
const failures = new Array<string>();
for (let i = 0, max = BUCKET_SIZES.length - 1; i < max; i += 1) {
// Exact
if (BUCKET_SIZES[i] !== logPadSize(BUCKET_SIZES[i])) {
count += 1;
failures.push(
`${BUCKET_SIZES[i]} does not equal ${logPadSize(BUCKET_SIZES[i])}`
);
}
// Just under
if (BUCKET_SIZES[i] !== logPadSize(BUCKET_SIZES[i] - 1)) {
count += 1;
failures.push(
`${BUCKET_SIZES[i]} does not equal ${logPadSize(BUCKET_SIZES[i] - 1)}`
);
}
// Just over
if (BUCKET_SIZES[i + 1] !== logPadSize(BUCKET_SIZES[i] + 1)) {
count += 1;
failures.push(
`${BUCKET_SIZES[i + 1]} does not equal ` +
`${logPadSize(BUCKET_SIZES[i] + 1)}`
);
}
}
assert.strictEqual(count, 0, failures.join('\n'));
});
});
describe('appendPaddingStream', () => {
async function check(
inputs: ReadonlyArray<string>,
expectedSize: number
): Promise<void> {
const stream = appendPaddingStream();
Readable.from(inputs).pipe(stream);
const chunks = [];
for await (const chunk of stream) {
chunks.push(chunk);
}
const buf = Buffer.concat(chunks);
// Determine padding length
let padding = 0;
for (; padding < buf.length; padding += 1) {
if (buf[buf.length - padding - 1] !== 0) {
break;
}
}
assert.strictEqual(buf.slice(0, -padding).toString(), inputs.join(''));
assert.strictEqual(buf.length, expectedSize);
}
it('should append padding to a short input', async () => {
await check(['hello'], BUCKET_SIZES[0]);
});
it('should append padding to a longer input', async () => {
await check('test.'.repeat(1024).split('.'), BUCKET_SIZES[42]);
});
it('should append padding to a very long input', async () => {
await check(
`${'a'.repeat(64 * 1024)}.`.repeat(1024).split('.'),
BUCKET_SIZES[241]
);
});
});


@ -0,0 +1,21 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import { prependStream } from '../../util/prependStream';
describe('prependStream', () => {
it('should prepend stream with a prefix', async () => {
const stream = prependStream(Buffer.from('prefix:'));
stream.end('hello');
const chunks = new Array<Buffer>();
for await (const chunk of stream) {
chunks.push(chunk);
}
const buf = Buffer.concat(chunks);
assert.strictEqual(buf.toString(), 'prefix:hello');
});
});


@ -15,7 +15,6 @@ import {
decryptProfileName,
encryptProfile,
decryptProfile,
getAttachmentSizeBucket,
getRandomBytes,
constantTimeEqual,
generateRegistrationId,
@ -45,36 +44,6 @@ import {
import { createTempDir, deleteTempDir } from '../updater/common';
import { uuidToBytes, bytesToUuid } from '../util/uuidToBytes';
const BUCKET_SIZES = [
541, 568, 596, 626, 657, 690, 725, 761, 799, 839, 881, 925, 972, 1020, 1071,
1125, 1181, 1240, 1302, 1367, 1436, 1507, 1583, 1662, 1745, 1832, 1924, 2020,
2121, 2227, 2339, 2456, 2579, 2708, 2843, 2985, 3134, 3291, 3456, 3629, 3810,
4001, 4201, 4411, 4631, 4863, 5106, 5361, 5629, 5911, 6207, 6517, 6843, 7185,
7544, 7921, 8318, 8733, 9170, 9629, 10110, 10616, 11146, 11704, 12289, 12903,
13549, 14226, 14937, 15684, 16469, 17292, 18157, 19065, 20018, 21019, 22070,
23173, 24332, 25549, 26826, 28167, 29576, 31054, 32607, 34238, 35950, 37747,
39634, 41616, 43697, 45882, 48176, 50585, 53114, 55770, 58558, 61486, 64561,
67789, 71178, 74737, 78474, 82398, 86518, 90843, 95386, 100155, 105163,
110421, 115942, 121739, 127826, 134217, 140928, 147975, 155373, 163142,
171299, 179864, 188858, 198300, 208215, 218626, 229558, 241036, 253087,
265742, 279029, 292980, 307629, 323011, 339161, 356119, 373925, 392622,
412253, 432866, 454509, 477234, 501096, 526151, 552458, 580081, 609086,
639540, 671517, 705093, 740347, 777365, 816233, 857045, 899897, 944892,
992136, 1041743, 1093831, 1148522, 1205948, 1266246, 1329558, 1396036,
1465838, 1539130, 1616086, 1696890, 1781735, 1870822, 1964363, 2062581,
2165710, 2273996, 2387695, 2507080, 2632434, 2764056, 2902259, 3047372,
3199740, 3359727, 3527714, 3704100, 3889305, 4083770, 4287958, 4502356,
4727474, 4963848, 5212040, 5472642, 5746274, 6033588, 6335268, 6652031,
6984633, 7333864, 7700558, 8085585, 8489865, 8914358, 9360076, 9828080,
10319484, 10835458, 11377231, 11946092, 12543397, 13170567, 13829095,
14520550, 15246578, 16008907, 16809352, 17649820, 18532311, 19458926,
20431872, 21453466, 22526139, 23652446, 24835069, 26076822, 27380663,
28749697, 30187181, 31696540, 33281368, 34945436, 36692708, 38527343,
40453710, 42476396, 44600216, 46830227, 49171738, 51630325, 54211841,
56922433, 59768555, 62756983, 65894832, 69189573, 72649052, 76281505,
80095580, 84100359, 88305377, 92720646, 97356678, 102224512, 107335738,
];
const GHOST_KITTY_HASH =
'7bc77f27d92d00b4a1d57c480ca86dacc43d57bc318339c92119d1fbf6b557a5';
@ -556,55 +525,6 @@ describe('Crypto', () => {
});
});
describe('getAttachmentSizeBucket', () => {
it('properly calculates first bucket', () => {
for (let size = 0, max = BUCKET_SIZES[0]; size < max; size += 1) {
assert.strictEqual(BUCKET_SIZES[0], getAttachmentSizeBucket(size));
}
});
it('properly calculates entire table', () => {
let count = 0;
const failures = new Array<string>();
for (let i = 0, max = BUCKET_SIZES.length - 1; i < max; i += 1) {
// Exact
if (BUCKET_SIZES[i] !== getAttachmentSizeBucket(BUCKET_SIZES[i])) {
count += 1;
failures.push(
`${BUCKET_SIZES[i]} does not equal ${getAttachmentSizeBucket(
BUCKET_SIZES[i]
)}`
);
}
// Just under
if (BUCKET_SIZES[i] !== getAttachmentSizeBucket(BUCKET_SIZES[i] - 1)) {
count += 1;
failures.push(
`${BUCKET_SIZES[i]} does not equal ${getAttachmentSizeBucket(
BUCKET_SIZES[i] - 1
)}`
);
}
// Just over
if (
BUCKET_SIZES[i + 1] !== getAttachmentSizeBucket(BUCKET_SIZES[i] + 1)
) {
count += 1;
failures.push(
`${BUCKET_SIZES[i + 1]} does not equal ${getAttachmentSizeBucket(
BUCKET_SIZES[i] + 1
)}`
);
}
}
assert.strictEqual(count, 0, failures.join('\n'));
});
});
describe('attachments', () => {
const FILE_PATH = join(__dirname, '../../fixtures/ghost-kitty.mp4');
const FILE_CONTENTS = readFileSync(FILE_PATH);


@ -3,6 +3,7 @@
import createDebug from 'debug';
import Long from 'long';
import { StorageState } from '@signalapp/mock-server';
import * as durations from '../../util/durations';
import type { App } from '../playwright';
@ -19,6 +20,34 @@ describe('backups', function (this: Mocha.Suite) {
beforeEach(async () => {
bootstrap = new Bootstrap();
await bootstrap.init();
let state = StorageState.getEmpty();
const { phone, contacts } = bootstrap;
const [friend, pinned] = contacts;
state = state.updateAccount({
profileKey: phone.profileKey.serialize(),
e164: phone.device.number,
givenName: phone.profileName,
readReceipts: true,
hasCompletedUsernameOnboarding: true,
});
state = state.addContact(friend, {
identityKey: friend.publicKey.serialize(),
profileKey: friend.profileKey.serialize(),
});
state = state.addContact(pinned, {
identityKey: pinned.publicKey.serialize(),
profileKey: pinned.profileKey.serialize(),
});
state = state.pin(pinned);
await phone.setStorageState(state);
app = await bootstrap.link();
});
@ -34,7 +63,19 @@ describe('backups', function (this: Mocha.Suite) {
it('exports and imports backup', async function () {
const { contacts, phone, desktop, server } = bootstrap;
const [friend] = contacts;
const [friend, pinned] = contacts;
debug('wait for storage service sync to finish');
{
const window = await app.getWindow();
const leftPane = window.locator('#LeftPane');
await leftPane
.locator(
`[data-testid="${pinned.device.aci}"] >> "${pinned.profileName}"`
)
.waitFor();
}
for (let i = 0; i < 5; i += 1) {
const theirTimestamp = bootstrap.getTimestamp();


@ -4,6 +4,7 @@
import PQueue from 'p-queue';
import { isNumber, omit, orderBy } from 'lodash';
import type { KyberPreKeyRecord } from '@signalapp/libsignal-client';
import { Readable } from 'stream';
import EventTarget from './EventTarget';
import type {
@ -1331,7 +1332,7 @@ export default class AccountManager extends EventTarget {
]);
if (backupFile !== undefined) {
await backupsService.importBackup(backupFile);
await backupsService.importBackup(() => Readable.from(backupFile));
}
}


@ -0,0 +1,37 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { createHmac } from 'crypto';
import { Transform } from 'stream';
import type { Duplex } from 'stream';
import { HashType } from '../types/Crypto';
export const MAC_KEY_SIZE = 32;
export const MAC_SIZE = 32;
export function appendMacStream(macKey: Uint8Array): Duplex {
if (macKey.byteLength !== MAC_KEY_SIZE) {
throw new Error('appendMacStream: invalid macKey length');
}
const hmac = createHmac(HashType.size256, macKey);
return new Transform({
transform(chunk, _encoding, callback) {
try {
hmac.update(chunk);
callback(null, chunk);
} catch (error) {
callback(error);
}
},
flush(callback) {
try {
callback(null, hmac.digest());
} catch (error) {
callback(error);
}
},
});
}

ts/util/logPadding.ts (new file)

@ -0,0 +1,88 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { Transform } from 'stream';
import type { Duplex, Readable } from 'stream';
const PADDING_CHUNK_SIZE = 64 * 1024;
export function logPadSize(size: number): number {
return Math.max(
541,
Math.floor(1.05 ** Math.ceil(Math.log(size) / Math.log(1.05)))
);
}
/**
* Creates iterator that yields zero-filled padding chunks.
*/
function* generatePadding(size: number) {
const targetLength = logPadSize(size);
const paddingSize = targetLength - size;
const paddingChunks = Math.floor(paddingSize / PADDING_CHUNK_SIZE);
const paddingChunk = new Uint8Array(PADDING_CHUNK_SIZE); // zero-filled
for (let i = 0; i < paddingChunks; i += 1) {
yield paddingChunk;
}
const paddingRemainder = new Uint8Array(paddingSize % PADDING_CHUNK_SIZE);
if (paddingRemainder.byteLength > 0) {
yield paddingRemainder;
}
}
// Push as much padding as we can. If we reach the end
// of the padding, return true.
function pushPadding(
paddingIterator: Iterator<Uint8Array>,
readable: Readable
): boolean {
// eslint-disable-next-line no-constant-condition
while (true) {
const result = paddingIterator.next();
if (result.done) {
break;
}
const keepGoing = readable.push(result.value);
if (!keepGoing) {
return false;
}
}
return true;
}
/**
* Appends zero-padding to the stream to a target bucket size.
*/
export function appendPaddingStream(): Duplex {
let onReadableDrained: undefined | (() => void);
let fileSize = 0;
return new Transform({
read(size) {
// When in the process of pushing padding, we pause and wait for
// read to be called again.
if (onReadableDrained != null) {
onReadableDrained();
}
// Always call _read, even if we're done.
Transform.prototype._read.call(this, size);
},
transform(chunk, _encoding, callback) {
fileSize += chunk.byteLength;
callback(null, chunk);
},
flush(callback) {
const iterator = generatePadding(fileSize);
onReadableDrained = () => {
if (!pushPadding(iterator, this)) {
return;
}
callback();
};
onReadableDrained();
},
});
}

ts/util/prependStream.ts (new file)

@ -0,0 +1,17 @@
// Copyright 2024 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { Transform } from 'stream';
import type { Duplex } from 'stream';
export function prependStream(data: Uint8Array): Duplex {
return new Transform({
construct(callback) {
this.push(data);
callback();
},
transform(chunk, _encoding, callback) {
callback(null, chunk);
},
});
}