Differential updates
This commit is contained in:
parent
c11e9350d5
commit
f58d1332c4
12 changed files with 873 additions and 153 deletions
BIN
fixtures/diff-modified.bin
Normal file
BIN
fixtures/diff-modified.bin
Normal file
Binary file not shown.
BIN
fixtures/diff-modified.bin.blockmap
Normal file
BIN
fixtures/diff-modified.bin.blockmap
Normal file
Binary file not shown.
BIN
fixtures/diff-original.bin
Normal file
BIN
fixtures/diff-original.bin
Normal file
Binary file not shown.
BIN
fixtures/diff-original.bin.blockmap
Normal file
BIN
fixtures/diff-original.bin.blockmap
Normal file
Binary file not shown.
53
ts/scripts/generate-fixtures.ts
Normal file
53
ts/scripts/generate-fixtures.ts
Normal file
|
@ -0,0 +1,53 @@
|
|||
// Copyright 2022 Signal Messenger, LLC
|
||||
// SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
import fs from 'fs/promises';
|
||||
import path from 'path';
|
||||
import crypto from 'crypto';
|
||||
import { execFileSync } from 'child_process';
|
||||
|
||||
const FIXTURES = path.join(__dirname, '..', '..', 'fixtures');
|
||||
const SIZE = 256 * 1024;
|
||||
|
||||
async function main() {
|
||||
const original = crypto.randomBytes(SIZE);
|
||||
|
||||
const originalPath = path.join(FIXTURES, 'diff-original.bin');
|
||||
await fs.writeFile(originalPath, original);
|
||||
|
||||
// Add a broken byte to help create useful blockmaps
|
||||
original[Math.floor(Math.random() * original.length)] = 0;
|
||||
|
||||
const modifiedPath = path.join(FIXTURES, 'diff-modified.bin');
|
||||
await fs.writeFile(modifiedPath, original);
|
||||
|
||||
const appBuilder = path.join(
|
||||
__dirname,
|
||||
'..',
|
||||
'..',
|
||||
'node_modules',
|
||||
'app-builder-bin',
|
||||
'mac',
|
||||
'app-builder_amd64'
|
||||
);
|
||||
|
||||
for (const filePath of [originalPath, modifiedPath]) {
|
||||
console.log('Adding blockmap to', filePath);
|
||||
|
||||
// Put blockmap into a separate file
|
||||
console.log(
|
||||
execFileSync(appBuilder, [
|
||||
'blockmap',
|
||||
'--input',
|
||||
filePath,
|
||||
'--output',
|
||||
`${filePath}.blockmap`,
|
||||
]).toString()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
main().catch(err => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
|
@ -4,7 +4,7 @@
|
|||
import { assert } from 'chai';
|
||||
|
||||
import {
|
||||
createTempDir,
|
||||
createUpdateCacheDirIfNeeded,
|
||||
getUpdateFileName,
|
||||
getVersion,
|
||||
isUpdateFileNameValid,
|
||||
|
@ -138,12 +138,12 @@ releaseDate: '2021-12-03T19:00:23.754Z'
|
|||
|
||||
describe('#validatePath', () => {
|
||||
it('succeeds for simple children', async () => {
|
||||
const base = await createTempDir();
|
||||
const base = await createUpdateCacheDirIfNeeded();
|
||||
validatePath(base, `${base}/child`);
|
||||
validatePath(base, `${base}/child/grandchild`);
|
||||
});
|
||||
it('returns false for problematic names', async () => {
|
||||
const base = await createTempDir();
|
||||
const base = await createUpdateCacheDirIfNeeded();
|
||||
assert.throws(() => {
|
||||
validatePath(base, `${base}/../child`);
|
||||
});
|
||||
|
|
170
ts/test-node/updater/differential_test.ts
Normal file
170
ts/test-node/updater/differential_test.ts
Normal file
|
@ -0,0 +1,170 @@
|
|||
// Copyright 2022 Signal Messenger, LLC
|
||||
// SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
import { assert } from 'chai';
|
||||
import path from 'path';
|
||||
import http from 'http';
|
||||
import fs from 'fs/promises';
|
||||
import { tmpdir } from 'os';
|
||||
|
||||
import { strictAssert } from '../../util/assert';
|
||||
import {
|
||||
computeDiff,
|
||||
getBlockMapFileName,
|
||||
prepareDownload,
|
||||
download,
|
||||
} from '../../updater/differential';
|
||||
|
||||
const FIXTURES = path.join(__dirname, '..', '..', '..', 'fixtures');
|
||||
|
||||
describe('updater/differential', () => {
|
||||
describe('computeDiff', () => {
|
||||
it('computes correct difference', () => {
|
||||
const old = [
|
||||
{ checksum: 'a', offset: 0, size: 2 },
|
||||
{ checksum: 'b', offset: 2, size: 4 },
|
||||
{ checksum: 'c', offset: 6, size: 1 },
|
||||
{ checksum: 'c', offset: 7, size: 1 },
|
||||
{ checksum: 'd', offset: 8, size: 4 },
|
||||
];
|
||||
|
||||
const next = [
|
||||
{ checksum: 'prepend', offset: 0, size: 2 },
|
||||
{ checksum: 'not a', offset: 2, size: 4 },
|
||||
{ checksum: 'b', offset: 6, size: 4 },
|
||||
{ checksum: 'c', offset: 10, size: 1 },
|
||||
{ checksum: 'c', offset: 11, size: 1 },
|
||||
{ checksum: 'insert', offset: 12, size: 5 },
|
||||
{ checksum: 'c', offset: 17, size: 1 },
|
||||
{ checksum: 'd', offset: 18, size: 4 },
|
||||
{ checksum: 'append', offset: 22, size: 3 },
|
||||
];
|
||||
|
||||
assert.deepStrictEqual(computeDiff(old, next), [
|
||||
{ action: 'download', readOffset: 0, size: 6, writeOffset: 0 },
|
||||
{ action: 'copy', readOffset: 2, size: 6, writeOffset: 6 },
|
||||
// Note: this includes the third "c"
|
||||
{ action: 'download', readOffset: 12, size: 6, writeOffset: 12 },
|
||||
// This is "d"
|
||||
{ action: 'copy', readOffset: 8, size: 4, writeOffset: 18 },
|
||||
{ action: 'download', readOffset: 22, size: 3, writeOffset: 22 },
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('prepareDownload/download', () => {
|
||||
const oldFile = 'diff-original.bin';
|
||||
const oldBlockFile = getBlockMapFileName(oldFile);
|
||||
|
||||
const newFile = 'diff-modified.bin';
|
||||
const newBlockFile = getBlockMapFileName(newFile);
|
||||
const newHash =
|
||||
'1+eipIhsN0KhpXQdRnXnGzdBCP3sgYqIXf+WK/KDK08' +
|
||||
'VvH0acjX9PGf+ilIVYYWsOqp02lxrdx4gXW7V+RZY5w==';
|
||||
|
||||
const allowedFiles = new Set([
|
||||
oldFile,
|
||||
oldBlockFile,
|
||||
newFile,
|
||||
newBlockFile,
|
||||
]);
|
||||
|
||||
let server: http.Server;
|
||||
let baseUrl: string;
|
||||
|
||||
beforeEach(callback => {
|
||||
server = http.createServer(async (req, res) => {
|
||||
const file = req.url?.slice(1) ?? '';
|
||||
if (!allowedFiles.has(file)) {
|
||||
res.writeHead(404);
|
||||
res.end('Not found');
|
||||
return;
|
||||
}
|
||||
|
||||
const range = req.headers.range?.match(/^bytes=(\d+)-(\d+)$/);
|
||||
|
||||
let content = await fs.readFile(path.join(FIXTURES, file));
|
||||
const totalSize = content.length;
|
||||
if (range) {
|
||||
content = content.slice(
|
||||
parseInt(range[1], 10),
|
||||
parseInt(range[2], 10) + 1
|
||||
);
|
||||
|
||||
res.setHeader(
|
||||
'content-range',
|
||||
`bytes ${range[1]}-${range[2]}/${totalSize}`
|
||||
);
|
||||
res.writeHead(206);
|
||||
} else {
|
||||
res.writeHead(200);
|
||||
}
|
||||
|
||||
res.end(content);
|
||||
});
|
||||
|
||||
server.unref();
|
||||
|
||||
server.listen(0, () => {
|
||||
const addr = server.address();
|
||||
strictAssert(typeof addr === 'object' && addr, 'node.js apis');
|
||||
baseUrl = `http://127.0.0.1:${addr.port}`;
|
||||
|
||||
callback();
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
server.close();
|
||||
});
|
||||
|
||||
it('prepares the download', async () => {
|
||||
const data = await prepareDownload({
|
||||
oldFile: path.join(FIXTURES, oldFile),
|
||||
newUrl: `${baseUrl}/${newFile}`,
|
||||
sha512: newHash,
|
||||
});
|
||||
|
||||
assert.strictEqual(data.downloadSize, 32768);
|
||||
assert.deepStrictEqual(data.diff, [
|
||||
{ action: 'copy', readOffset: 0, size: 204635, writeOffset: 0 },
|
||||
{
|
||||
action: 'download',
|
||||
size: 32768,
|
||||
readOffset: 204635,
|
||||
writeOffset: 204635,
|
||||
},
|
||||
{
|
||||
action: 'copy',
|
||||
readOffset: 237403,
|
||||
size: 24741,
|
||||
writeOffset: 237403,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('downloads the file', async () => {
|
||||
const data = await prepareDownload({
|
||||
oldFile: path.join(FIXTURES, oldFile),
|
||||
newUrl: `${baseUrl}/${newFile}`,
|
||||
sha512: newHash,
|
||||
});
|
||||
|
||||
const outDir = await fs.mkdtemp(path.join(tmpdir(), 'signal-temp-'));
|
||||
await fs.mkdir(outDir, { recursive: true });
|
||||
|
||||
const outFile = path.join(outDir, 'out.bin');
|
||||
const chunks = new Array<number>();
|
||||
await download(outFile, data, size => chunks.push(size));
|
||||
|
||||
const expected = await fs.readFile(path.join(FIXTURES, newFile));
|
||||
const actual = await fs.readFile(outFile);
|
||||
|
||||
assert.isTrue(actual.equals(expected), 'Files do not match');
|
||||
assert.isTrue(
|
||||
chunks.length > 0,
|
||||
'Expected multiple callback invocations'
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
|
@ -2,24 +2,20 @@
|
|||
// SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
/* eslint-disable no-console */
|
||||
import {
|
||||
createWriteStream,
|
||||
statSync,
|
||||
writeFile as writeFileCallback,
|
||||
} from 'fs';
|
||||
import { createWriteStream, statSync } from 'fs';
|
||||
import { pathExists } from 'fs-extra';
|
||||
import { readdir, writeFile } from 'fs/promises';
|
||||
import { promisify } from 'util';
|
||||
import { execFile } from 'child_process';
|
||||
import { join, normalize, dirname } from 'path';
|
||||
import { join, normalize, extname } from 'path';
|
||||
import { tmpdir } from 'os';
|
||||
import { throttle } from 'lodash';
|
||||
|
||||
import type { ParserConfiguration } from 'dashdash';
|
||||
import { createParser } from 'dashdash';
|
||||
import ProxyAgent from 'proxy-agent';
|
||||
import { FAILSAFE_SCHEMA, safeLoad } from 'js-yaml';
|
||||
import { gt } from 'semver';
|
||||
import config from 'config';
|
||||
import type { StrictOptions as GotOptions } from 'got';
|
||||
import got from 'got';
|
||||
import { v4 as getGuid } from 'uuid';
|
||||
import pify from 'pify';
|
||||
|
@ -29,11 +25,11 @@ import type { BrowserWindow } from 'electron';
|
|||
import { app, ipcMain } from 'electron';
|
||||
|
||||
import * as durations from '../util/durations';
|
||||
import { getTempPath } from '../util/attachments';
|
||||
import { getTempPath, getUpdateCachePath } from '../util/attachments';
|
||||
import { DialogType } from '../types/Dialogs';
|
||||
import * as Errors from '../types/errors';
|
||||
import { getUserAgent } from '../util/getUserAgent';
|
||||
import { isAlpha, isBeta } from '../util/version';
|
||||
import { strictAssert } from '../util/assert';
|
||||
|
||||
import * as packageJson from '../../package.json';
|
||||
import {
|
||||
|
@ -45,15 +41,18 @@ import { isPathInside } from '../util/isPathInside';
|
|||
import type { SettingsChannel } from '../main/settingsChannel';
|
||||
|
||||
import type { LoggerType } from '../types/Logging';
|
||||
import { getGotOptions } from './got';
|
||||
import { checkIntegrity } from './util';
|
||||
import type { PrepareDownloadResultType as DifferentialDownloadDataType } from './differential';
|
||||
import {
|
||||
prepareDownload as prepareDifferentialDownload,
|
||||
download as downloadDifferentialData,
|
||||
getBlockMapFileName,
|
||||
} from './differential';
|
||||
|
||||
const writeFile = pify(writeFileCallback);
|
||||
const mkdirpPromise = pify(mkdirp);
|
||||
const rimrafPromise = pify(rimraf);
|
||||
|
||||
export const GOT_CONNECT_TIMEOUT = 2 * 60 * 1000;
|
||||
export const GOT_LOOKUP_TIMEOUT = 2 * 60 * 1000;
|
||||
export const GOT_SOCKET_TIMEOUT = 2 * 60 * 1000;
|
||||
|
||||
const INTERVAL = 30 * durations.MINUTE;
|
||||
|
||||
type JSONUpdateSchema = {
|
||||
|
@ -73,6 +72,8 @@ export type UpdateInformationType = {
|
|||
fileName: string;
|
||||
size: number;
|
||||
version: string;
|
||||
sha512: string;
|
||||
differentialData: DifferentialDownloadDataType | undefined;
|
||||
};
|
||||
|
||||
export abstract class Updater {
|
||||
|
@ -80,8 +81,6 @@ export abstract class Updater {
|
|||
|
||||
protected version: string | undefined;
|
||||
|
||||
protected updateFilePath: string | undefined;
|
||||
|
||||
constructor(
|
||||
protected readonly logger: LoggerType,
|
||||
private readonly settingsChannel: SettingsChannel,
|
||||
|
@ -99,8 +98,6 @@ export abstract class Updater {
|
|||
public async start(): Promise<void> {
|
||||
this.logger.info('updater/start: starting checks...');
|
||||
|
||||
app.once('quit', () => this.quitHandler());
|
||||
|
||||
setInterval(async () => {
|
||||
try {
|
||||
await this.checkForUpdatesMaybeInstall();
|
||||
|
@ -113,12 +110,6 @@ export abstract class Updater {
|
|||
await this.checkForUpdatesMaybeInstall();
|
||||
}
|
||||
|
||||
public quitHandler(): void {
|
||||
if (this.updateFilePath) {
|
||||
this.deleteCache(this.updateFilePath);
|
||||
}
|
||||
}
|
||||
|
||||
//
|
||||
// Abstract methods
|
||||
//
|
||||
|
@ -141,36 +132,33 @@ export abstract class Updater {
|
|||
//
|
||||
|
||||
private async downloadAndInstall(
|
||||
newFileName: string,
|
||||
newVersion: string,
|
||||
updateInfo: UpdateInformationType,
|
||||
updateOnProgress?: boolean
|
||||
): Promise<void> {
|
||||
const { logger } = this;
|
||||
|
||||
const { fileName: newFileName, version: newVersion } = updateInfo;
|
||||
|
||||
try {
|
||||
const oldFileName = this.fileName;
|
||||
const oldVersion = this.version;
|
||||
|
||||
if (this.updateFilePath) {
|
||||
this.deleteCache(this.updateFilePath);
|
||||
}
|
||||
this.fileName = newFileName;
|
||||
this.version = newVersion;
|
||||
|
||||
let updateFilePath: string;
|
||||
try {
|
||||
this.updateFilePath = await this.downloadUpdate(
|
||||
this.fileName,
|
||||
updateFilePath = await this.downloadUpdate(
|
||||
updateInfo,
|
||||
updateOnProgress
|
||||
);
|
||||
} catch (error) {
|
||||
// Restore state in case of download error
|
||||
this.fileName = oldFileName;
|
||||
this.version = oldVersion;
|
||||
throw error;
|
||||
}
|
||||
|
||||
const publicKey = hexToBinary(config.get('updatesPublicKey'));
|
||||
const verified = await verifySignature(
|
||||
this.updateFilePath,
|
||||
updateFilePath,
|
||||
this.version,
|
||||
publicKey
|
||||
);
|
||||
|
@ -179,11 +167,11 @@ export abstract class Updater {
|
|||
// re-download the broken release. We will download it only once per launch.
|
||||
throw new Error(
|
||||
'Downloaded update did not pass signature verification ' +
|
||||
`(version: '${this.version}'; fileName: '${this.fileName}')`
|
||||
`(version: '${this.version}'; fileName: '${newFileName}')`
|
||||
);
|
||||
}
|
||||
|
||||
await this.installUpdate(this.updateFilePath);
|
||||
await this.installUpdate(updateFilePath);
|
||||
|
||||
const mainWindow = this.getMainWindow();
|
||||
if (mainWindow) {
|
||||
|
@ -209,21 +197,16 @@ export abstract class Updater {
|
|||
return;
|
||||
}
|
||||
|
||||
const { fileName: newFileName, version: newVersion } = result;
|
||||
const { version: newVersion } = result;
|
||||
|
||||
if (
|
||||
force ||
|
||||
this.fileName !== newFileName ||
|
||||
!this.version ||
|
||||
gt(newVersion, this.version)
|
||||
) {
|
||||
if (force || !this.version || gt(newVersion, this.version)) {
|
||||
const autoDownloadUpdates = await this.getAutoDownloadUpdateSetting();
|
||||
if (!autoDownloadUpdates) {
|
||||
this.setUpdateListener(async () => {
|
||||
logger.info(
|
||||
'checkForUpdatesMaybeInstall: have not downloaded update, going to download'
|
||||
);
|
||||
await this.downloadAndInstall(newFileName, newVersion, true);
|
||||
await this.downloadAndInstall(result, true);
|
||||
});
|
||||
const mainWindow = this.getMainWindow();
|
||||
|
||||
|
@ -243,13 +226,13 @@ export abstract class Updater {
|
|||
}
|
||||
return;
|
||||
}
|
||||
await this.downloadAndInstall(newFileName, newVersion);
|
||||
await this.downloadAndInstall(result);
|
||||
}
|
||||
}
|
||||
|
||||
private async checkForUpdates(
|
||||
forceUpdate = false
|
||||
): Promise<UpdateInformationType | null> {
|
||||
): Promise<UpdateInformationType | undefined> {
|
||||
const yaml = await getUpdateYaml();
|
||||
const parsedYaml = parseYaml(yaml);
|
||||
const version = getVersion(parsedYaml);
|
||||
|
@ -259,104 +242,248 @@ export abstract class Updater {
|
|||
'checkForUpdates: no version extracted from downloaded yaml'
|
||||
);
|
||||
|
||||
return null;
|
||||
return;
|
||||
}
|
||||
|
||||
if (forceUpdate || isVersionNewer(version)) {
|
||||
if (!forceUpdate && !isVersionNewer(version)) {
|
||||
this.logger.info(
|
||||
`checkForUpdates: found newer version ${version} ` +
|
||||
`forceUpdate=${forceUpdate}`
|
||||
`checkForUpdates: ${version} is not newer than ${packageJson.version}; ` +
|
||||
'no new update available'
|
||||
);
|
||||
|
||||
const fileName = getUpdateFileName(
|
||||
parsedYaml,
|
||||
process.platform,
|
||||
await this.getArch()
|
||||
);
|
||||
|
||||
return {
|
||||
fileName,
|
||||
size: getSize(parsedYaml, fileName),
|
||||
version,
|
||||
};
|
||||
return;
|
||||
}
|
||||
|
||||
this.logger.info(
|
||||
`checkForUpdates: ${version} is not newer; no new update available`
|
||||
`checkForUpdates: found newer version ${version} ` +
|
||||
`forceUpdate=${forceUpdate}`
|
||||
);
|
||||
|
||||
return null;
|
||||
const fileName = getUpdateFileName(
|
||||
parsedYaml,
|
||||
process.platform,
|
||||
await this.getArch()
|
||||
);
|
||||
|
||||
const sha512 = getSHA512(parsedYaml, fileName);
|
||||
strictAssert(sha512 !== undefined, 'Missing required hash');
|
||||
|
||||
const latestInstaller = await this.getLatestCachedInstaller(
|
||||
extname(fileName)
|
||||
);
|
||||
|
||||
let differentialData: DifferentialDownloadDataType | undefined;
|
||||
if (latestInstaller) {
|
||||
this.logger.info(
|
||||
`checkForUpdates: Found local installer ${latestInstaller}`
|
||||
);
|
||||
|
||||
try {
|
||||
differentialData = await prepareDifferentialDownload({
|
||||
oldFile: latestInstaller,
|
||||
newUrl: `${getUpdatesBase()}/${fileName}`,
|
||||
sha512,
|
||||
});
|
||||
|
||||
this.logger.info(
|
||||
'checkForUpdates: differential download size',
|
||||
differentialData.downloadSize
|
||||
);
|
||||
} catch (error) {
|
||||
this.logger.error(
|
||||
'checkForUpdates: Failed to prepare differential update',
|
||||
Errors.toLogFormat(error)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
fileName,
|
||||
size: getSize(parsedYaml, fileName),
|
||||
version,
|
||||
sha512,
|
||||
differentialData,
|
||||
};
|
||||
}
|
||||
|
||||
private async getLatestCachedInstaller(
|
||||
extension: string
|
||||
): Promise<string | undefined> {
|
||||
const cacheDir = await createUpdateCacheDirIfNeeded();
|
||||
const oldFiles = (await readdir(cacheDir)).map(fileName => {
|
||||
return join(cacheDir, fileName);
|
||||
});
|
||||
|
||||
return oldFiles.find(fileName => extname(fileName) === extension);
|
||||
}
|
||||
|
||||
private async downloadUpdate(
|
||||
fileName: string,
|
||||
{ fileName, sha512, differentialData }: UpdateInformationType,
|
||||
updateOnProgress?: boolean
|
||||
): Promise<string> {
|
||||
const baseUrl = getUpdatesBase();
|
||||
const updateFileUrl = `${baseUrl}/${fileName}`;
|
||||
|
||||
const signatureFileName = getSignatureFileName(fileName);
|
||||
const blockMapFileName = getBlockMapFileName(fileName);
|
||||
const signatureUrl = `${baseUrl}/${signatureFileName}`;
|
||||
const blockMapUrl = `${baseUrl}/${blockMapFileName}`;
|
||||
|
||||
const cacheDir = await createUpdateCacheDirIfNeeded();
|
||||
const targetUpdatePath = join(cacheDir, fileName);
|
||||
const targetSignaturePath = join(cacheDir, signatureFileName);
|
||||
const targetBlockMapPath = join(cacheDir, blockMapFileName);
|
||||
|
||||
const targetPaths = [
|
||||
targetUpdatePath,
|
||||
targetSignaturePath,
|
||||
targetBlockMapPath,
|
||||
];
|
||||
|
||||
// List of files to be deleted on success
|
||||
const oldFiles = (await readdir(cacheDir))
|
||||
.map(oldFileName => {
|
||||
return join(cacheDir, oldFileName);
|
||||
})
|
||||
.filter(path => !targetPaths.includes(path));
|
||||
|
||||
let tempDir;
|
||||
try {
|
||||
tempDir = await createTempDir();
|
||||
const targetUpdatePath = join(tempDir, fileName);
|
||||
const targetSignaturePath = join(tempDir, getSignatureFileName(fileName));
|
||||
|
||||
validatePath(tempDir, targetUpdatePath);
|
||||
validatePath(tempDir, targetSignaturePath);
|
||||
validatePath(cacheDir, targetUpdatePath);
|
||||
validatePath(cacheDir, targetSignaturePath);
|
||||
validatePath(cacheDir, targetBlockMapPath);
|
||||
|
||||
this.logger.info(`downloadUpdate: Downloading signature ${signatureUrl}`);
|
||||
const { body } = await got.get(signatureUrl, getGotOptions());
|
||||
await writeFile(targetSignaturePath, body);
|
||||
const signature = await got(signatureUrl, getGotOptions()).buffer();
|
||||
await writeFile(targetSignaturePath, signature);
|
||||
|
||||
this.logger.info(`downloadUpdate: Downloading update ${updateFileUrl}`);
|
||||
const downloadStream = got.stream(updateFileUrl, getGotOptions());
|
||||
const writeStream = createWriteStream(targetUpdatePath);
|
||||
try {
|
||||
this.logger.info(`downloadUpdate: Downloading blockmap ${blockMapUrl}`);
|
||||
const blockMap = await got(blockMapUrl, getGotOptions()).buffer();
|
||||
await writeFile(targetBlockMapPath, blockMap);
|
||||
} catch (error) {
|
||||
this.logger.warn(
|
||||
'downloadUpdate: Failed to download blockmap, continuing',
|
||||
Errors.toLogFormat(error)
|
||||
);
|
||||
}
|
||||
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
const mainWindow = this.getMainWindow();
|
||||
if (updateOnProgress && mainWindow) {
|
||||
let downloadedSize = 0;
|
||||
let gotUpdate = false;
|
||||
if (!gotUpdate && (await pathExists(targetUpdatePath))) {
|
||||
const checkResult = await checkIntegrity(targetUpdatePath, sha512);
|
||||
if (checkResult.ok) {
|
||||
this.logger.info(
|
||||
`downloadUpdate: Not downloading update ${updateFileUrl}, ` +
|
||||
'local file has the same hash'
|
||||
);
|
||||
gotUpdate = true;
|
||||
} else {
|
||||
this.logger.error(
|
||||
'downloadUpdate: integrity check failure',
|
||||
checkResult.error
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const throttledSend = throttle(() => {
|
||||
mainWindow.webContents.send(
|
||||
if (!gotUpdate && differentialData) {
|
||||
this.logger.info(
|
||||
`downloadUpdate: Downloading differential update ${updateFileUrl}`
|
||||
);
|
||||
|
||||
try {
|
||||
const mainWindow = this.getMainWindow();
|
||||
|
||||
const throttledSend = throttle((downloadedSize: number) => {
|
||||
mainWindow?.webContents.send(
|
||||
'show-update-dialog',
|
||||
DialogType.Downloading,
|
||||
{ downloadedSize }
|
||||
);
|
||||
}, 500);
|
||||
|
||||
downloadStream.on('data', data => {
|
||||
downloadedSize += data.length;
|
||||
throttledSend();
|
||||
});
|
||||
await downloadDifferentialData(
|
||||
targetUpdatePath,
|
||||
differentialData,
|
||||
updateOnProgress ? throttledSend : undefined
|
||||
);
|
||||
|
||||
gotUpdate = true;
|
||||
} catch (error) {
|
||||
this.logger.error(
|
||||
'downloadUpdate: Failed to apply differential update',
|
||||
Errors.toLogFormat(error)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
downloadStream.on('error', error => {
|
||||
reject(error);
|
||||
});
|
||||
downloadStream.on('end', () => {
|
||||
resolve();
|
||||
});
|
||||
if (!gotUpdate) {
|
||||
this.logger.info(
|
||||
`downloadUpdate: Downloading full update ${updateFileUrl}`
|
||||
);
|
||||
await this.downloadAndReport(
|
||||
updateFileUrl,
|
||||
targetUpdatePath,
|
||||
updateOnProgress
|
||||
);
|
||||
gotUpdate = true;
|
||||
}
|
||||
strictAssert(gotUpdate, 'We should get the update one way or another');
|
||||
|
||||
writeStream.on('error', error => {
|
||||
reject(error);
|
||||
});
|
||||
|
||||
downloadStream.pipe(writeStream);
|
||||
});
|
||||
// Now that we successfully downloaded an update - remove old files
|
||||
await Promise.all(oldFiles.map(path => rimrafPromise(path)));
|
||||
|
||||
return targetUpdatePath;
|
||||
} catch (error) {
|
||||
if (tempDir) {
|
||||
await deleteTempDir(tempDir);
|
||||
try {
|
||||
await Promise.all([targetPaths.map(path => rimrafPromise(path))]);
|
||||
} catch (_) {
|
||||
// Ignore error, this is a cleanup
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
private async downloadAndReport(
|
||||
updateFileUrl: string,
|
||||
targetUpdatePath: string,
|
||||
updateOnProgress = false
|
||||
): Promise<void> {
|
||||
const downloadStream = got.stream(updateFileUrl, getGotOptions());
|
||||
const writeStream = createWriteStream(targetUpdatePath);
|
||||
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
const mainWindow = this.getMainWindow();
|
||||
if (updateOnProgress && mainWindow) {
|
||||
let downloadedSize = 0;
|
||||
|
||||
const throttledSend = throttle(() => {
|
||||
mainWindow.webContents.send(
|
||||
'show-update-dialog',
|
||||
DialogType.Downloading,
|
||||
{ downloadedSize }
|
||||
);
|
||||
}, 500);
|
||||
|
||||
downloadStream.on('data', data => {
|
||||
downloadedSize += data.length;
|
||||
throttledSend();
|
||||
});
|
||||
}
|
||||
|
||||
downloadStream.on('error', error => {
|
||||
reject(error);
|
||||
});
|
||||
downloadStream.on('end', () => {
|
||||
resolve();
|
||||
});
|
||||
|
||||
writeStream.on('error', error => {
|
||||
reject(error);
|
||||
});
|
||||
|
||||
downloadStream.pipe(writeStream);
|
||||
});
|
||||
}
|
||||
|
||||
private async getAutoDownloadUpdateSetting(): Promise<boolean> {
|
||||
try {
|
||||
return await this.settingsChannel.getSettingFromMainWindow(
|
||||
|
@ -371,18 +498,6 @@ export abstract class Updater {
|
|||
}
|
||||
}
|
||||
|
||||
private async deleteCache(filePath: string | null): Promise<void> {
|
||||
if (!filePath) {
|
||||
return;
|
||||
}
|
||||
const tempDir = dirname(filePath);
|
||||
try {
|
||||
await deleteTempDir(tempDir);
|
||||
} catch (error) {
|
||||
this.logger.error(`quitHandler: ${Errors.toLogFormat(error)}`);
|
||||
}
|
||||
}
|
||||
|
||||
private async getArch(): Promise<typeof process.arch> {
|
||||
if (process.platform !== 'darwin' || process.arch === 'arm64') {
|
||||
return process.arch;
|
||||
|
@ -427,12 +542,6 @@ export function getUpdateCheckUrl(): string {
|
|||
export function getUpdatesBase(): string {
|
||||
return config.get('updatesUrl');
|
||||
}
|
||||
export function getCertificateAuthority(): string {
|
||||
return config.get('certificateAuthority');
|
||||
}
|
||||
export function getProxyUrl(): string | undefined {
|
||||
return process.env.HTTPS_PROXY || process.env.https_proxy;
|
||||
}
|
||||
|
||||
export function getUpdatesFileName(): string {
|
||||
const prefix = getChannel();
|
||||
|
@ -504,9 +613,22 @@ export function getUpdateFileName(
|
|||
return path;
|
||||
}
|
||||
|
||||
function getSHA512(
|
||||
info: JSONUpdateSchema,
|
||||
fileName: string
|
||||
): string | undefined {
|
||||
if (!info || !info.files) {
|
||||
throw new Error('getSHA512: No files present in YAML file');
|
||||
}
|
||||
|
||||
const foundFile = info.files.find(file => file.url === fileName);
|
||||
|
||||
return foundFile?.sha512;
|
||||
}
|
||||
|
||||
function getSize(info: JSONUpdateSchema, fileName: string): number {
|
||||
if (!info || !info.files) {
|
||||
throw new Error('getUpdateFileName: No files present in YAML file');
|
||||
throw new Error('getSize: No files present in YAML file');
|
||||
}
|
||||
|
||||
const foundFile = info.files.find(file => file.url === fileName);
|
||||
|
@ -529,35 +651,6 @@ async function getUpdateYaml(): Promise<string> {
|
|||
return body;
|
||||
}
|
||||
|
||||
function getGotOptions(): GotOptions {
|
||||
const certificateAuthority = getCertificateAuthority();
|
||||
const proxyUrl = getProxyUrl();
|
||||
const agent = proxyUrl
|
||||
? {
|
||||
http: new ProxyAgent(proxyUrl),
|
||||
https: new ProxyAgent(proxyUrl),
|
||||
}
|
||||
: undefined;
|
||||
|
||||
return {
|
||||
agent,
|
||||
https: {
|
||||
certificateAuthority,
|
||||
},
|
||||
headers: {
|
||||
'Cache-Control': 'no-cache',
|
||||
'User-Agent': getUserAgent(packageJson.version),
|
||||
},
|
||||
timeout: {
|
||||
connect: GOT_CONNECT_TIMEOUT,
|
||||
lookup: GOT_LOOKUP_TIMEOUT,
|
||||
|
||||
// This timeout is reset whenever we get new data on the socket
|
||||
socket: GOT_SOCKET_TIMEOUT,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
function getBaseTempDir() {
|
||||
// We only use tmpdir() when this code is run outside of an Electron app (as in: tests)
|
||||
return app ? getTempPath(app.getPath('userData')) : tmpdir();
|
||||
|
@ -572,6 +665,18 @@ export async function createTempDir(): Promise<string> {
|
|||
return targetDir;
|
||||
}
|
||||
|
||||
function getUpdateCacheDir() {
|
||||
// We only use tmpdir() when this code is run outside of an Electron app (as in: tests)
|
||||
return app ? getUpdateCachePath(app.getPath('userData')) : tmpdir();
|
||||
}
|
||||
|
||||
export async function createUpdateCacheDirIfNeeded(): Promise<string> {
|
||||
const targetDir = getUpdateCacheDir();
|
||||
await mkdirpPromise(targetDir);
|
||||
|
||||
return targetDir;
|
||||
}
|
||||
|
||||
export async function deleteTempDir(targetDir: string): Promise<void> {
|
||||
const pathInfo = statSync(targetDir);
|
||||
if (!pathInfo.isDirectory()) {
|
||||
|
|
295
ts/updater/differential.ts
Normal file
295
ts/updater/differential.ts
Normal file
|
@ -0,0 +1,295 @@
|
|||
// Copyright 2022 Signal Messenger, LLC
|
||||
// SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
import { readFile, open, mkdtemp, mkdir, rename, unlink } from 'fs/promises';
|
||||
import { promisify } from 'util';
|
||||
import { gunzip as nativeGunzip } from 'zlib';
|
||||
import { tmpdir } from 'os';
|
||||
import path from 'path';
|
||||
import got from 'got';
|
||||
import pMap from 'p-map';
|
||||
|
||||
import { strictAssert } from '../util/assert';
|
||||
import { getGotOptions } from './got';
|
||||
import { checkIntegrity } from './util';
|
||||
|
||||
const gunzip = promisify(nativeGunzip);
|
||||
|
||||
const SUPPORTED_VERSION = '2';
|
||||
const MAX_CONCURRENCY = 5;
|
||||
|
||||
type BlockMapFileJSONType = Readonly<{
|
||||
version: string;
|
||||
files: ReadonlyArray<
|
||||
Readonly<{
|
||||
name: string;
|
||||
offset: number;
|
||||
checksums: ReadonlyArray<string>;
|
||||
sizes: ReadonlyArray<number>;
|
||||
}>
|
||||
>;
|
||||
}>;
|
||||
|
||||
export type BlockMapBlockType = Readonly<{
|
||||
offset: number;
|
||||
size: number;
|
||||
checksum: string;
|
||||
}>;
|
||||
|
||||
export type BlockMapType = ReadonlyArray<BlockMapBlockType>;
|
||||
|
||||
export type DiffType = {
|
||||
action: 'download' | 'copy';
|
||||
size: number;
|
||||
readOffset: number;
|
||||
writeOffset: number;
|
||||
};
|
||||
|
||||
export type ComputeDiffResultType = ReadonlyArray<Readonly<DiffType>>;
|
||||
|
||||
export type PrepareDownloadResultType = Readonly<{
|
||||
downloadSize: number;
|
||||
oldFile: string;
|
||||
newUrl: string;
|
||||
sha512: string;
|
||||
diff: ComputeDiffResultType;
|
||||
}>;
|
||||
|
||||
export type PrepareDownloadOptionsType = Readonly<{
|
||||
oldFile: string;
|
||||
newUrl: string;
|
||||
sha512: string;
|
||||
}>;
|
||||
|
||||
export function getBlockMapFileName(fileName: string): string {
|
||||
return `${fileName}.blockmap`;
|
||||
}
|
||||
|
||||
export async function parseBlockMap(data: Buffer): Promise<BlockMapType> {
|
||||
const unpacked = await gunzip(data);
|
||||
const json: BlockMapFileJSONType = JSON.parse(unpacked.toString());
|
||||
|
||||
strictAssert(
|
||||
json.version === SUPPORTED_VERSION,
|
||||
`Unsupported blockmap version: ${json.version}`
|
||||
);
|
||||
strictAssert(
|
||||
json.files.length === 1,
|
||||
`Unsupported blockmap file count: ${json.files.length}`
|
||||
);
|
||||
|
||||
const [file] = json.files;
|
||||
let { offset } = file;
|
||||
|
||||
const blocks = new Array<BlockMapBlockType>();
|
||||
for (const [i, checksum] of file.checksums.entries()) {
|
||||
const size = file.sizes[i];
|
||||
strictAssert(size !== undefined, `missing block size: ${i}`);
|
||||
|
||||
blocks.push({
|
||||
offset,
|
||||
size,
|
||||
checksum,
|
||||
});
|
||||
|
||||
offset += size;
|
||||
}
|
||||
|
||||
return blocks;
|
||||
}
|
||||
|
||||
export function computeDiff(
|
||||
oldMap: BlockMapType,
|
||||
newMap: BlockMapType
|
||||
): ComputeDiffResultType {
|
||||
const oldChecksums = new Map<string, Array<BlockMapBlockType>>();
|
||||
for (const oldBlock of oldMap) {
|
||||
let list = oldChecksums.get(oldBlock.checksum);
|
||||
if (!list) {
|
||||
list = [];
|
||||
oldChecksums.set(oldBlock.checksum, list);
|
||||
}
|
||||
|
||||
list.push(oldBlock);
|
||||
}
|
||||
|
||||
const diff = new Array<DiffType>();
|
||||
|
||||
let writeOffset = 0;
|
||||
for (const newBlock of newMap) {
|
||||
const oldBlocks = oldChecksums.get(newBlock.checksum);
|
||||
if (oldBlocks) {
|
||||
const oldBlock = oldBlocks.shift();
|
||||
strictAssert(oldBlock, 'Missing expected old block');
|
||||
if (oldBlocks.length === 0) {
|
||||
oldChecksums.delete(newBlock.checksum);
|
||||
}
|
||||
|
||||
strictAssert(
|
||||
oldBlock.size === newBlock.size,
|
||||
`Block size mismatch: ${newBlock.checksum}, ` +
|
||||
`${oldBlock.size} != ${newBlock.size}`
|
||||
);
|
||||
|
||||
diff.push({
|
||||
action: 'copy',
|
||||
size: oldBlock.size,
|
||||
readOffset: oldBlock.offset,
|
||||
writeOffset,
|
||||
});
|
||||
writeOffset += oldBlock.size;
|
||||
continue;
|
||||
}
|
||||
|
||||
diff.push({
|
||||
action: 'download',
|
||||
size: newBlock.size,
|
||||
readOffset: newBlock.offset,
|
||||
writeOffset,
|
||||
});
|
||||
writeOffset += newBlock.size;
|
||||
}
|
||||
|
||||
const optimizedDiff = new Array<DiffType>();
|
||||
for (const entry of diff) {
|
||||
const last =
|
||||
optimizedDiff.length !== 0
|
||||
? optimizedDiff[optimizedDiff.length - 1]
|
||||
: undefined;
|
||||
|
||||
const { action, readOffset, size } = entry;
|
||||
if (
|
||||
!last ||
|
||||
last.action !== action ||
|
||||
last.readOffset + last.size !== readOffset
|
||||
) {
|
||||
optimizedDiff.push(entry);
|
||||
continue;
|
||||
}
|
||||
|
||||
last.size += size;
|
||||
}
|
||||
|
||||
return optimizedDiff;
|
||||
}
|
||||
|
||||
export async function prepareDownload({
|
||||
oldFile,
|
||||
newUrl,
|
||||
sha512,
|
||||
}: PrepareDownloadOptionsType): Promise<PrepareDownloadResultType> {
|
||||
const oldBlockMap = await parseBlockMap(
|
||||
await readFile(getBlockMapFileName(oldFile))
|
||||
);
|
||||
|
||||
const newBlockMap = await parseBlockMap(
|
||||
await got(getBlockMapFileName(newUrl), getGotOptions()).buffer()
|
||||
);
|
||||
|
||||
const diff = computeDiff(oldBlockMap, newBlockMap);
|
||||
|
||||
let downloadSize = 0;
|
||||
for (const { action, size } of diff) {
|
||||
if (action === 'download') {
|
||||
downloadSize += size;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
downloadSize,
|
||||
diff,
|
||||
oldFile,
|
||||
newUrl,
|
||||
sha512,
|
||||
};
|
||||
}
|
||||
|
||||
export async function download(
|
||||
newFile: string,
|
||||
{ diff, oldFile, newUrl, sha512 }: PrepareDownloadResultType,
|
||||
statusCallback?: (downloadedSize: number) => void
|
||||
): Promise<void> {
|
||||
const input = await open(oldFile, 'r');
|
||||
|
||||
const tempDir = await mkdtemp(path.join(tmpdir(), 'signal-temp-'));
|
||||
await mkdir(tempDir, { recursive: true });
|
||||
const tempFile = path.join(tempDir, path.basename(newFile));
|
||||
|
||||
const output = await open(tempFile, 'w');
|
||||
|
||||
// Share agent
|
||||
const gotOptions = getGotOptions();
|
||||
|
||||
let downloadedSize = 0;
|
||||
|
||||
await pMap(
|
||||
diff,
|
||||
async ({ action, readOffset, size, writeOffset }) => {
|
||||
if (action === 'copy') {
|
||||
const chunk = Buffer.alloc(size);
|
||||
const { bytesRead } = await input.read(
|
||||
chunk,
|
||||
0,
|
||||
chunk.length,
|
||||
readOffset
|
||||
);
|
||||
|
||||
strictAssert(
|
||||
bytesRead === size,
|
||||
`Not enough data to read from offset=${readOffset} size=${size}`
|
||||
);
|
||||
|
||||
await output.write(chunk, 0, chunk.length, writeOffset);
|
||||
|
||||
downloadedSize += chunk.length;
|
||||
statusCallback?.(downloadedSize);
|
||||
return;
|
||||
}
|
||||
|
||||
strictAssert(action === 'download', 'invalid action type');
|
||||
const stream = got.stream(`${newUrl}`, {
|
||||
...gotOptions,
|
||||
headers: {
|
||||
range: `bytes=${readOffset}-${readOffset + size - 1}`,
|
||||
},
|
||||
});
|
||||
|
||||
stream.once('response', ({ statusCode }) => {
|
||||
if (statusCode !== 206) {
|
||||
stream.destroy(new Error(`Invalid status code: ${statusCode}`));
|
||||
}
|
||||
});
|
||||
|
||||
let lastOffset = writeOffset;
|
||||
for await (const chunk of stream) {
|
||||
strictAssert(
|
||||
lastOffset - writeOffset + chunk.length <= size,
|
||||
'Server returned more data than expected'
|
||||
);
|
||||
await output.write(chunk, 0, chunk.length, lastOffset);
|
||||
lastOffset += chunk.length;
|
||||
|
||||
downloadedSize += chunk.length;
|
||||
statusCallback?.(downloadedSize);
|
||||
}
|
||||
strictAssert(
|
||||
lastOffset - writeOffset === size,
|
||||
`Not enough data to download from offset=${readOffset} size=${size}`
|
||||
);
|
||||
},
|
||||
{ concurrency: MAX_CONCURRENCY }
|
||||
);
|
||||
|
||||
await Promise.all([input.close(), output.close()]);
|
||||
|
||||
const checkResult = await checkIntegrity(tempFile, sha512);
|
||||
strictAssert(checkResult.ok, checkResult.error ?? '');
|
||||
|
||||
// Finally move the file into its final location
|
||||
try {
|
||||
await unlink(newFile);
|
||||
} catch (_) {
|
||||
// ignore errors
|
||||
}
|
||||
await rename(tempFile, newFile);
|
||||
}
|
50
ts/updater/got.ts
Normal file
50
ts/updater/got.ts
Normal file
|
@ -0,0 +1,50 @@
|
|||
// Copyright 2019-2022 Signal Messenger, LLC
|
||||
// SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
import type { StrictOptions as GotOptions } from 'got';
|
||||
import config from 'config';
|
||||
import ProxyAgent from 'proxy-agent';
|
||||
|
||||
import * as packageJson from '../../package.json';
|
||||
import { getUserAgent } from '../util/getUserAgent';
|
||||
|
||||
// Timeouts (in milliseconds) applied to updater HTTP requests in
// `getGotOptions` below: TCP connect, DNS lookup, and socket inactivity.
export const GOT_CONNECT_TIMEOUT = 2 * 60 * 1000;
export const GOT_LOOKUP_TIMEOUT = 2 * 60 * 1000;
export const GOT_SOCKET_TIMEOUT = 2 * 60 * 1000;
|
||||
|
||||
export function getProxyUrl(): string | undefined {
|
||||
return process.env.HTTPS_PROXY || process.env.https_proxy;
|
||||
}
|
||||
|
||||
export function getCertificateAuthority(): string {
|
||||
return config.get('certificateAuthority');
|
||||
}
|
||||
|
||||
export function getGotOptions(): GotOptions {
|
||||
const certificateAuthority = getCertificateAuthority();
|
||||
const proxyUrl = getProxyUrl();
|
||||
const agent = proxyUrl
|
||||
? {
|
||||
http: new ProxyAgent(proxyUrl),
|
||||
https: new ProxyAgent(proxyUrl),
|
||||
}
|
||||
: undefined;
|
||||
|
||||
return {
|
||||
agent,
|
||||
https: {
|
||||
certificateAuthority,
|
||||
},
|
||||
headers: {
|
||||
'Cache-Control': 'no-cache',
|
||||
'User-Agent': getUserAgent(packageJson.version),
|
||||
},
|
||||
timeout: {
|
||||
connect: GOT_CONNECT_TIMEOUT,
|
||||
lookup: GOT_LOOKUP_TIMEOUT,
|
||||
|
||||
// This timeout is reset whenever we get new data on the socket
|
||||
socket: GOT_SOCKET_TIMEOUT,
|
||||
},
|
||||
};
|
||||
}
|
45
ts/updater/util.ts
Normal file
45
ts/updater/util.ts
Normal file
|
@ -0,0 +1,45 @@
|
|||
// Copyright 2022 Signal Messenger, LLC
|
||||
// SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
import { createReadStream } from 'fs';
|
||||
import { createHash } from 'crypto';
|
||||
|
||||
import * as Errors from '../types/errors';
|
||||
|
||||
/**
 * Result of `checkIntegrity`: either a success marker or a failure with a
 * human-readable reason. The `error?: void` on the success arm lets callers
 * access `result.error` without narrowing on `ok` first.
 */
export type CheckIntegrityResultType = Readonly<
  | {
      ok: true;
      error?: void;
    }
  | {
      ok: false;
      error: string;
    }
>;
|
||||
|
||||
export async function checkIntegrity(
|
||||
fileName: string,
|
||||
sha512: string
|
||||
): Promise<CheckIntegrityResultType> {
|
||||
try {
|
||||
const hash = createHash('sha512');
|
||||
for await (const chunk of createReadStream(fileName)) {
|
||||
hash.update(chunk);
|
||||
}
|
||||
|
||||
const actualSHA512 = hash.digest('base64');
|
||||
if (sha512 === actualSHA512) {
|
||||
return { ok: true };
|
||||
}
|
||||
|
||||
return {
|
||||
ok: false,
|
||||
error: `Integrity check failure: expected ${sha512}, got ${actualSHA512}`,
|
||||
};
|
||||
} catch (error) {
|
||||
return {
|
||||
ok: false,
|
||||
error: Errors.toLogFormat(error),
|
||||
};
|
||||
}
|
||||
}
|
|
@ -12,6 +12,7 @@ const AVATAR_PATH = 'avatars.noindex';
|
|||
// Subdirectory names under the app's user-data root.
// NOTE(review): the `.noindex` suffix presumably opts these directories out
// of macOS Spotlight indexing — confirm.
const BADGES_PATH = 'badges.noindex';
const STICKER_PATH = 'stickers.noindex';
const TEMP_PATH = 'temp';
// Added with the differential-updater change: cache for in-progress and
// completed update downloads.
const UPDATE_CACHE_PATH = 'update-cache';
const DRAFT_PATH = 'drafts.noindex';
|
||||
|
||||
const createPathGetter =
|
||||
|
@ -29,6 +30,7 @@ export const getDraftPath = createPathGetter(DRAFT_PATH);
|
|||
// Per-directory path getters built from the shared createPathGetter factory.
export const getPath = createPathGetter(PATH);
export const getStickersPath = createPathGetter(STICKER_PATH);
export const getTempPath = createPathGetter(TEMP_PATH);
// Added with the differential-updater change.
export const getUpdateCachePath = createPathGetter(UPDATE_CACHE_PATH);
|
||||
|
||||
export const createDeleter = (
|
||||
root: string
|
||||
|
|
Loading…
Add table
Reference in a new issue