chore: cleanup following internal switch to readPackageJSON (#44644)

This commit is contained in:
parent aca84be970
commit a15e42e9ff

2 changed files with 12 additions and 57 deletions
@@ -306,13 +306,17 @@ export const wrapFsWithAsar = (fs: Record<string, any>) => {
     const archive = getOrCreateArchive(asarPath);
     if (!archive) {
-      if (shouldThrowStatError(options)) throw createError(AsarError.INVALID_ARCHIVE, { asarPath });
+      if (shouldThrowStatError(options)) {
+        throw createError(AsarError.INVALID_ARCHIVE, { asarPath });
+      };
       return null;
     }

     const stats = archive.stat(filePath);
     if (!stats) {
-      if (shouldThrowStatError(options)) throw createError(AsarError.NOT_FOUND, { asarPath, filePath });
+      if (shouldThrowStatError(options)) {
+        throw createError(AsarError.NOT_FOUND, { asarPath, filePath });
+      };
       return null;
     }

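Note: the two hunks above only expand single-line guards into braced blocks. For readers unfamiliar with the guard itself, `shouldThrowStatError` mirrors Node's `throwIfNoEntry` stat option. A minimal sketch of such a helper, assuming it follows Node's semantics (not Electron's verbatim source):

// Hypothetical reconstruction: the stat wrappers throw on a missing entry
// unless the caller opted out with { throwIfNoEntry: false }, matching the
// option of the same name on Node's fs.statSync/fs.lstatSync.
function shouldThrowStatError (options?: { throwIfNoEntry?: boolean } | null): boolean {
  return options?.throwIfNoEntry !== false;
}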
@@ -690,7 +694,9 @@ export const wrapFsWithAsar = (fs: Record<string, any>) => {
     const { encoding } = options;
     const buffer = Buffer.alloc(info.size);
     const fd = archive.getFdAndValidateIntegrityLater();
-    if (!(fd >= 0)) throw createError(AsarError.NOT_FOUND, { asarPath, filePath });
+    if (!(fd >= 0)) {
+      throw createError(AsarError.NOT_FOUND, { asarPath, filePath });
+    }

     logASARAccess(asarPath, filePath, info.offset);
     fs.readSync(fd, buffer, 0, info.size, info.offset);
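Note: this hunk sits in the wrapper's file-read path. The archive hands back a file descriptor plus the content's size and byte offset, and the bytes are pulled out with a positional read. A self-contained sketch of that pattern (names are illustrative):

import * as fs from 'fs';

// Positional read: copy `size` bytes starting at byte `offset` of an open
// fd into a fresh buffer, without moving any shared file position.
function readSlice (fd: number, size: number, offset: number): Buffer {
  const buffer = Buffer.alloc(size);
  fs.readSync(fd, buffer, 0, size, offset);
  return buffer;
}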
@@ -755,7 +761,7 @@ export const wrapFsWithAsar = (fs: Record<string, any>) => {
       nextTick(callback!, [null, files]);
     };

-    const { readdir: readdirPromise } = require('fs').promises;
+    const { readdir: readdirPromise } = fs.promises;
     fs.promises.readdir = async function (pathArgument: string, options: ReaddirOptions) {
       options = getOptions(options);
       pathArgument = getValidatedPath(pathArgument);
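Note: this hunk swaps `require('fs').promises` for `fs.promises`, so the wrapper now captures the original promise-based readdir from the module object it was handed rather than re-requiring the global module. A minimal sketch of the capture-then-override pattern, assuming an fs-shaped argument:

import * as realFs from 'fs';

function wrapReaddir (fs: Record<string, any>) {
  // Capture the original before overriding, so non-asar paths can delegate.
  const { readdir: readdirPromise } = fs.promises;
  fs.promises.readdir = async function (pathArgument: string, options?: any) {
    // (asar-specific branching elided in this sketch)
    return readdirPromise(pathArgument, options);
  };
}

wrapReaddir(realFs);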
@@ -863,36 +869,8 @@ export const wrapFsWithAsar = (fs: Record<string, any>) => {
   };

   const binding = internalBinding('fs');
-  const { internalModuleReadJSON, kUsePromises } = binding;
-  internalBinding('fs').internalModuleReadJSON = (pathArgument: string) => {
-    const pathInfo = splitPath(pathArgument);
-    if (!pathInfo.isAsar) return internalModuleReadJSON(pathArgument);
-    const { asarPath, filePath } = pathInfo;
-
-    const archive = getOrCreateArchive(asarPath);
-    if (!archive) return [];
-
-    const info = archive.getFileInfo(filePath);
-    if (!info) return [];
-    if (info.size === 0) return ['', false];
-    if (info.unpacked) {
-      const realPath = archive.copyFileOut(filePath);
-      const str = fs.readFileSync(realPath, { encoding: 'utf8' });
-      return [str, str.length > 0];
-    }
-
-    const buffer = Buffer.alloc(info.size);
-    const fd = archive.getFdAndValidateIntegrityLater();
-    if (!(fd >= 0)) return [];
-
-    logASARAccess(asarPath, filePath, info.offset);
-    fs.readSync(fd, buffer, 0, info.size, info.offset);
-    validateBufferIntegrity(buffer, info.integrity);
-    const str = buffer.toString('utf8');
-    return [str, str.length > 0];
-  };

-  const { internalModuleStat } = internalBinding('fs');
+  const { internalModuleStat } = binding;
   internalBinding('fs').internalModuleStat = (pathArgument: string) => {
     const pathInfo = splitPath(pathArgument);
     if (!pathInfo.isAsar) return internalModuleStat(pathArgument);
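Note: this hunk is the substance of the cleanup. The `internalModuleReadJSON` override can be dropped because Node's loader now reads package.json through `readPackageJSON`, which bottoms out in ordinary fs calls that Electron already patches for asar paths. The removed shim reproduced Node's string-plus-boolean return shape by hand; after the switch, the same lookup works through public API alone. A sketch under that assumption (the helper name is hypothetical):

import * as fs from 'fs';
import * as path from 'path';

// Hypothetical helper: because fs.readFileSync is already asar-aware, a
// package.json inside an archive (e.g. app.asar/node_modules/foo) reads
// like any ordinary file; no binding-level override is required.
function readPackageJson (dir: string): Record<string, unknown> | null {
  try {
    return JSON.parse(fs.readFileSync(path.join(dir, 'package.json'), 'utf8'));
  } catch {
    return null;
  }
}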
@@ -909,6 +887,7 @@ export const wrapFsWithAsar = (fs: Record<string, any>) => {
     return (stats.type === AsarFileType.kDirectory) ? 1 : 0;
   };

+  const { kUsePromises } = binding;
   async function readdirRecursive (originalPath: string, options: ReaddirOptions) {
     const result: any[] = [];

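Note: `kUsePromises` was previously destructured alongside the now-removed `internalModuleReadJSON`; it is re-destructured here because `readdirRecursive` still passes it to the native binding to request a promise-returning call. A sketch of that usage with an assumed binding shape (Node's real internal signature may differ):

// Assumed shape of the internal fs binding, for illustration only.
type FsBinding = {
  readdir(path: string, encoding: string, withFileTypes: boolean, usePromises?: unknown): Promise<string[]>;
  kUsePromises: unknown;
};

// Passing the kUsePromises sentinel asks the binding for an async result
// instead of a synchronous one.
async function listAsync (binding: FsBinding, dir: string): Promise<string[]> {
  return binding.readdir(dir, 'utf8', false, binding.kUsePromises);
}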
@@ -1447,30 +1447,6 @@ describe('asar package', function () {
     generateSpecs('child_process');
     generateSpecs('node:child_process');

-    describe('internalModuleReadJSON', function () {
-      itremote('reads a normal file', function () {
-        const { internalModuleReadJSON } = (process as any).binding('fs');
-        const file1 = path.join(asarDir, 'a.asar', 'file1');
-        const [s1, c1] = internalModuleReadJSON(file1);
-        expect([s1.toString().trim(), c1]).to.eql(['file1', true]);
-
-        const file2 = path.join(asarDir, 'a.asar', 'file2');
-        const [s2, c2] = internalModuleReadJSON(file2);
-        expect([s2.toString().trim(), c2]).to.eql(['file2', true]);
-
-        const file3 = path.join(asarDir, 'a.asar', 'file3');
-        const [s3, c3] = internalModuleReadJSON(file3);
-        expect([s3.toString().trim(), c3]).to.eql(['file3', true]);
-      });
-
-      itremote('reads a normal file with unpacked files', function () {
-        const { internalModuleReadJSON } = (process as any).binding('fs');
-        const p = path.join(asarDir, 'unpack.asar', 'a.txt');
-        const [s, c] = internalModuleReadJSON(p);
-        expect([s.toString().trim(), c]).to.eql(['a', true]);
-      });
-    });
-
     describe('util.promisify', function () {
       itremote('can promisify all fs functions', function () {
         const originalFs = require('original-fs');
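Note: the deleted specs drove the shim directly through `process.binding('fs')`. The behavior they covered, reading packed and unpacked files out of an archive, remains reachable via the public API; an equivalent check might look like this, assuming the spec file's existing `fs`, `path`, and `asarDir` helpers (a sketch, not part of the commit):

describe('asar file reads via public fs', function () {
  itremote('reads packed and unpacked files through fs.readFileSync', function () {
    const file1 = path.join(asarDir, 'a.asar', 'file1');
    expect(fs.readFileSync(file1, 'utf8').trim()).to.equal('file1');

    const unpacked = path.join(asarDir, 'unpack.asar', 'a.txt');
    expect(fs.readFileSync(unpacked, 'utf8').trim()).to.equal('a');
  });
});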