refactor: mmap asar files (#24470)

Jeremy Rose 2020-08-04 11:48:04 -07:00 committed by GitHub
parent 15ee34a1f2
commit 01a2e23194
7 changed files with 133 additions and 114 deletions


@@ -529,17 +529,15 @@ export const wrapFsWithAsar = (fs: Record<string, any>) => {
return fs.readFile(realPath, options, callback);
}
const buffer = Buffer.alloc(info.size);
const fd = archive.getFd();
if (!(fd >= 0)) {
const error = createError(AsarError.NOT_FOUND, { asarPath, filePath });
nextTick(callback, [error]);
return;
}
logASARAccess(asarPath, filePath, info.offset);
fs.read(fd, buffer, 0, info.size, info.offset, (error: Error) => {
callback(error, encoding ? buffer.toString(encoding) : buffer);
archive.read(info.offset, info.size).then((buf) => {
const buffer = Buffer.from(buf);
callback(null, encoding ? buffer.toString(encoding) : buffer);
}, (err) => {
const error: AsarErrorObject = new Error(`EINVAL, ${err.message} while reading ${filePath} in ${asarPath}`);
error.code = 'EINVAL';
error.errno = -22;
callback(error);
});
};
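For orientation, here is a minimal sketch of what a caller now sees from the asynchronous path above: reads inside an .asar are served from the memory-mapped archive via archive.read(), and native failures surface as EINVAL. The application path below is hypothetical.

import * as fs from 'fs';

// Hypothetical path inside a packaged app; the wrapper above resolves it against
// the mmapped archive instead of issuing fs.read() against a shared fd.
fs.readFile('/opt/app/resources/app.asar/dist/main.js', (err, data) => {
  if (err) {
    // Per the wrapper, failed native reads are reported as EINVAL (errno -22).
    console.error(err.code, err.errno, err.message);
    return;
  }
  console.log(`read ${data.length} bytes from the archive`);
});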
@@ -572,13 +570,19 @@ export const wrapFsWithAsar = (fs: Record<string, any>) => {
}
const { encoding } = options;
const buffer = Buffer.alloc(info.size);
const fd = archive.getFd();
if (!(fd >= 0)) throw createError(AsarError.NOT_FOUND, { asarPath, filePath });
logASARAccess(asarPath, filePath, info.offset);
fs.readSync(fd, buffer, 0, info.size, info.offset);
return (encoding) ? buffer.toString(encoding) : buffer;
let arrayBuffer: ArrayBuffer;
try {
arrayBuffer = archive.readSync(info.offset, info.size);
} catch (err) {
const error: AsarErrorObject = new Error(`EINVAL, ${err.message} while reading ${filePath} in ${asarPath}`);
error.code = 'EINVAL';
error.errno = -22;
throw error;
}
const buffer = Buffer.from(arrayBuffer);
return encoding ? buffer.toString(encoding) : buffer;
};
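The synchronous counterpart behaves the same from the caller's side (path again hypothetical); readSync failures are rethrown with the EINVAL shape constructed above.

import * as fs from 'fs';

try {
  // Served synchronously from the mapped archive via archive.readSync().
  const text = fs.readFileSync('/opt/app/resources/app.asar/package.json', 'utf8');
  console.log(JSON.parse(text).name);
} catch (err) {
  const e = err as NodeJS.ErrnoException;
  // Rethrown as EINVAL with errno -22, matching the wrapper above.
  console.error(e.code, e.errno);
}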
const { readdir } = fs;
@@ -685,12 +689,17 @@ export const wrapFsWithAsar = (fs: Record<string, any>) => {
return fs.readFileSync(realPath, { encoding: 'utf8' });
}
const buffer = Buffer.alloc(info.size);
const fd = archive.getFd();
if (!(fd >= 0)) return [];
logASARAccess(asarPath, filePath, info.offset);
fs.readSync(fd, buffer, 0, info.size, info.offset);
let arrayBuffer: ArrayBuffer;
try {
arrayBuffer = archive.readSync(info.offset, info.size);
} catch (err) {
const error: AsarErrorObject = new Error(`EINVAL, ${err.message} while reading ${filePath} in ${asarPath}`);
error.code = 'EINVAL';
error.errno = -22;
throw error;
}
const buffer = Buffer.from(arrayBuffer);
const str = buffer.toString('utf8');
return [str, str.length > 0];
};


@@ -4,6 +4,7 @@
#include <vector>
#include "base/numerics/safe_math.h"
#include "gin/handle.h"
#include "gin/object_template_builder.h"
#include "gin/wrappable.h"
@@ -12,6 +13,9 @@
#include "shell/common/gin_converters/callback_converter.h"
#include "shell/common/gin_converters/file_path_converter.h"
#include "shell/common/gin_helper/dictionary.h"
#include "shell/common/gin_helper/error_thrower.h"
#include "shell/common/gin_helper/function_template_extensions.h"
#include "shell/common/gin_helper/promise.h"
#include "shell/common/node_includes.h"
#include "shell/common/node_util.h"
@@ -38,7 +42,8 @@ class Archive : public gin::Wrappable<Archive> {
.SetMethod("readdir", &Archive::Readdir)
.SetMethod("realpath", &Archive::Realpath)
.SetMethod("copyFileOut", &Archive::CopyFileOut)
.SetMethod("getFd", &Archive::GetFD);
.SetMethod("read", &Archive::Read)
.SetMethod("readSync", &Archive::ReadSync);
}
const char* GetTypeName() override { return "Archive"; }
@@ -104,15 +109,68 @@ class Archive : public gin::Wrappable<Archive> {
return gin::ConvertToV8(isolate, new_path);
}
// Return the file descriptor.
int GetFD() const {
if (!archive_)
return -1;
return archive_->GetFD();
v8::Local<v8::ArrayBuffer> ReadSync(gin_helper::ErrorThrower thrower,
uint64_t offset,
uint64_t length) {
base::CheckedNumeric<uint64_t> safe_offset(offset);
base::CheckedNumeric<uint64_t> safe_end = safe_offset + length;
if (!safe_end.IsValid() ||
safe_end.ValueOrDie() > archive_->file()->length()) {
thrower.ThrowError("Out of bounds read");
return v8::Local<v8::ArrayBuffer>();
}
auto array_buffer = v8::ArrayBuffer::New(thrower.isolate(), length);
auto backing_store = array_buffer->GetBackingStore();
memcpy(backing_store->Data(), archive_->file()->data() + offset, length);
return array_buffer;
}
v8::Local<v8::Promise> Read(v8::Isolate* isolate,
uint64_t offset,
uint64_t length) {
gin_helper::Promise<v8::Local<v8::ArrayBuffer>> promise(isolate);
v8::Local<v8::Promise> handle = promise.GetHandle();
base::CheckedNumeric<uint64_t> safe_offset(offset);
base::CheckedNumeric<uint64_t> safe_end = safe_offset + length;
if (!safe_end.IsValid() ||
safe_end.ValueOrDie() > archive_->file()->length()) {
promise.RejectWithErrorMessage("Out of bounds read");
return handle;
}
auto backing_store = v8::ArrayBuffer::NewBackingStore(isolate, length);
base::ThreadPool::PostTaskAndReplyWithResult(
FROM_HERE, {base::MayBlock(), base::TaskPriority::USER_VISIBLE},
base::BindOnce(&Archive::ReadOnIO, isolate, archive_,
std::move(backing_store), offset, length),
base::BindOnce(&Archive::ResolveReadOnUI, std::move(promise)));
return handle;
}
private:
std::unique_ptr<asar::Archive> archive_;
static std::unique_ptr<v8::BackingStore> ReadOnIO(
v8::Isolate* isolate,
std::shared_ptr<asar::Archive> archive,
std::unique_ptr<v8::BackingStore> backing_store,
uint64_t offset,
uint64_t length) {
memcpy(backing_store->Data(), archive->file()->data() + offset, length);
return backing_store;
}
static void ResolveReadOnUI(
gin_helper::Promise<v8::Local<v8::ArrayBuffer>> promise,
std::unique_ptr<v8::BackingStore> backing_store) {
v8::HandleScope scope(promise.isolate());
v8::Context::Scope context_scope(promise.GetContext());
auto array_buffer =
v8::ArrayBuffer::New(promise.isolate(), std::move(backing_store));
promise.Resolve(array_buffer);
}
std::shared_ptr<asar::Archive> archive_;
DISALLOW_COPY_AND_ASSIGN(Archive);
};
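Both native entry points reject any read whose end would fall outside the mapping before copying a byte. For illustration only, the same guard in TypeScript terms (CheckedNumeric has no direct JS equivalent, so a safe-integer check stands in for the overflow test):

// Illustrative bounds guard mirroring the checks in ReadSync()/Read():
// a read is valid only if offset + size neither overflows nor passes the
// end of the mapped file.
function isReadInBounds (offset: number, size: number, fileLength: number): boolean {
  const end = offset + size;
  return Number.isSafeInteger(end) && end <= fileLength;
}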


@@ -117,78 +117,51 @@ bool FillFileInfoWithNode(Archive::FileInfo* info,
} // namespace
Archive::Archive(const base::FilePath& path)
: path_(path), file_(base::File::FILE_OK) {
Archive::Archive(const base::FilePath& path) : path_(path) {
base::ThreadRestrictions::ScopedAllowIO allow_io;
file_.Initialize(path_, base::File::FLAG_OPEN | base::File::FLAG_READ);
#if defined(OS_WIN)
fd_ = _open_osfhandle(reinterpret_cast<intptr_t>(file_.GetPlatformFile()), 0);
#elif defined(OS_POSIX)
fd_ = file_.GetPlatformFile();
#endif
if (!file_.Initialize(path_)) {
LOG(ERROR) << "Failed to open ASAR archive at '" << path_.value() << "'";
}
}
Archive::~Archive() {
#if defined(OS_WIN)
if (fd_ != -1) {
_close(fd_);
// Don't close the handle since we already closed the fd.
file_.TakePlatformFile();
}
#endif
base::ThreadRestrictions::ScopedAllowIO allow_io;
file_.Close();
}
Archive::~Archive() {}
bool Archive::Init() {
if (!file_.IsValid()) {
if (file_.error_details() != base::File::FILE_ERROR_NOT_FOUND) {
LOG(WARNING) << "Opening " << path_.value() << ": "
<< base::File::ErrorToString(file_.error_details());
}
return false;
}
std::vector<char> buf;
int len;
buf.resize(8);
{
base::ThreadRestrictions::ScopedAllowIO allow_io;
len = file_.ReadAtCurrentPos(buf.data(), buf.size());
}
if (len != static_cast<int>(buf.size())) {
PLOG(ERROR) << "Failed to read header size from " << path_.value();
if (file_.length() < 8) {
LOG(ERROR) << "Malformed ASAR file at '" << path_.value()
<< "' (too short)";
return false;
}
uint32_t size;
if (!base::PickleIterator(base::Pickle(buf.data(), buf.size()))
.ReadUInt32(&size)) {
LOG(ERROR) << "Failed to parse header size from " << path_.value();
base::PickleIterator size_pickle(
base::Pickle(reinterpret_cast<const char*>(file_.data()), 8));
if (!size_pickle.ReadUInt32(&size)) {
LOG(ERROR) << "Failed to read header size at '" << path_.value() << "'";
return false;
}
buf.resize(size);
{
base::ThreadRestrictions::ScopedAllowIO allow_io;
len = file_.ReadAtCurrentPos(buf.data(), buf.size());
}
if (len != static_cast<int>(buf.size())) {
PLOG(ERROR) << "Failed to read header from " << path_.value();
if (file_.length() - 8 < size) {
LOG(ERROR) << "Malformed ASAR file at '" << path_.value()
<< "' (incorrect header)";
return false;
}
base::PickleIterator header_pickle(
base::Pickle(reinterpret_cast<const char*>(file_.data() + 8), size));
std::string header;
if (!base::PickleIterator(base::Pickle(buf.data(), buf.size()))
.ReadString(&header)) {
LOG(ERROR) << "Failed to parse header from " << path_.value();
if (!header_pickle.ReadString(&header)) {
LOG(ERROR) << "Failed to read header string at '" << path_.value() << "'";
return false;
}
base::Optional<base::Value> value = base::JSONReader::Read(header);
if (!value || !value->is_dict()) {
LOG(ERROR) << "Failed to parse header";
LOG(ERROR) << "Header was not valid JSON at '" << path_.value() << "'";
return false;
}
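Init() now parses the archive index directly out of the mapping. As a rough sketch of the on-disk layout it walks (not Electron's code; the offsets are inferred from the two Pickle reads above, and the helper name is made up), in plain Node terms:

import { promises as fsp } from 'fs';

// Layout assumed from the Pickle reads in Init():
//   bytes [0, 4)   uint32 LE  payload size of the first pickle (4)
//   bytes [4, 8)   uint32 LE  size of the header pickle that follows
//   bytes [8, 12)  uint32 LE  payload size of the header pickle
//   bytes [12, 16) uint32 LE  length of the JSON index string
//   bytes [16, ..) JSON index; file contents begin at 8 + header pickle size
async function readAsarIndex (asarPath: string) {
  const file = await fsp.open(asarPath, 'r');
  try {
    const head = Buffer.alloc(16);
    await file.read(head, 0, 16, 0);
    const headerPickleSize = head.readUInt32LE(4);
    const jsonLength = head.readUInt32LE(12);
    const json = Buffer.alloc(jsonLength);
    await file.read(json, 0, jsonLength, 16);
    return { index: JSON.parse(json.toString('utf8')), contentOffset: 8 + headerPickleSize };
  } finally {
    await file.close();
  }
}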
@@ -291,11 +264,24 @@ bool Archive::CopyFileOut(const base::FilePath& path, base::FilePath* out) {
return true;
}
base::CheckedNumeric<uint64_t> safe_offset(info.offset);
auto safe_end = safe_offset + info.size;
if (!safe_end.IsValid() || safe_end.ValueOrDie() > file_.length())
return false;
auto temp_file = std::make_unique<ScopedTemporaryFile>();
base::FilePath::StringType ext = path.Extension();
if (!temp_file->InitFromFile(&file_, ext, info.offset, info.size))
if (!temp_file->Init(ext))
return false;
base::File dest(temp_file->path(),
base::File::FLAG_OPEN | base::File::FLAG_WRITE);
if (!dest.IsValid())
return false;
dest.WriteAtCurrentPos(
reinterpret_cast<const char*>(file_.data() + info.offset), info.size);
#if defined(OS_POSIX)
if (info.executable) {
// chmod a+x temp_file;
@@ -308,8 +294,4 @@ bool Archive::CopyFileOut(const base::FilePath& path, base::FilePath* out) {
return true;
}
int Archive::GetFD() const {
return fd_;
}
} // namespace asar
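CopyFileOut now writes the requested byte range straight out of the mapping into a temporary file. A rough Node-flavoured analogue of that flow (the temp-file naming and the mapped buffer are stand-ins, not Electron's implementation):

import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';

// Slice the requested range out of the mapped bytes and write it to a temp
// file that keeps the original entry's extension, as the new CopyFileOut does.
function copyRangeToTemp (mapped: Buffer, offset: number, size: number, ext: string): string {
  const dest = path.join(os.tmpdir(), `asar-copy-${process.pid}-${Date.now()}${ext}`);
  fs.writeFileSync(dest, mapped.subarray(offset, offset + size));
  return dest;
}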


@@ -11,6 +11,7 @@
#include "base/files/file.h"
#include "base/files/file_path.h"
#include "base/files/memory_mapped_file.h"
namespace base {
class DictionaryValue;
@@ -61,16 +62,13 @@ class Archive {
// For unpacked file, this method will return its real path.
bool CopyFileOut(const base::FilePath& path, base::FilePath* out);
// Returns the file's fd.
int GetFD() const;
base::MemoryMappedFile* file() { return &file_; }
base::FilePath path() const { return path_; }
base::DictionaryValue* header() const { return header_.get(); }
private:
base::FilePath path_;
base::File file_;
int fd_ = -1;
base::MemoryMappedFile file_;
uint32_t header_size_ = 0;
std::unique_ptr<base::DictionaryValue> header_;


@@ -48,27 +48,4 @@ bool ScopedTemporaryFile::Init(const base::FilePath::StringType& ext) {
return true;
}
bool ScopedTemporaryFile::InitFromFile(base::File* src,
const base::FilePath::StringType& ext,
uint64_t offset,
uint64_t size) {
if (!src->IsValid())
return false;
if (!Init(ext))
return false;
std::vector<char> buf(size);
int len = src->Read(offset, buf.data(), buf.size());
if (len != static_cast<int>(size))
return false;
base::File dest(path_, base::File::FLAG_OPEN | base::File::FLAG_WRITE);
if (!dest.IsValid())
return false;
return dest.WriteAtCurrentPos(buf.data(), buf.size()) ==
static_cast<int>(size);
}
} // namespace asar


@@ -27,12 +27,6 @@ class ScopedTemporaryFile {
// Init an empty temporary file with a certain extension.
bool Init(const base::FilePath::StringType& ext);
// Init an temporary file and fill it with content of |path|.
bool InitFromFile(base::File* src,
const base::FilePath::StringType& ext,
uint64_t offset,
uint64_t size);
base::FilePath path() const { return path_; }
private:


@@ -68,7 +68,8 @@ declare namespace NodeJS {
readdir(path: string): string[] | false;
realpath(path: string): string | false;
copyFileOut(path: string): string | false;
getFd(): number | -1;
read(offset: number, size: number): Promise<ArrayBuffer>;
readSync(offset: number, size: number): ArrayBuffer;
}
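For completeness, a hedged sketch of how a consumer of this interface wraps the result; the archive value stands in for whatever the asar binding returns (for example from a createArchive call, which this excerpt does not show):

// `archive` is assumed to implement the read signature declared above;
// how it is obtained is outside this diff.
async function readEntry (
  archive: { read (offset: number, size: number): Promise<ArrayBuffer> },
  offset: number,
  size: number
): Promise<Buffer> {
  // The native layer copies out of the mmapped file off the main thread and
  // resolves with an ArrayBuffer, which Buffer.from() views without re-copying.
  const chunk = await archive.read(offset, size);
  return Buffer.from(chunk);
}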
interface AsarBinding {