Merge pull request #1267 from atom/asar-unpacked
Add support for asar archives with unpacked files
Commit db056f8730
15 changed files with 134 additions and 62 deletions
@@ -100,32 +100,13 @@ void AdapterRequestJob::CreateBufferJobAndStart(const std::string& mime_type,
 
 void AdapterRequestJob::CreateFileJobAndStart(const base::FilePath& path) {
   DCHECK(content::BrowserThread::CurrentlyOn(content::BrowserThread::IO));
-
-  base::FilePath asar_path, relative_path;
-  if (!asar::GetAsarArchivePath(path, &asar_path, &relative_path)) {
-    real_job_ = new net::URLRequestFileJob(
-        request(),
-        network_delegate(),
-        path,
-        content::BrowserThread::GetBlockingPool()->
-            GetTaskRunnerWithShutdownBehavior(
-                base::SequencedWorkerPool::SKIP_ON_SHUTDOWN));
-  } else {
-    auto archive = asar::GetOrCreateAsarArchive(asar_path);
-    if (archive)
-      real_job_ = new asar::URLRequestAsarJob(
-          request(),
-          network_delegate(),
-          archive,
-          relative_path,
-          content::BrowserThread::GetBlockingPool()->
-              GetTaskRunnerWithShutdownBehavior(
-                  base::SequencedWorkerPool::SKIP_ON_SHUTDOWN));
-    else
-      real_job_ = new net::URLRequestErrorJob(
-          request(), network_delegate(), net::ERR_FILE_NOT_FOUND);
-  }
-
+  real_job_ = asar::CreateJobFromPath(
+      path,
+      request(),
+      network_delegate(),
+      content::BrowserThread::GetBlockingPool()->
+          GetTaskRunnerWithShutdownBehavior(
+              base::SequencedWorkerPool::SKIP_ON_SHUTDOWN));
   real_job_->Start();
 }
 
@@ -14,6 +14,36 @@
 
 namespace asar {
 
+// static
+net::URLRequestJob* CreateJobFromPath(
+    const base::FilePath& full_path,
+    net::URLRequest* request,
+    net::NetworkDelegate* network_delegate,
+    const scoped_refptr<base::TaskRunner> file_task_runner) {
+  // Create asar:// job when the path contains "xxx.asar/", otherwise treat the
+  // URL request as file://.
+  base::FilePath asar_path, relative_path;
+  if (!GetAsarArchivePath(full_path, &asar_path, &relative_path))
+    return new net::URLRequestFileJob(request, network_delegate, full_path,
+                                      file_task_runner);
+
+  std::shared_ptr<Archive> archive = GetOrCreateAsarArchive(asar_path);
+  Archive::FileInfo file_info;
+  if (!archive || !archive->GetFileInfo(relative_path, &file_info))
+    return new net::URLRequestErrorJob(request, network_delegate,
+                                       net::ERR_FILE_NOT_FOUND);
+
+  if (file_info.unpacked) {
+    base::FilePath real_path;
+    archive->CopyFileOut(relative_path, &real_path);
+    return new net::URLRequestFileJob(request, network_delegate, real_path,
+                                      file_task_runner);
+  }
+
+  return new URLRequestAsarJob(request, network_delegate, archive,
+                               relative_path, file_info, file_task_runner);
+}
+
 AsarProtocolHandler::AsarProtocolHandler(
     const scoped_refptr<base::TaskRunner>& file_task_runner)
     : file_task_runner_(file_task_runner) {}
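The dispatch above relies on `GetAsarArchivePath`, which is defined elsewhere in the tree and not shown in this diff. As a rough, non-authoritative sketch of the split it performs — treating the first path component ending in `.asar` as the archive boundary; `splitAsarPath` is an invented name:

```coffee
path = require 'path'

# Illustration only: split "/res/app.asar/dir/file.txt" into the archive path
# "/res/app.asar" and the in-archive relative path "dir/file.txt". The real
# GetAsarArchivePath is implemented in C++ and may differ in details.
splitAsarPath = (p) ->
  index = p.indexOf ".asar#{path.sep}"
  return null if index is -1
  archivePath:  p.substr(0, index + 5)
  relativePath: p.substr(index + 6)

# splitAsarPath '/res/app.asar/dir/file.txt'
# => { archivePath: '/res/app.asar', relativePath: 'dir/file.txt' }
```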
@@ -26,21 +56,8 @@ net::URLRequestJob* AsarProtocolHandler::MaybeCreateJob(
     net::NetworkDelegate* network_delegate) const {
   base::FilePath full_path;
   net::FileURLToFilePath(request->url(), &full_path);
-
-  // Create asar:// job when the path contains "xxx.asar/", otherwise treat the
-  // URL request as file://.
-  base::FilePath asar_path, relative_path;
-  if (!GetAsarArchivePath(full_path, &asar_path, &relative_path))
-    return new net::URLRequestFileJob(request, network_delegate, full_path,
-                                      file_task_runner_);
-
-  std::shared_ptr<Archive> archive = GetOrCreateAsarArchive(asar_path);
-  if (!archive)
-    return new net::URLRequestErrorJob(request, network_delegate,
-                                       net::ERR_FILE_NOT_FOUND);
-
-  return new URLRequestAsarJob(request, network_delegate, archive,
-                               relative_path, file_task_runner_);
+  return CreateJobFromPath(full_path, request, network_delegate,
+                           file_task_runner_);
 }
 
 bool AsarProtocolHandler::IsSafeRedirectTarget(const GURL& location) const {
@@ -19,10 +19,12 @@ URLRequestAsarJob::URLRequestAsarJob(
     net::NetworkDelegate* network_delegate,
     std::shared_ptr<Archive> archive,
     const base::FilePath& file_path,
+    const Archive::FileInfo& file_info,
     const scoped_refptr<base::TaskRunner>& file_task_runner)
     : net::URLRequestJob(request, network_delegate),
       archive_(archive),
       file_path_(file_path),
+      file_info_(file_info),
      stream_(new net::FileStream(file_task_runner)),
       remaining_bytes_(0),
       file_task_runner_(file_task_runner),
@@ -31,12 +33,6 @@ URLRequestAsarJob::URLRequestAsarJob(
 URLRequestAsarJob::~URLRequestAsarJob() {}
 
 void URLRequestAsarJob::Start() {
-  if (!archive_ || !archive_->GetFileInfo(file_path_, &file_info_)) {
-    NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED,
-                                     net::ERR_FILE_NOT_FOUND));
-    return;
-  }
-
   remaining_bytes_ = static_cast<int64>(file_info_.size);
 
   int flags = base::File::FLAG_OPEN |
@@ -24,12 +24,20 @@ class FileStream;
 
 namespace asar {
 
+// Creates a request job according to the file path.
+net::URLRequestJob* CreateJobFromPath(
+    const base::FilePath& full_path,
+    net::URLRequest* request,
+    net::NetworkDelegate* network_delegate,
+    const scoped_refptr<base::TaskRunner> file_task_runner);
+
 class URLRequestAsarJob : public net::URLRequestJob {
  public:
   URLRequestAsarJob(net::URLRequest* request,
                     net::NetworkDelegate* network_delegate,
                     std::shared_ptr<Archive> archive,
                     const base::FilePath& file_path,
+                    const Archive::FileInfo& file_info,
                     const scoped_refptr<base::TaskRunner>& file_task_runner);
 
   // net::URLRequestJob:
@@ -55,8 +63,8 @@ class URLRequestAsarJob : public net::URLRequestJob {
   void DidRead(scoped_refptr<net::IOBuffer> buf, int result);
 
   std::shared_ptr<Archive> archive_;
-  Archive::FileInfo file_info_;
   base::FilePath file_path_;
+  Archive::FileInfo file_info_;
 
   scoped_ptr<net::FileStream> stream_;
   int64 remaining_bytes_;
@@ -41,6 +41,7 @@ class Archive : public mate::Wrappable {
       return v8::False(isolate);
     mate::Dictionary dict(isolate, v8::Object::New(isolate));
     dict.Set("size", info.size);
+    dict.Set("unpacked", info.unpacked);
     dict.Set("offset", info.offset);
     return dict.GetHandle();
   }
@@ -81,18 +81,22 @@ bool GetNodeFromPath(std::string path,
 bool FillFileInfoWithNode(Archive::FileInfo* info,
                           uint32 header_size,
                           const base::DictionaryValue* node) {
+  int size;
+  if (!node->GetInteger("size", &size))
+    return false;
+  info->size = static_cast<uint32>(size);
+
+  info->unpacked = false;
+  if (node->GetBoolean("unpacked", &info->unpacked) && info->unpacked)
+    return true;
+
   std::string offset;
   if (!node->GetString("offset", &offset))
     return false;
   if (!base::StringToUint64(offset, &info->offset))
     return false;
 
-  int size;
-  if (!node->GetInteger("size", &size))
-    return false;
-
   info->offset += header_size;
-  info->size = static_cast<uint32>(size);
 
   return true;
 }
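For context, and not part of the diff: `FillFileInfoWithNode` reads one entry of the archive's JSON header. A sketch of the two kinds of entry it now accepts, with made-up values; the field names follow the reads above (`offset` is a string and, per the `info->offset += header_size` line, is stored relative to the end of the header):

```coffee
# A packed entry carries the byte range of its contents inside the archive.
packedEntry = size: 1024, offset: '4096'

# An entry written with --unpack keeps its bytes on disk instead, so the
# parser returns early and never requires an offset.
unpackedEntry = size: 1024, unpacked: true
```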
@@ -240,6 +244,11 @@ bool Archive::CopyFileOut(const base::FilePath& path, base::FilePath* out) {
   if (!GetFileInfo(path, &info))
     return false;
 
+  if (info.unpacked) {
+    *out = path_.AddExtension(FILE_PATH_LITERAL("unpacked")).Append(path);
+    return true;
+  }
+
   scoped_ptr<ScopedTemporaryFile> temp_file(new ScopedTemporaryFile);
   if (!temp_file->InitFromFile(path_, info.offset, info.size))
     return false;
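Not part of the diff, but to make the `CopyFileOut` mapping concrete: an unpacked entry's real file lives in a sibling `<archive>.unpacked` directory that mirrors the in-archive path. A small sketch with example paths:

```coffee
path = require 'path'

# Mirrors path_.AddExtension(FILE_PATH_LITERAL("unpacked")).Append(path).
archivePath  = '/opt/myapp/resources/app.asar'
relativePath = 'node_modules/foo/foo.node'

realPath = path.join "#{archivePath}.unpacked", relativePath
# => '/opt/myapp/resources/app.asar.unpacked/node_modules/foo/foo.node'
```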
@@ -25,6 +25,7 @@ class Archive {
  public:
   struct FileInfo {
     FileInfo() : size(0), offset(0) {}
+    bool unpacked;
     uint32 size;
     uint64 offset;
   };
@@ -55,6 +56,7 @@ class Archive {
   bool Realpath(const base::FilePath& path, base::FilePath* realpath);
 
   // Copy the file into a temporary file, and return the new path.
+  // For an unpacked file, this method will return its real path.
   bool CopyFileOut(const base::FilePath& path, base::FilePath* out);
 
   base::FilePath path() const { return path_; }
@@ -71,6 +71,13 @@ bool ReadFileToString(const base::FilePath& path, std::string* contents) {
   if (!archive->GetFileInfo(relative_path, &info))
     return false;
 
+  if (info.unpacked) {
+    base::FilePath real_path;
+    // For an unpacked file this returns the real path instead of doing a copy.
+    archive->CopyFileOut(relative_path, &real_path);
+    return base::ReadFileToString(real_path, contents);
+  }
+
   base::File src(asar_path, base::File::FLAG_OPEN | base::File::FLAG_READ);
   if (!src.IsValid())
     return false;
@@ -90,7 +90,7 @@ overrideAPI = (module, name, arg = 0) ->
     return callback new Error("Invalid package #{asarPath}") unless archive
 
     newPath = archive.copyFileOut filePath
-    return callback createNotFoundError(asarPath, filePath) unless newPath
+    return callback createNotFoundError(asarPath, filePath) unless newPath
 
     arguments[arg] = newPath
     old.apply this, arguments
@@ -218,6 +218,10 @@ exports.wrapFsWithAsar = (fs) ->
     info = archive.getFileInfo filePath
     return callback createNotFoundError(asarPath, filePath) unless info
 
+    if info.unpacked
+      realPath = archive.copyFileOut filePath
+      return fs.readFile realPath, options, callback
+
     if not options
       options = encoding: null, flag: 'r'
     else if util.isString options
@@ -247,6 +251,10 @@ exports.wrapFsWithAsar = (fs) ->
     info = archive.getFileInfo filePath
     throw createNotFoundError(asarPath, filePath) unless info
 
+    if info.unpacked
+      realPath = archive.copyFileOut filePath
+      return fs.readFileSync realPath, options
+
     if not options
       options = encoding: null, flag: 'r'
     else if util.isString options
@@ -298,4 +306,3 @@ exports.wrapFsWithAsar = (fs) ->
   overrideAPISync process, 'dlopen', 1
   overrideAPISync require('module')._extensions, '.node', 1
   overrideAPISync fs, 'openSync'
-  overrideAPISync child_process, 'fork'
@@ -131,7 +131,6 @@ work. This adds a little overhead for those APIs.
 APIs that require extra unpacking are:
 
 * `child_process.execFile`
-* `child_process.fork`
 * `fs.open`
 * `fs.openSync`
 * `process.dlopen` - Used by `require` on native modules
@@ -143,4 +142,22 @@ archives is generated by guessing, because those files do not exist on the
 filesystem. So you should not trust the `Stats` object except for getting file
 size and checking file type.
 
+## Adding unpacked files to an `asar` archive
+
+As stated above, some Node APIs will unpack the file to the filesystem when
+called; apart from the performance overhead, this can also lead to false
+alerts from virus scanners.
+
+To work around this, you can leave some files unpacked when creating the
+archive by using the `--unpack` option. For example, to exclude the shared
+libraries of native modules:
+
+```bash
+$ asar pack app app.asar --unpack *.node
+```
+
+After running the command, besides `app.asar`, an `app.asar.unpacked` folder
+is also generated containing the unpacked files; you should ship it together
+with `app.asar` to users.
+
 [asar]: https://github.com/atom/asar
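To illustrate what this means at runtime (paths and module names below are examples, not taken from the diff): once `app.asar.unpacked` is shipped next to `app.asar`, reads through the patched `fs` behave the same for packed and unpacked entries, and unpacked ones are served from the sibling directory without a temporary copy.

```coffee
fs = require 'fs'

# Both reads go through the patched readFileSync from the wrapFsWithAsar
# hunks above: the first is served from app.asar itself, the second (packed
# with --unpack *.node) resolves to app.asar.unpacked/ via copyFileOut.
manifest = fs.readFileSync '/opt/myapp/resources/app.asar/package.json'
addon    = fs.readFileSync '/opt/myapp/resources/app.asar/node_modules/foo/foo.node'
```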
@@ -110,6 +110,26 @@ describe 'protocol module', ->
           assert false, 'Got error: ' + errorType + ' ' + error
           protocol.unregisterProtocol 'atom-file-job'
 
+    it 'returns RequestFileJob should send file from asar archive with unpacked file', (done) ->
+      p = path.join __dirname, 'fixtures', 'asar', 'unpack.asar', 'a.txt'
+      job = new protocol.RequestFileJob(p)
+      handler = remote.createFunctionWithReturnValue job
+      protocol.registerProtocol 'atom-file-job', handler
+
+      $.ajax
+        url: 'atom-file-job://' + p
+        success: (response) ->
+          data = require('fs').readFileSync(p)
+          assert.equal response.length, data.length
+          buf = new Buffer(response.length)
+          buf.write(response)
+          assert buf.equals(data)
+          protocol.unregisterProtocol 'atom-file-job'
+          done()
+        error: (xhr, errorType, error) ->
+          assert false, 'Got error: ' + errorType + ' ' + error
+          protocol.unregisterProtocol 'atom-file-job'
+
   describe 'protocol.isHandledProtocol', ->
     it 'returns true if the scheme can be handled', ->
       assert.equal protocol.isHandledProtocol('file'), true
@@ -342,11 +342,6 @@ describe 'asar package', ->
         done()
       child.send 'message'
 
-    it 'throws ENOENT error when can not find file', ->
-      p = path.join fixtures, 'asar', 'a.asar', 'not-exist'
-      throws = -> child_process.fork p
-      assert.throws throws, /ENOENT/
-
     it 'supports asar in the forked js', (done) ->
       file = path.join fixtures, 'asar', 'a.asar', 'file1'
       child = child_process.fork path.join(fixtures, 'module', 'asar.js')
@@ -367,6 +362,12 @@ describe 'asar package', ->
         assert.equal data, 'file1\n'
         done()
 
+    it 'can request a file in package with unpacked files', (done) ->
+      p = path.resolve fixtures, 'asar', 'unpack.asar', 'a.txt'
+      $.get "file://#{p}", (data) ->
+        assert.equal data, 'a\n'
+        done()
+
     it 'can request a linked file in package', (done) ->
       p = path.resolve fixtures, 'asar', 'a.asar', 'link2', 'link1'
       $.get "file://#{p}", (data) ->
@@ -436,3 +437,8 @@ describe 'asar package', ->
       p = path.join fixtures, 'asar', 'logo.asar', 'logo.png'
       logo = require('native-image').createFromPath p
       assert.deepEqual logo.getSize(), {width: 55, height: 55}
+
+    it 'reads image from asar archive with unpacked files', ->
+      p = path.join fixtures, 'asar', 'unpack.asar', 'atom.png'
+      logo = require('native-image').createFromPath p
+      assert.deepEqual logo.getSize(), {width: 1024, height: 1024}
BIN  spec/fixtures/asar/unpack.asar (new binary file, not shown)

spec/fixtures/asar/unpack.asar.unpacked/a.txt (new file)
@@ -0,0 +1 @@
+a

BIN  spec/fixtures/asar/unpack.asar.unpacked/atom.png (new binary file, not shown; 628 KiB)