Merge pull request #665 from atom/asar
Support loading apps in asar format
This commit is contained in:
commit
9d84f139eb
29 changed files with 1793 additions and 31 deletions
10
atom.gyp
10
atom.gyp
|
@ -39,6 +39,7 @@
|
||||||
'atom/common/api/lib/screen.coffee',
|
'atom/common/api/lib/screen.coffee',
|
||||||
'atom/common/api/lib/shell.coffee',
|
'atom/common/api/lib/shell.coffee',
|
||||||
'atom/common/lib/init.coffee',
|
'atom/common/lib/init.coffee',
|
||||||
|
'atom/common/lib/asar.coffee',
|
||||||
'atom/renderer/lib/chrome-api.coffee',
|
'atom/renderer/lib/chrome-api.coffee',
|
||||||
'atom/renderer/lib/init.coffee',
|
'atom/renderer/lib/init.coffee',
|
||||||
'atom/renderer/lib/inspector.coffee',
|
'atom/renderer/lib/inspector.coffee',
|
||||||
|
@ -121,6 +122,10 @@
|
||||||
'atom/browser/native_window_observer.h',
|
'atom/browser/native_window_observer.h',
|
||||||
'atom/browser/net/adapter_request_job.cc',
|
'atom/browser/net/adapter_request_job.cc',
|
||||||
'atom/browser/net/adapter_request_job.h',
|
'atom/browser/net/adapter_request_job.h',
|
||||||
|
'atom/browser/net/asar/asar_protocol_handler.cc',
|
||||||
|
'atom/browser/net/asar/asar_protocol_handler.h',
|
||||||
|
'atom/browser/net/asar/url_request_asar_job.cc',
|
||||||
|
'atom/browser/net/asar/url_request_asar_job.h',
|
||||||
'atom/browser/net/atom_url_request_job_factory.cc',
|
'atom/browser/net/atom_url_request_job_factory.cc',
|
||||||
'atom/browser/net/atom_url_request_job_factory.h',
|
'atom/browser/net/atom_url_request_job_factory.h',
|
||||||
'atom/browser/net/url_request_string_job.cc',
|
'atom/browser/net/url_request_string_job.cc',
|
||||||
|
@ -174,6 +179,7 @@
|
||||||
'atom/browser/window_list.h',
|
'atom/browser/window_list.h',
|
||||||
'atom/browser/window_list_observer.h',
|
'atom/browser/window_list_observer.h',
|
||||||
'atom/common/api/api_messages.h',
|
'atom/common/api/api_messages.h',
|
||||||
|
'atom/common/api/atom_api_asar.cc',
|
||||||
'atom/common/api/atom_api_clipboard.cc',
|
'atom/common/api/atom_api_clipboard.cc',
|
||||||
'atom/common/api/atom_api_crash_reporter.cc',
|
'atom/common/api/atom_api_crash_reporter.cc',
|
||||||
'atom/common/api/atom_api_id_weak_map.cc',
|
'atom/common/api/atom_api_id_weak_map.cc',
|
||||||
|
@ -186,6 +192,10 @@
|
||||||
'atom/common/api/atom_bindings.h',
|
'atom/common/api/atom_bindings.h',
|
||||||
'atom/common/api/object_life_monitor.cc',
|
'atom/common/api/object_life_monitor.cc',
|
||||||
'atom/common/api/object_life_monitor.h',
|
'atom/common/api/object_life_monitor.h',
|
||||||
|
'atom/common/asar/archive.cc',
|
||||||
|
'atom/common/asar/archive.h',
|
||||||
|
'atom/common/asar/scoped_temporary_file.cc',
|
||||||
|
'atom/common/asar/scoped_temporary_file.h',
|
||||||
'atom/common/common_message_generator.cc',
|
'atom/common/common_message_generator.cc',
|
||||||
'atom/common/common_message_generator.h',
|
'atom/common/common_message_generator.h',
|
||||||
'atom/common/crash_reporter/crash_reporter.cc',
|
'atom/common/crash_reporter/crash_reporter.cc',
|
||||||
|
|
|
@ -91,6 +91,10 @@ void App::OnWindowAllClosed() {
|
||||||
Emit("window-all-closed");
|
Emit("window-all-closed");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
void App::OnQuit() {
|
||||||
|
Emit("quit");
|
||||||
|
}
|
||||||
|
|
||||||
void App::OnOpenFile(bool* prevent_default, const std::string& file_path) {
|
void App::OnOpenFile(bool* prevent_default, const std::string& file_path) {
|
||||||
base::ListValue args;
|
base::ListValue args;
|
||||||
args.AppendString(file_path);
|
args.AppendString(file_path);
|
||||||
|
|
|
@ -36,6 +36,7 @@ class App : public mate::EventEmitter,
|
||||||
// BrowserObserver implementations:
|
// BrowserObserver implementations:
|
||||||
virtual void OnWillQuit(bool* prevent_default) OVERRIDE;
|
virtual void OnWillQuit(bool* prevent_default) OVERRIDE;
|
||||||
virtual void OnWindowAllClosed() OVERRIDE;
|
virtual void OnWindowAllClosed() OVERRIDE;
|
||||||
|
virtual void OnQuit() OVERRIDE;
|
||||||
virtual void OnOpenFile(bool* prevent_default,
|
virtual void OnOpenFile(bool* prevent_default,
|
||||||
const std::string& file_path) OVERRIDE;
|
const std::string& file_path) OVERRIDE;
|
||||||
virtual void OnOpenURL(const std::string& url) OVERRIDE;
|
virtual void OnOpenURL(const std::string& url) OVERRIDE;
|
||||||
|
|
|
@ -6,6 +6,7 @@
|
||||||
|
|
||||||
#include "atom/browser/atom_browser_main_parts.h"
|
#include "atom/browser/atom_browser_main_parts.h"
|
||||||
#include "atom/browser/net/atom_url_request_job_factory.h"
|
#include "atom/browser/net/atom_url_request_job_factory.h"
|
||||||
|
#include "atom/browser/net/asar/asar_protocol_handler.h"
|
||||||
#include "base/threading/sequenced_worker_pool.h"
|
#include "base/threading/sequenced_worker_pool.h"
|
||||||
#include "base/threading/worker_pool.h"
|
#include "base/threading/worker_pool.h"
|
||||||
#include "chrome/browser/browser_process.h"
|
#include "chrome/browser/browser_process.h"
|
||||||
|
@ -20,6 +21,12 @@ using content::BrowserThread;
|
||||||
|
|
||||||
namespace atom {
|
namespace atom {
|
||||||
|
|
||||||
|
namespace {
|
||||||
|
|
||||||
|
const char* kAsarScheme = "asar";
|
||||||
|
|
||||||
|
} // namespace
|
||||||
|
|
||||||
AtomBrowserContext::AtomBrowserContext()
|
AtomBrowserContext::AtomBrowserContext()
|
||||||
: fake_browser_process_(new BrowserProcess),
|
: fake_browser_process_(new BrowserProcess),
|
||||||
job_factory_(new AtomURLRequestJobFactory) {
|
job_factory_(new AtomURLRequestJobFactory) {
|
||||||
|
@ -44,6 +51,10 @@ net::URLRequestJobFactory* AtomBrowserContext::CreateURLRequestJobFactory(
|
||||||
url::kFileScheme, new net::FileProtocolHandler(
|
url::kFileScheme, new net::FileProtocolHandler(
|
||||||
BrowserThread::GetBlockingPool()->GetTaskRunnerWithShutdownBehavior(
|
BrowserThread::GetBlockingPool()->GetTaskRunnerWithShutdownBehavior(
|
||||||
base::SequencedWorkerPool::SKIP_ON_SHUTDOWN)));
|
base::SequencedWorkerPool::SKIP_ON_SHUTDOWN)));
|
||||||
|
job_factory->SetProtocolHandler(
|
||||||
|
kAsarScheme, new asar::AsarProtocolHandler(
|
||||||
|
BrowserThread::GetBlockingPool()->GetTaskRunnerWithShutdownBehavior(
|
||||||
|
base::SequencedWorkerPool::SKIP_ON_SHUTDOWN)));
|
||||||
|
|
||||||
// Set up interceptors in the reverse order.
|
// Set up interceptors in the reverse order.
|
||||||
scoped_ptr<net::URLRequestJobFactory> top_job_factory =
|
scoped_ptr<net::URLRequestJobFactory> top_job_factory =
|
||||||
|
|
|
@ -37,6 +37,8 @@ void Browser::Quit() {
|
||||||
}
|
}
|
||||||
|
|
||||||
void Browser::Shutdown() {
|
void Browser::Shutdown() {
|
||||||
|
FOR_EACH_OBSERVER(BrowserObserver, observers_, OnQuit());
|
||||||
|
|
||||||
is_quiting_ = true;
|
is_quiting_ = true;
|
||||||
base::MessageLoop::current()->Quit();
|
base::MessageLoop::current()->Quit();
|
||||||
}
|
}
|
||||||
|
|
|
@ -18,6 +18,9 @@ class BrowserObserver {
|
||||||
// method will not be called, instead it will call OnWillQuit.
|
// method will not be called, instead it will call OnWillQuit.
|
||||||
virtual void OnWindowAllClosed() {}
|
virtual void OnWindowAllClosed() {}
|
||||||
|
|
||||||
|
// The browser is quitting.
|
||||||
|
virtual void OnQuit() {}
|
||||||
|
|
||||||
// The browser has opened a file by double clicking in Finder or dragging the
|
// The browser has opened a file by double clicking in Finder or dragging the
|
||||||
// file to the Dock icon. (OS X only)
|
// file to the Dock icon. (OS X only)
|
||||||
virtual void OnOpenFile(bool* prevent_default,
|
virtual void OnOpenFile(bool* prevent_default,
|
||||||
|
|
|
@ -52,20 +52,25 @@ setImmediate ->
|
||||||
detail: message
|
detail: message
|
||||||
buttons: ['OK']
|
buttons: ['OK']
|
||||||
|
|
||||||
|
# Emit 'exit' event on quit.
|
||||||
|
require('app').on 'quit', ->
|
||||||
|
process.emit 'exit'
|
||||||
|
|
||||||
# Load the RPC server.
|
# Load the RPC server.
|
||||||
require './rpc-server.js'
|
require './rpc-server.js'
|
||||||
|
|
||||||
# Now we try to load app's package.json.
|
# Now we try to load app's package.json.
|
||||||
packageJson = null
|
packageJson = null
|
||||||
|
|
||||||
packagePath = path.join process.resourcesPath, 'app'
|
searchPaths = [ 'app', 'app.asar', 'default_app' ]
|
||||||
try
|
for packagePath in searchPaths
|
||||||
# First we try to load process.resourcesPath/app
|
try
|
||||||
packageJson = JSON.parse(fs.readFileSync(path.join(packagePath, 'package.json')))
|
packagePath = path.join process.resourcesPath, packagePath
|
||||||
catch error
|
packageJson = JSON.parse(fs.readFileSync(path.join(packagePath, 'package.json')))
|
||||||
# If not found then we load browser/default_app
|
catch e
|
||||||
packagePath = path.join process.resourcesPath, 'default_app'
|
continue
|
||||||
packageJson = JSON.parse(fs.readFileSync(path.join(packagePath, 'package.json')))
|
|
||||||
|
throw new Error("Unable to find a valid app") unless packageJson?
|
||||||
|
|
||||||
# Set application's version.
|
# Set application's version.
|
||||||
app = require 'app'
|
app = require 'app'
|
||||||
|
@ -80,7 +85,7 @@ setImmediate ->
|
||||||
# Set application's desktop name.
|
# Set application's desktop name.
|
||||||
if packageJson.desktopName?
|
if packageJson.desktopName?
|
||||||
app.setDesktopName packageJson.desktopName
|
app.setDesktopName packageJson.desktopName
|
||||||
else
|
else
|
||||||
app.setDesktopName '#{app.getName()}.desktop'
|
app.setDesktopName '#{app.getName()}.desktop'
|
||||||
|
|
||||||
# Load the chrome extension support.
|
# Load the chrome extension support.
|
||||||
|
|
91
atom/browser/net/asar/asar_protocol_handler.cc
Normal file
91
atom/browser/net/asar/asar_protocol_handler.cc
Normal file
|
@ -0,0 +1,91 @@
|
||||||
|
// Copyright (c) 2014 GitHub, Inc. All rights reserved.
|
||||||
|
// Use of this source code is governed by the MIT license that can be
|
||||||
|
// found in the LICENSE file.
|
||||||
|
|
||||||
|
#include "atom/browser/net/asar/asar_protocol_handler.h"
|
||||||
|
|
||||||
|
#include "atom/browser/net/asar/url_request_asar_job.h"
|
||||||
|
#include "atom/common/asar/archive.h"
|
||||||
|
#include "net/base/filename_util.h"
|
||||||
|
#include "net/base/net_errors.h"
|
||||||
|
#include "net/url_request/url_request_error_job.h"
|
||||||
|
#include "net/url_request/url_request_file_job.h"
|
||||||
|
|
||||||
|
namespace asar {
|
||||||
|
|
||||||
|
namespace {
|
||||||
|
|
||||||
|
const base::FilePath::CharType kAsarExtension[] = FILE_PATH_LITERAL(".asar");
|
||||||
|
|
||||||
|
// Get the relative path in asar archive.
|
||||||
|
bool GetAsarPath(const base::FilePath& full_path,
|
||||||
|
base::FilePath* asar_path,
|
||||||
|
base::FilePath* relative_path) {
|
||||||
|
base::FilePath iter = full_path;
|
||||||
|
while (true) {
|
||||||
|
base::FilePath dirname = iter.DirName();
|
||||||
|
if (iter.MatchesExtension(kAsarExtension))
|
||||||
|
break;
|
||||||
|
else if (iter == dirname)
|
||||||
|
return false;
|
||||||
|
iter = dirname;
|
||||||
|
}
|
||||||
|
|
||||||
|
base::FilePath tail;
|
||||||
|
if (!iter.AppendRelativePath(full_path, &tail))
|
||||||
|
return false;
|
||||||
|
|
||||||
|
*asar_path = iter;
|
||||||
|
*relative_path = tail;
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
} // namespace
|
||||||
|
|
||||||
|
AsarProtocolHandler::AsarProtocolHandler(
|
||||||
|
const scoped_refptr<base::TaskRunner>& file_task_runner)
|
||||||
|
: file_task_runner_(file_task_runner) {}
|
||||||
|
|
||||||
|
AsarProtocolHandler::~AsarProtocolHandler() {
|
||||||
|
}
|
||||||
|
|
||||||
|
Archive* AsarProtocolHandler::GetOrCreateAsarArchive(
|
||||||
|
const base::FilePath& path) const {
|
||||||
|
if (!archives_.contains(path)) {
|
||||||
|
scoped_ptr<Archive> archive(new Archive(path));
|
||||||
|
if (!archive->Init())
|
||||||
|
return nullptr;
|
||||||
|
|
||||||
|
archives_.set(path, archive.Pass());
|
||||||
|
}
|
||||||
|
|
||||||
|
return archives_.get(path);
|
||||||
|
}
|
||||||
|
|
||||||
|
net::URLRequestJob* AsarProtocolHandler::MaybeCreateJob(
|
||||||
|
net::URLRequest* request,
|
||||||
|
net::NetworkDelegate* network_delegate) const {
|
||||||
|
base::FilePath full_path;
|
||||||
|
net::FileURLToFilePath(request->url(), &full_path);
|
||||||
|
|
||||||
|
// Create asar:// job when the path contains "xxx.asar/", otherwise treat the
|
||||||
|
// URL request as file://.
|
||||||
|
base::FilePath asar_path, relative_path;
|
||||||
|
if (!GetAsarPath(full_path, &asar_path, &relative_path))
|
||||||
|
return new net::URLRequestFileJob(request, network_delegate, full_path,
|
||||||
|
file_task_runner_);
|
||||||
|
|
||||||
|
Archive* archive = GetOrCreateAsarArchive(asar_path);
|
||||||
|
if (!archive)
|
||||||
|
return new net::URLRequestErrorJob(request, network_delegate,
|
||||||
|
net::ERR_FILE_NOT_FOUND);
|
||||||
|
|
||||||
|
return new URLRequestAsarJob(request, network_delegate, archive,
|
||||||
|
relative_path, file_task_runner_);
|
||||||
|
}
|
||||||
|
|
||||||
|
bool AsarProtocolHandler::IsSafeRedirectTarget(const GURL& location) const {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
} // namespace asar
|
45
atom/browser/net/asar/asar_protocol_handler.h
Normal file
45
atom/browser/net/asar/asar_protocol_handler.h
Normal file
|
@ -0,0 +1,45 @@
|
||||||
|
// Copyright (c) 2014 GitHub, Inc. All rights reserved.
|
||||||
|
// Use of this source code is governed by the MIT license that can be
|
||||||
|
// found in the LICENSE file.
|
||||||
|
|
||||||
|
#ifndef ATOM_BROWSER_NET_ASAR_ASAR_PROTOCOL_HANDLER_H_
|
||||||
|
#define ATOM_BROWSER_NET_ASAR_ASAR_PROTOCOL_HANDLER_H_
|
||||||
|
|
||||||
|
#include "base/containers/scoped_ptr_hash_map.h"
|
||||||
|
#include "base/files/file_path.h"
|
||||||
|
#include "base/memory/ref_counted.h"
|
||||||
|
#include "net/url_request/url_request_job_factory.h"
|
||||||
|
|
||||||
|
namespace base {
|
||||||
|
class TaskRunner;
|
||||||
|
}
|
||||||
|
|
||||||
|
namespace asar {
|
||||||
|
|
||||||
|
class Archive;
|
||||||
|
|
||||||
|
class AsarProtocolHandler : public net::URLRequestJobFactory::ProtocolHandler {
|
||||||
|
public:
|
||||||
|
explicit AsarProtocolHandler(
|
||||||
|
const scoped_refptr<base::TaskRunner>& file_task_runner);
|
||||||
|
virtual ~AsarProtocolHandler();
|
||||||
|
|
||||||
|
Archive* GetOrCreateAsarArchive(const base::FilePath& path) const;
|
||||||
|
|
||||||
|
// net::URLRequestJobFactory::ProtocolHandler:
|
||||||
|
virtual net::URLRequestJob* MaybeCreateJob(
|
||||||
|
net::URLRequest* request,
|
||||||
|
net::NetworkDelegate* network_delegate) const OVERRIDE;
|
||||||
|
virtual bool IsSafeRedirectTarget(const GURL& location) const OVERRIDE;
|
||||||
|
|
||||||
|
private:
|
||||||
|
const scoped_refptr<base::TaskRunner> file_task_runner_;
|
||||||
|
|
||||||
|
mutable base::ScopedPtrHashMap<base::FilePath, Archive> archives_;
|
||||||
|
|
||||||
|
DISALLOW_COPY_AND_ASSIGN(AsarProtocolHandler);
|
||||||
|
};
|
||||||
|
|
||||||
|
} // namespace asar
|
||||||
|
|
||||||
|
#endif // ATOM_BROWSER_NET_ASAR_ASAR_PROTOCOL_HANDLER_H_
|
142
atom/browser/net/asar/url_request_asar_job.cc
Normal file
142
atom/browser/net/asar/url_request_asar_job.cc
Normal file
|
@ -0,0 +1,142 @@
|
||||||
|
// Copyright (c) 2014 GitHub, Inc. All rights reserved.
|
||||||
|
// Use of this source code is governed by the MIT license that can be
|
||||||
|
// found in the LICENSE file.
|
||||||
|
|
||||||
|
#include "atom/browser/net/asar/url_request_asar_job.h"
|
||||||
|
|
||||||
|
#include <string>
|
||||||
|
|
||||||
|
#include "net/base/file_stream.h"
|
||||||
|
#include "net/base/io_buffer.h"
|
||||||
|
#include "net/base/mime_util.h"
|
||||||
|
#include "net/base/net_errors.h"
|
||||||
|
#include "net/url_request/url_request_status.h"
|
||||||
|
|
||||||
|
namespace asar {
|
||||||
|
|
||||||
|
URLRequestAsarJob::URLRequestAsarJob(
|
||||||
|
net::URLRequest* request,
|
||||||
|
net::NetworkDelegate* network_delegate,
|
||||||
|
Archive* archive,
|
||||||
|
const base::FilePath& file_path,
|
||||||
|
const scoped_refptr<base::TaskRunner>& file_task_runner)
|
||||||
|
: net::URLRequestJob(request, network_delegate),
|
||||||
|
archive_(archive),
|
||||||
|
file_path_(file_path),
|
||||||
|
stream_(new net::FileStream(file_task_runner)),
|
||||||
|
remaining_bytes_(0),
|
||||||
|
file_task_runner_(file_task_runner),
|
||||||
|
weak_ptr_factory_(this) {}
|
||||||
|
|
||||||
|
URLRequestAsarJob::~URLRequestAsarJob() {}
|
||||||
|
|
||||||
|
void URLRequestAsarJob::Start() {
|
||||||
|
if (!archive_ || !archive_->GetFileInfo(file_path_, &file_info_)) {
|
||||||
|
NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED,
|
||||||
|
net::ERR_FILE_NOT_FOUND));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
remaining_bytes_ = static_cast<int64>(file_info_.size);
|
||||||
|
|
||||||
|
int flags = base::File::FLAG_OPEN |
|
||||||
|
base::File::FLAG_READ |
|
||||||
|
base::File::FLAG_ASYNC;
|
||||||
|
int rv = stream_->Open(archive_->path(), flags,
|
||||||
|
base::Bind(&URLRequestAsarJob::DidOpen,
|
||||||
|
weak_ptr_factory_.GetWeakPtr()));
|
||||||
|
if (rv != net::ERR_IO_PENDING)
|
||||||
|
DidOpen(rv);
|
||||||
|
}
|
||||||
|
|
||||||
|
void URLRequestAsarJob::Kill() {
|
||||||
|
weak_ptr_factory_.InvalidateWeakPtrs();
|
||||||
|
URLRequestJob::Kill();
|
||||||
|
}
|
||||||
|
|
||||||
|
bool URLRequestAsarJob::ReadRawData(net::IOBuffer* dest,
|
||||||
|
int dest_size,
|
||||||
|
int* bytes_read) {
|
||||||
|
if (remaining_bytes_ < dest_size)
|
||||||
|
dest_size = static_cast<int>(remaining_bytes_);
|
||||||
|
|
||||||
|
// If we should copy zero bytes because |remaining_bytes_| is zero, short
|
||||||
|
// circuit here.
|
||||||
|
if (!dest_size) {
|
||||||
|
*bytes_read = 0;
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
int rv = stream_->Read(dest,
|
||||||
|
dest_size,
|
||||||
|
base::Bind(&URLRequestAsarJob::DidRead,
|
||||||
|
weak_ptr_factory_.GetWeakPtr(),
|
||||||
|
make_scoped_refptr(dest)));
|
||||||
|
if (rv >= 0) {
|
||||||
|
// Data is immediately available.
|
||||||
|
*bytes_read = rv;
|
||||||
|
remaining_bytes_ -= rv;
|
||||||
|
DCHECK_GE(remaining_bytes_, 0);
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Otherwise, a read error occured. We may just need to wait...
|
||||||
|
if (rv == net::ERR_IO_PENDING) {
|
||||||
|
SetStatus(net::URLRequestStatus(net::URLRequestStatus::IO_PENDING, 0));
|
||||||
|
} else {
|
||||||
|
NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, rv));
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
bool URLRequestAsarJob::GetMimeType(std::string* mime_type) const {
|
||||||
|
return net::GetMimeTypeFromFile(file_path_, mime_type);
|
||||||
|
}
|
||||||
|
|
||||||
|
void URLRequestAsarJob::DidOpen(int result) {
|
||||||
|
if (result != net::OK) {
|
||||||
|
NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, result));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
int rv = stream_->Seek(net::FROM_BEGIN,
|
||||||
|
file_info_.offset,
|
||||||
|
base::Bind(&URLRequestAsarJob::DidSeek,
|
||||||
|
weak_ptr_factory_.GetWeakPtr()));
|
||||||
|
if (rv != net::ERR_IO_PENDING) {
|
||||||
|
// stream_->Seek() failed, so pass an intentionally erroneous value
|
||||||
|
// into DidSeek().
|
||||||
|
DidSeek(-1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
void URLRequestAsarJob::DidSeek(int64 result) {
|
||||||
|
if (result != static_cast<int64>(file_info_.offset)) {
|
||||||
|
NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED,
|
||||||
|
net::ERR_REQUEST_RANGE_NOT_SATISFIABLE));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
set_expected_content_size(remaining_bytes_);
|
||||||
|
NotifyHeadersComplete();
|
||||||
|
}
|
||||||
|
|
||||||
|
void URLRequestAsarJob::DidRead(scoped_refptr<net::IOBuffer> buf, int result) {
|
||||||
|
if (result > 0) {
|
||||||
|
SetStatus(net::URLRequestStatus()); // Clear the IO_PENDING status
|
||||||
|
remaining_bytes_ -= result;
|
||||||
|
DCHECK_GE(remaining_bytes_, 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
buf = NULL;
|
||||||
|
|
||||||
|
if (result == 0) {
|
||||||
|
NotifyDone(net::URLRequestStatus());
|
||||||
|
} else if (result < 0) {
|
||||||
|
NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, result));
|
||||||
|
}
|
||||||
|
|
||||||
|
NotifyReadComplete(result);
|
||||||
|
}
|
||||||
|
|
||||||
|
} // namespace asar
|
72
atom/browser/net/asar/url_request_asar_job.h
Normal file
72
atom/browser/net/asar/url_request_asar_job.h
Normal file
|
@ -0,0 +1,72 @@
|
||||||
|
// Copyright (c) 2014 GitHub, Inc. All rights reserved.
|
||||||
|
// Use of this source code is governed by the MIT license that can be
|
||||||
|
// found in the LICENSE file.
|
||||||
|
|
||||||
|
#ifndef ATOM_BROWSER_NET_ASAR_URL_REQUEST_ASAR_JOB_H_
|
||||||
|
#define ATOM_BROWSER_NET_ASAR_URL_REQUEST_ASAR_JOB_H_
|
||||||
|
|
||||||
|
#include <string>
|
||||||
|
|
||||||
|
#include "atom/common/asar/archive.h"
|
||||||
|
#include "base/files/file_path.h"
|
||||||
|
#include "base/memory/ref_counted.h"
|
||||||
|
#include "base/memory/weak_ptr.h"
|
||||||
|
#include "net/url_request/url_request_job.h"
|
||||||
|
|
||||||
|
namespace base {
|
||||||
|
class TaskRunner;
|
||||||
|
}
|
||||||
|
|
||||||
|
namespace net {
|
||||||
|
class FileStream;
|
||||||
|
}
|
||||||
|
|
||||||
|
namespace asar {
|
||||||
|
|
||||||
|
class URLRequestAsarJob : public net::URLRequestJob {
|
||||||
|
public:
|
||||||
|
URLRequestAsarJob(net::URLRequest* request,
|
||||||
|
net::NetworkDelegate* network_delegate,
|
||||||
|
Archive* archive,
|
||||||
|
const base::FilePath& file_path,
|
||||||
|
const scoped_refptr<base::TaskRunner>& file_task_runner);
|
||||||
|
|
||||||
|
// net::URLRequestJob:
|
||||||
|
virtual void Start() OVERRIDE;
|
||||||
|
virtual void Kill() OVERRIDE;
|
||||||
|
virtual bool ReadRawData(net::IOBuffer* buf,
|
||||||
|
int buf_size,
|
||||||
|
int* bytes_read) OVERRIDE;
|
||||||
|
virtual bool GetMimeType(std::string* mime_type) const OVERRIDE;
|
||||||
|
|
||||||
|
protected:
|
||||||
|
virtual ~URLRequestAsarJob();
|
||||||
|
|
||||||
|
private:
|
||||||
|
// Callback after opening file on a background thread.
|
||||||
|
void DidOpen(int result);
|
||||||
|
|
||||||
|
// Callback after seeking to the beginning of |byte_range_| in the file
|
||||||
|
// on a background thread.
|
||||||
|
void DidSeek(int64 result);
|
||||||
|
|
||||||
|
// Callback after data is asynchronously read from the file into |buf|.
|
||||||
|
void DidRead(scoped_refptr<net::IOBuffer> buf, int result);
|
||||||
|
|
||||||
|
Archive* archive_;
|
||||||
|
Archive::FileInfo file_info_;
|
||||||
|
base::FilePath file_path_;
|
||||||
|
|
||||||
|
scoped_ptr<net::FileStream> stream_;
|
||||||
|
int64 remaining_bytes_;
|
||||||
|
|
||||||
|
const scoped_refptr<base::TaskRunner> file_task_runner_;
|
||||||
|
|
||||||
|
base::WeakPtrFactory<URLRequestAsarJob> weak_ptr_factory_;
|
||||||
|
|
||||||
|
DISALLOW_COPY_AND_ASSIGN(URLRequestAsarJob);
|
||||||
|
};
|
||||||
|
|
||||||
|
} // namespace asar
|
||||||
|
|
||||||
|
#endif // ATOM_BROWSER_NET_ASAR_URL_REQUEST_ASAR_JOB_H_
|
117
atom/common/api/atom_api_asar.cc
Normal file
117
atom/common/api/atom_api_asar.cc
Normal file
|
@ -0,0 +1,117 @@
|
||||||
|
// Copyright (c) 2014 GitHub, Inc. All rights reserved.
|
||||||
|
// Use of this source code is governed by the MIT license that can be
|
||||||
|
// found in the LICENSE file.
|
||||||
|
|
||||||
|
#include <vector>
|
||||||
|
|
||||||
|
#include "atom/common/asar/archive.h"
|
||||||
|
#include "atom/common/native_mate_converters/file_path_converter.h"
|
||||||
|
#include "native_mate/arguments.h"
|
||||||
|
#include "native_mate/dictionary.h"
|
||||||
|
#include "native_mate/object_template_builder.h"
|
||||||
|
#include "native_mate/wrappable.h"
|
||||||
|
|
||||||
|
#include "atom/common/node_includes.h"
|
||||||
|
|
||||||
|
namespace {
|
||||||
|
|
||||||
|
class Archive : public mate::Wrappable {
|
||||||
|
public:
|
||||||
|
static v8::Handle<v8::Value> Create(v8::Isolate* isolate,
|
||||||
|
const base::FilePath& path) {
|
||||||
|
scoped_ptr<asar::Archive> archive(new asar::Archive(path));
|
||||||
|
if (!archive->Init())
|
||||||
|
return v8::False(isolate);
|
||||||
|
return (new Archive(archive.Pass()))->GetWrapper(isolate);
|
||||||
|
}
|
||||||
|
|
||||||
|
protected:
|
||||||
|
explicit Archive(scoped_ptr<asar::Archive> archive)
|
||||||
|
: archive_(archive.Pass()) {}
|
||||||
|
|
||||||
|
// Reads the offset and size of file.
|
||||||
|
v8::Handle<v8::Value> GetFileInfo(v8::Isolate* isolate,
|
||||||
|
const base::FilePath& path) {
|
||||||
|
asar::Archive::FileInfo info;
|
||||||
|
if (!archive_ || !archive_->GetFileInfo(path, &info))
|
||||||
|
return v8::False(isolate);
|
||||||
|
mate::Dictionary dict(isolate, v8::Object::New(isolate));
|
||||||
|
dict.Set("size", info.size);
|
||||||
|
dict.Set("offset", info.offset);
|
||||||
|
return dict.GetHandle();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Returns a fake result of fs.stat(path).
|
||||||
|
v8::Handle<v8::Value> Stat(v8::Isolate* isolate,
|
||||||
|
const base::FilePath& path) {
|
||||||
|
asar::Archive::Stats stats;
|
||||||
|
if (!archive_ || !archive_->Stat(path, &stats))
|
||||||
|
return v8::False(isolate);
|
||||||
|
mate::Dictionary dict(isolate, v8::Object::New(isolate));
|
||||||
|
dict.Set("size", stats.size);
|
||||||
|
dict.Set("offset", stats.offset);
|
||||||
|
dict.Set("isFile", stats.is_file);
|
||||||
|
dict.Set("isDirectory", stats.is_directory);
|
||||||
|
dict.Set("isLink", stats.is_link);
|
||||||
|
return dict.GetHandle();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Returns all files under a directory.
|
||||||
|
v8::Handle<v8::Value> Readdir(v8::Isolate* isolate,
|
||||||
|
const base::FilePath& path) {
|
||||||
|
std::vector<base::FilePath> files;
|
||||||
|
if (!archive_ || !archive_->Readdir(path, &files))
|
||||||
|
return v8::False(isolate);
|
||||||
|
return mate::ConvertToV8(isolate, files);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Returns the path of file with symbol link resolved.
|
||||||
|
v8::Handle<v8::Value> Realpath(v8::Isolate* isolate,
|
||||||
|
const base::FilePath& path) {
|
||||||
|
base::FilePath realpath;
|
||||||
|
if (!archive_ || !archive_->Realpath(path, &realpath))
|
||||||
|
return v8::False(isolate);
|
||||||
|
return mate::ConvertToV8(isolate, realpath);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Copy the file out into a temporary file and returns the new path.
|
||||||
|
v8::Handle<v8::Value> CopyFileOut(v8::Isolate* isolate,
|
||||||
|
const base::FilePath& path) {
|
||||||
|
base::FilePath new_path;
|
||||||
|
if (!archive_ || !archive_->CopyFileOut(path, &new_path))
|
||||||
|
return v8::False(isolate);
|
||||||
|
return mate::ConvertToV8(isolate, new_path);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Free the resources used by archive.
|
||||||
|
void Destroy() {
|
||||||
|
archive_.reset();
|
||||||
|
}
|
||||||
|
|
||||||
|
// mate::Wrappable:
|
||||||
|
mate::ObjectTemplateBuilder GetObjectTemplateBuilder(v8::Isolate* isolate) {
|
||||||
|
return mate::ObjectTemplateBuilder(isolate)
|
||||||
|
.SetValue("path", archive_->path())
|
||||||
|
.SetMethod("getFileInfo", &Archive::GetFileInfo)
|
||||||
|
.SetMethod("stat", &Archive::Stat)
|
||||||
|
.SetMethod("readdir", &Archive::Readdir)
|
||||||
|
.SetMethod("realpath", &Archive::Realpath)
|
||||||
|
.SetMethod("copyFileOut", &Archive::CopyFileOut)
|
||||||
|
.SetMethod("destroy", &Archive::Destroy);
|
||||||
|
}
|
||||||
|
|
||||||
|
private:
|
||||||
|
scoped_ptr<asar::Archive> archive_;
|
||||||
|
|
||||||
|
DISALLOW_COPY_AND_ASSIGN(Archive);
|
||||||
|
};
|
||||||
|
|
||||||
|
void Initialize(v8::Handle<v8::Object> exports, v8::Handle<v8::Value> unused,
|
||||||
|
v8::Handle<v8::Context> context, void* priv) {
|
||||||
|
mate::Dictionary dict(context->GetIsolate(), exports);
|
||||||
|
dict.SetMethod("createArchive", &Archive::Create);
|
||||||
|
}
|
||||||
|
|
||||||
|
} // namespace
|
||||||
|
|
||||||
|
NODE_MODULE_CONTEXT_AWARE_BUILTIN(atom_common_asar, Initialize)
|
252
atom/common/asar/archive.cc
Normal file
252
atom/common/asar/archive.cc
Normal file
|
@ -0,0 +1,252 @@
|
||||||
|
// Copyright (c) 2014 GitHub, Inc. All rights reserved.
|
||||||
|
// Use of this source code is governed by the MIT license that can be
|
||||||
|
// found in the LICENSE file.
|
||||||
|
|
||||||
|
#include "atom/common/asar/archive.h"
|
||||||
|
|
||||||
|
#include <string>
|
||||||
|
#include <vector>
|
||||||
|
|
||||||
|
#include "atom/common/asar/scoped_temporary_file.h"
|
||||||
|
#include "base/files/file.h"
|
||||||
|
#include "base/logging.h"
|
||||||
|
#include "base/pickle.h"
|
||||||
|
#include "base/json/json_string_value_serializer.h"
|
||||||
|
#include "base/strings/string_number_conversions.h"
|
||||||
|
|
||||||
|
namespace asar {
|
||||||
|
|
||||||
|
namespace {
|
||||||
|
|
||||||
|
#if defined(OS_WIN)
|
||||||
|
const char kSeparators[] = "\\/";
|
||||||
|
#else
|
||||||
|
const char kSeparators[] = "/";
|
||||||
|
#endif
|
||||||
|
|
||||||
|
bool GetNodeFromPath(std::string path,
|
||||||
|
const base::DictionaryValue* root,
|
||||||
|
const base::DictionaryValue** out);
|
||||||
|
|
||||||
|
// Gets the "files" from "dir".
|
||||||
|
bool GetFilesNode(const base::DictionaryValue* root,
|
||||||
|
const base::DictionaryValue* dir,
|
||||||
|
const base::DictionaryValue** out) {
|
||||||
|
// Test for symbol linked directory.
|
||||||
|
std::string link;
|
||||||
|
if (dir->GetStringWithoutPathExpansion("link", &link)) {
|
||||||
|
const base::DictionaryValue* linked_node = NULL;
|
||||||
|
if (!GetNodeFromPath(link, root, &linked_node))
|
||||||
|
return false;
|
||||||
|
dir = linked_node;
|
||||||
|
}
|
||||||
|
|
||||||
|
return dir->GetDictionaryWithoutPathExpansion("files", out);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Gets sub-file "name" from "dir".
|
||||||
|
bool GetChildNode(const base::DictionaryValue* root,
|
||||||
|
const std::string& name,
|
||||||
|
const base::DictionaryValue* dir,
|
||||||
|
const base::DictionaryValue** out) {
|
||||||
|
const base::DictionaryValue* files = NULL;
|
||||||
|
return GetFilesNode(root, dir, &files) &&
|
||||||
|
files->GetDictionaryWithoutPathExpansion(name, out);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Gets the node of "path" from "root".
|
||||||
|
bool GetNodeFromPath(std::string path,
|
||||||
|
const base::DictionaryValue* root,
|
||||||
|
const base::DictionaryValue** out) {
|
||||||
|
if (path == "") {
|
||||||
|
*out = root;
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
const base::DictionaryValue* dir = root;
|
||||||
|
for (size_t delimiter_position = path.find_first_of(kSeparators);
|
||||||
|
delimiter_position != std::string::npos;
|
||||||
|
delimiter_position = path.find_first_of(kSeparators)) {
|
||||||
|
const base::DictionaryValue* child = NULL;
|
||||||
|
if (!GetChildNode(root, path.substr(0, delimiter_position), dir, &child))
|
||||||
|
return false;
|
||||||
|
|
||||||
|
dir = child;
|
||||||
|
path.erase(0, delimiter_position + 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
return GetChildNode(root, path, dir, out);
|
||||||
|
}
|
||||||
|
|
||||||
|
bool FillFileInfoWithNode(Archive::FileInfo* info,
|
||||||
|
uint32 header_size,
|
||||||
|
const base::DictionaryValue* node) {
|
||||||
|
std::string offset;
|
||||||
|
if (!node->GetString("offset", &offset))
|
||||||
|
return false;
|
||||||
|
if (!base::StringToUint64(offset, &info->offset))
|
||||||
|
return false;
|
||||||
|
|
||||||
|
int size;
|
||||||
|
if (!node->GetInteger("size", &size))
|
||||||
|
return false;
|
||||||
|
|
||||||
|
info->offset += header_size;
|
||||||
|
info->size = static_cast<uint32>(size);
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
} // namespace
|
||||||
|
|
||||||
|
Archive::Archive(const base::FilePath& path)
|
||||||
|
: path_(path),
|
||||||
|
header_size_(0) {
|
||||||
|
}
|
||||||
|
|
||||||
|
Archive::~Archive() {
|
||||||
|
}
|
||||||
|
|
||||||
|
bool Archive::Init() {
|
||||||
|
base::File file(path_, base::File::FLAG_OPEN | base::File::FLAG_READ);
|
||||||
|
if (!file.IsValid())
|
||||||
|
return false;
|
||||||
|
|
||||||
|
std::vector<char> buf;
|
||||||
|
int len;
|
||||||
|
|
||||||
|
buf.resize(8);
|
||||||
|
len = file.ReadAtCurrentPos(buf.data(), buf.size());
|
||||||
|
if (len != static_cast<int>(buf.size())) {
|
||||||
|
PLOG(ERROR) << "Failed to read header size from " << path_.value();
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
uint32 size;
|
||||||
|
if (!PickleIterator(Pickle(buf.data(), buf.size())).ReadUInt32(&size)) {
|
||||||
|
LOG(ERROR) << "Failed to parse header size from " << path_.value();
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
buf.resize(size);
|
||||||
|
len = file.ReadAtCurrentPos(buf.data(), buf.size());
|
||||||
|
if (len != static_cast<int>(buf.size())) {
|
||||||
|
PLOG(ERROR) << "Failed to read header from " << path_.value();
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
std::string header;
|
||||||
|
if (!PickleIterator(Pickle(buf.data(), buf.size())).ReadString(&header)) {
|
||||||
|
LOG(ERROR) << "Failed to parse header from " << path_.value();
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
std::string error;
|
||||||
|
JSONStringValueSerializer serializer(&header);
|
||||||
|
base::Value* value = serializer.Deserialize(NULL, &error);
|
||||||
|
if (!value || !value->IsType(base::Value::TYPE_DICTIONARY)) {
|
||||||
|
LOG(ERROR) << "Failed to parse header: " << error;
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
header_size_ = 8 + size;
|
||||||
|
header_.reset(static_cast<base::DictionaryValue*>(value));
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Gets the size/offset info of the file at |path|, resolving symlink nodes
// to their targets. Requires a successful Init().
bool Archive::GetFileInfo(const base::FilePath& path, FileInfo* info) {
  if (!header_)
    return false;

  const base::DictionaryValue* node;
  if (!GetNodeFromPath(path.AsUTF8Unsafe(), header_.get(), &node))
    return false;

  // A "link" key marks a symlink node; follow it by recursing on the target.
  // NOTE(review): a cycle of links in a malicious archive would recurse
  // without bound — consider adding a depth limit.
  std::string link;
  if (node->GetString("link", &link))
    return GetFileInfo(base::FilePath::FromUTF8Unsafe(link), info);

  return FillFileInfoWithNode(info, header_size_, node);
}
|
||||||
|
|
||||||
|
// Fs.stat(path): reports whether the node is a file, directory or link; for
// regular files also fills in the FileInfo fields. Links are not followed.
bool Archive::Stat(const base::FilePath& path, Stats* stats) {
  if (!header_)
    return false;

  const base::DictionaryValue* node;
  if (!GetNodeFromPath(path.AsUTF8Unsafe(), header_.get(), &node))
    return false;

  // A "link" key marks the node as a symlink.
  if (node->HasKey("link")) {
    stats->is_file = false;
    stats->is_link = true;
    return true;
  }

  // A "files" key marks the node as a directory.
  if (node->HasKey("files")) {
    stats->is_file = false;
    stats->is_directory = true;
    return true;
  }

  // Otherwise it is a regular file; fill size/offset as well.
  return FillFileInfoWithNode(stats, header_size_, node);
}
|
||||||
|
|
||||||
|
// Fs.readdir(path): appends the names of all entries under the directory at
// |path| to |list|.
bool Archive::Readdir(const base::FilePath& path,
                      std::vector<base::FilePath>* list) {
  if (!header_)
    return false;

  const base::DictionaryValue* node;
  if (!GetNodeFromPath(path.AsUTF8Unsafe(), header_.get(), &node))
    return false;

  const base::DictionaryValue* files;
  if (!GetFilesNode(header_.get(), node, &files))
    return false;

  // Each key under "files" is one directory entry.
  for (base::DictionaryValue::Iterator iter(*files); !iter.IsAtEnd();
       iter.Advance()) {
    list->push_back(base::FilePath::FromUTF8Unsafe(iter.key()));
  }
  return true;
}
|
||||||
|
|
||||||
|
// Fs.realpath(path): resolves a symlink node one level to its stored target;
// non-link nodes resolve to themselves.
bool Archive::Realpath(const base::FilePath& path, base::FilePath* realpath) {
  if (!header_)
    return false;

  const base::DictionaryValue* node;
  if (!GetNodeFromPath(path.AsUTF8Unsafe(), header_.get(), &node))
    return false;

  std::string link;
  if (node->GetString("link", &link)) {
    *realpath = base::FilePath::FromUTF8Unsafe(link);
    return true;
  }

  *realpath = path;
  return true;
}
|
||||||
|
|
||||||
|
// Extracts the file at |path| into a real temporary file on disk and returns
// its location; results are cached so repeated requests reuse the same copy.
bool Archive::CopyFileOut(const base::FilePath& path, base::FilePath* out) {
  // Serve from the cache if this file was already extracted.
  if (external_files_.contains(path)) {
    *out = external_files_.get(path)->path();
    return true;
  }

  FileInfo info;
  if (!GetFileInfo(path, &info))
    return false;

  scoped_ptr<ScopedTemporaryFile> temp_file(new ScopedTemporaryFile);
  if (!temp_file->InitFromFile(path_, info.offset, info.size))
    return false;

  *out = temp_file->path();
  // Keep the temporary file alive (and cached) for the archive's lifetime.
  external_files_.set(path, temp_file.Pass());
  return true;
}
|
||||||
|
|
||||||
|
} // namespace asar
|
76
atom/common/asar/archive.h
Normal file
76
atom/common/asar/archive.h
Normal file
|
@ -0,0 +1,76 @@
|
||||||
|
// Copyright (c) 2014 GitHub, Inc. All rights reserved.
|
||||||
|
// Use of this source code is governed by the MIT license that can be
|
||||||
|
// found in the LICENSE file.
|
||||||
|
|
||||||
|
#ifndef ATOM_COMMON_ASAR_ARCHIVE_H_
|
||||||
|
#define ATOM_COMMON_ASAR_ARCHIVE_H_
|
||||||
|
|
||||||
|
#include <vector>
|
||||||
|
|
||||||
|
#include "base/containers/scoped_ptr_hash_map.h"
|
||||||
|
#include "base/files/file_path.h"
|
||||||
|
#include "base/memory/scoped_ptr.h"
|
||||||
|
|
||||||
|
namespace base {
|
||||||
|
class DictionaryValue;
|
||||||
|
}
|
||||||
|
|
||||||
|
namespace asar {
|
||||||
|
|
||||||
|
class ScopedTemporaryFile;
|
||||||
|
|
||||||
|
// This class represents an asar package, and provides methods to read
// information from it.
class Archive {
 public:
  // Location and length of a file's contents inside the archive.
  struct FileInfo {
    FileInfo() : size(0), offset(0) {}
    uint32 size;    // Length of the file's data in bytes.
    uint64 offset;  // Absolute offset of the data within the archive file.
  };

  // Type information of a node, modeled after fs.Stats. Exactly one of the
  // three flags is expected to be true.
  struct Stats : public FileInfo {
    Stats() : is_file(true), is_directory(false), is_link(false) {}
    bool is_file;
    bool is_directory;
    bool is_link;
  };

  explicit Archive(const base::FilePath& path);
  virtual ~Archive();

  // Read and parse the header. Must be called (and succeed) before any of
  // the query methods below will return useful results.
  bool Init();

  // Get the info of a file; symlinks are resolved to their targets.
  bool GetFileInfo(const base::FilePath& path, FileInfo* info);

  // Fs.stat(path).
  bool Stat(const base::FilePath& path, Stats* stats);

  // Fs.readdir(path).
  bool Readdir(const base::FilePath& path, std::vector<base::FilePath>* files);

  // Fs.realpath(path).
  bool Realpath(const base::FilePath& path, base::FilePath* realpath);

  // Copy the file into a temporary file, and return the new path.
  bool CopyFileOut(const base::FilePath& path, base::FilePath* out);

  base::FilePath path() const { return path_; }
  base::DictionaryValue* header() const { return header_.get(); }

 private:
  base::FilePath path_;  // Location of the archive file on disk.
  uint32 header_size_;   // Bytes occupied by the pickled header.
  scoped_ptr<base::DictionaryValue> header_;  // Parsed JSON directory tree.

  // Cached external temporary files.
  base::ScopedPtrHashMap<base::FilePath, ScopedTemporaryFile> external_files_;

  DISALLOW_COPY_AND_ASSIGN(Archive);
};
|
||||||
|
|
||||||
|
} // namespace asar
|
||||||
|
|
||||||
|
#endif // ATOM_COMMON_ASAR_ARCHIVE_H_
|
54
atom/common/asar/scoped_temporary_file.cc
Normal file
54
atom/common/asar/scoped_temporary_file.cc
Normal file
|
@ -0,0 +1,54 @@
|
||||||
|
// Copyright (c) 2014 GitHub, Inc. All rights reserved.
|
||||||
|
// Use of this source code is governed by the MIT license that can be
|
||||||
|
// found in the LICENSE file.
|
||||||
|
|
||||||
|
#include "atom/common/asar/scoped_temporary_file.h"
|
||||||
|
|
||||||
|
#include <vector>
|
||||||
|
|
||||||
|
#include "base/file_util.h"
|
||||||
|
#include "base/threading/thread_restrictions.h"
|
||||||
|
|
||||||
|
namespace asar {
|
||||||
|
|
||||||
|
ScopedTemporaryFile::ScopedTemporaryFile() {
}

ScopedTemporaryFile::~ScopedTemporaryFile() {
  // Best-effort cleanup: a deletion failure cannot be reported from here.
  if (!path_.empty()) {
    base::ThreadRestrictions::ScopedAllowIO allow_io;
    base::DeleteFile(path_, false);
  }
}
|
||||||
|
|
||||||
|
// Creates an empty temporary file; a no-op if one was already created.
bool ScopedTemporaryFile::Init() {
  if (!path_.empty())
    return true;

  base::ThreadRestrictions::ScopedAllowIO allow_io;
  return base::CreateTemporaryFile(&path_);
}
|
||||||
|
|
||||||
|
// Creates a temporary file and fills it with |size| bytes read from |path|
// starting at |offset|. Returns false on any I/O failure or short transfer.
bool ScopedTemporaryFile::InitFromFile(const base::FilePath& path,
                                       uint64 offset, uint64 size) {
  if (!Init())
    return false;

  base::File source(path, base::File::FLAG_OPEN | base::File::FLAG_READ);
  if (!source.IsValid())
    return false;

  // Pull the requested byte range into memory.
  std::vector<char> content(size);
  int read = source.Read(offset, content.data(), content.size());
  if (read != static_cast<int>(size))
    return false;

  // Then write it out to the temporary file created by Init().
  base::File target(path_, base::File::FLAG_OPEN | base::File::FLAG_WRITE);
  if (!target.IsValid())
    return false;

  int written = target.WriteAtCurrentPos(content.data(), content.size());
  return written == static_cast<int>(size);
}
|
||||||
|
|
||||||
|
} // namespace asar
|
37
atom/common/asar/scoped_temporary_file.h
Normal file
37
atom/common/asar/scoped_temporary_file.h
Normal file
|
@ -0,0 +1,37 @@
|
||||||
|
// Copyright (c) 2014 GitHub, Inc. All rights reserved.
|
||||||
|
// Use of this source code is governed by the MIT license that can be
|
||||||
|
// found in the LICENSE file.
|
||||||
|
|
||||||
|
#ifndef ATOM_COMMON_ASAR_SCOPED_TEMPORARY_FILE_H_
|
||||||
|
#define ATOM_COMMON_ASAR_SCOPED_TEMPORARY_FILE_H_
|
||||||
|
|
||||||
|
#include "base/files/file_path.h"
|
||||||
|
|
||||||
|
namespace asar {
|
||||||
|
|
||||||
|
// An object representing a temporary file that should be cleaned up when this
// object goes out of scope. Note that since deletion occurs during the
// destructor, no further error handling is possible if the file fails to
// be deleted. As a result, deletion is not guaranteed by this class.
class ScopedTemporaryFile {
 public:
  ScopedTemporaryFile();
  virtual ~ScopedTemporaryFile();

  // Init an empty temporary file.
  bool Init();

  // Init a temporary file and fill it with |size| bytes of |path|'s content
  // starting at |offset|.
  bool InitFromFile(const base::FilePath& path, uint64 offset, uint64 size);

  base::FilePath path() const { return path_; }

 private:
  base::FilePath path_;

  DISALLOW_COPY_AND_ASSIGN(ScopedTemporaryFile);
};
|
||||||
|
|
||||||
|
} // namespace asar
|
||||||
|
|
||||||
|
#endif // ATOM_COMMON_ASAR_SCOPED_TEMPORARY_FILE_H_
|
296
atom/common/lib/asar.coffee
Normal file
296
atom/common/lib/asar.coffee
Normal file
|
@ -0,0 +1,296 @@
|
||||||
|
asar = process.atomBinding 'asar'
|
||||||
|
child_process = require 'child_process'
|
||||||
|
fs = require 'fs'
|
||||||
|
path = require 'path'
|
||||||
|
util = require 'util'
|
||||||
|
|
||||||
|
# Cache asar archive objects so each package is only opened and parsed once.
cachedArchives = {}
getOrCreateArchive = (p) ->
  return cachedArchives[p] if cachedArchives[p]?
  newArchive = asar.createArchive p
  return false unless newArchive
  cachedArchives[p] = newArchive

# Clean cache on quit.
process.on 'exit', ->
  archive.destroy() for p, archive of cachedArchives
|
||||||
|
|
||||||
|
# Separate an asar package's path from the path inside it. Returns [false]
# for non-asar paths, otherwise [true, asarPath, filePath].
splitPath = (p) ->
  return [false] unless typeof p is 'string'
  # The path is the archive itself.
  return [true, p, ''] if p.substr(-5) is '.asar'
  index = p.lastIndexOf ".asar#{path.sep}"
  if index is -1
    [false]
  else
    [true, p.substr(0, index + 5), p.substr(index + 6)]
|
||||||
|
|
||||||
|
# Convert asar archive's Stats object to fs's Stats object.
nextInode = 0
uid = if process.getuid? then process.getuid() else 0
gid = if process.getgid? then process.getgid() else 0
asarStatsToFsStats = (stats) ->
  dev: 1
  ino: ++nextInode
  # NOTE(review): mode is always reported as a regular 0644 file — confirm
  # callers rely only on the is* predicates below.
  mode: 33188
  nlink: 1
  uid: uid
  gid: gid
  rdev: 0
  size: stats.size
  isFile: -> stats.isFile
  isDirectory: -> stats.isDirectory
  isSymbolicLink: -> stats.isLink
  isBlockDevice: -> false
  isCharacterDevice: -> false
  isFIFO: -> false
  isSocket: -> false
|
||||||
|
|
||||||
|
# Create an ENOENT error mirroring what fs would produce for a missing file.
createNotFoundError = (asarPath, filePath) ->
  notFound = new Error "ENOENT, #{filePath} not found in #{asarPath}"
  notFound.code = "ENOENT"
  notFound.errno = -2
  notFound
|
||||||
|
|
||||||
|
# Override fs APIs.
lstatSync = fs.lstatSync
fs.lstatSync = (p) ->
  [isAsar, asarPath, filePath] = splitPath p
  unless isAsar
    return lstatSync p

  archive = getOrCreateArchive asarPath
  throw new Error("Invalid package #{asarPath}") unless archive

  innerStats = archive.stat filePath
  throw createNotFoundError(asarPath, filePath) unless innerStats

  asarStatsToFsStats innerStats
|
||||||
|
|
||||||
|
lstat = fs.lstat
fs.lstat = (p, callback) ->
  [isAsar, asarPath, filePath] = splitPath p
  return lstat p, callback unless isAsar

  archive = getOrCreateArchive asarPath
  return callback new Error("Invalid package #{asarPath}") unless archive

  # Reuse the archive looked up above instead of resolving it a second time.
  stats = archive.stat filePath
  return callback createNotFoundError(asarPath, filePath) unless stats

  process.nextTick -> callback null, asarStatsToFsStats stats
|
||||||
|
|
||||||
|
statSync = fs.statSync
fs.statSync = (p) ->
  [isAsar] = splitPath p
  return statSync p unless isAsar

  # Do not distinguish links for now.
  fs.lstatSync p
|
||||||
|
|
||||||
|
stat = fs.stat
fs.stat = (p, callback) ->
  [isAsar] = splitPath p
  return stat p, callback unless isAsar

  # Do not distinguish links for now.
  process.nextTick -> fs.lstat p, callback
|
||||||
|
|
||||||
|
statSyncNoException = fs.statSyncNoException
fs.statSyncNoException = (p) ->
  [isAsar, asarPath, filePath] = splitPath p
  return statSyncNoException p unless isAsar

  # This variant reports failure as false instead of throwing.
  archive = getOrCreateArchive asarPath
  if archive
    innerStats = archive.stat filePath
    if innerStats then asarStatsToFsStats innerStats else false
  else
    false
|
||||||
|
|
||||||
|
realpathSync = fs.realpathSync
fs.realpathSync = (p) ->
  [isAsar, asarPath, filePath] = splitPath p
  return realpathSync.apply this, arguments unless isAsar

  archive = getOrCreateArchive asarPath
  throw new Error("Invalid package #{asarPath}") unless archive

  # Resolve the path inside the archive, then the archive's own real path.
  resolved = archive.realpath filePath
  throw createNotFoundError(asarPath, filePath) if resolved is false

  path.join realpathSync(asarPath), resolved
|
||||||
|
|
||||||
|
realpath = fs.realpath
fs.realpath = (p, cache, callback) ->
  [isAsar, asarPath, filePath] = splitPath p
  return realpath.apply this, arguments unless isAsar

  # The cache argument is optional.
  if typeof cache is 'function'
    callback = cache
    cache = undefined

  archive = getOrCreateArchive asarPath
  return callback new Error("Invalid package #{asarPath}") unless archive

  resolved = archive.realpath filePath
  return callback createNotFoundError(asarPath, filePath) if resolved is false

  # Resolve the archive's own real path asynchronously and join the two.
  realpath asarPath, (err, base) ->
    return callback err if err
    callback null, path.join(base, resolved)
|
||||||
|
|
||||||
|
exists = fs.exists
fs.exists = (p, callback) ->
  [isAsar, asarPath, filePath] = splitPath p
  return exists p, callback unless isAsar

  archive = getOrCreateArchive asarPath
  # NOTE(review): fs.exists callbacks normally receive only a boolean;
  # passing an Error here is non-standard — confirm callers handle it.
  return callback new Error("Invalid package #{asarPath}") unless archive

  process.nextTick -> callback archive.stat(filePath) isnt false
|
||||||
|
|
||||||
|
existsSync = fs.existsSync
fs.existsSync = (p) ->
  [isAsar, asarPath, filePath] = splitPath p
  return existsSync p unless isAsar

  archive = getOrCreateArchive asarPath
  if archive then archive.stat(filePath) isnt false else false
|
||||||
|
|
||||||
|
open = fs.open
readFile = fs.readFile
# Read a file out of an asar archive by looking up its offset/size in the
# header, then reading that byte range directly from the archive file.
fs.readFile = (p, options, callback) ->
  [isAsar, asarPath, filePath] = splitPath p
  return readFile.apply this, arguments unless isAsar

  # The options argument is optional.
  if typeof options is 'function'
    callback = options
    options = undefined

  archive = getOrCreateArchive asarPath
  return callback new Error("Invalid package #{asarPath}") unless archive

  info = archive.getFileInfo filePath
  return callback createNotFoundError(asarPath, filePath) unless info

  # Normalize options the same way fs.readFile does.
  if not options
    options = encoding: null, flag: 'r'
  else if util.isString options
    options = encoding: options, flag: 'r'
  else if not util.isObject options
    throw new TypeError('Bad arguments')

  flag = options.flag || 'r'
  encoding = options.encoding

  # Open the archive itself and read the file's byte range out of it.
  buffer = new Buffer(info.size)
  open archive.path, flag, (error, fd) ->
    return callback error if error
    fs.read fd, buffer, 0, info.size, info.offset, (error) ->
      fs.close fd, ->
        callback error, if encoding then buffer.toString encoding else buffer
|
||||||
|
|
||||||
|
openSync = fs.openSync
readFileSync = fs.readFileSync
# Synchronous variant of the asar-aware readFile above.
fs.readFileSync = (p, options) ->
  [isAsar, asarPath, filePath] = splitPath p
  return readFileSync.apply this, arguments unless isAsar

  archive = getOrCreateArchive asarPath
  throw new Error("Invalid package #{asarPath}") unless archive

  info = archive.getFileInfo filePath
  throw createNotFoundError(asarPath, filePath) unless info

  # Normalize options the same way fs.readFileSync does.
  if not options
    options = encoding: null, flag: 'r'
  else if util.isString options
    options = encoding: options, flag: 'r'
  else if not util.isObject options
    throw new TypeError('Bad arguments')

  flag = options.flag || 'r'
  encoding = options.encoding

  buffer = new Buffer(info.size)
  fd = openSync archive.path, flag
  # try/finally alone guarantees the fd is closed; the previous
  # catch-and-rethrow was redundant.
  try
    fs.readSync fd, buffer, 0, info.size, info.offset
  finally
    fs.closeSync fd
  if encoding then buffer.toString encoding else buffer
|
||||||
|
|
||||||
|
readdir = fs.readdir
fs.readdir = (p, callback) ->
  [isAsar, asarPath, filePath] = splitPath p
  return readdir.apply this, arguments unless isAsar

  archive = getOrCreateArchive asarPath
  return callback new Error("Invalid package #{asarPath}") unless archive

  entries = archive.readdir filePath
  return callback createNotFoundError(asarPath, filePath) unless entries

  process.nextTick -> callback null, entries
|
||||||
|
|
||||||
|
readdirSync = fs.readdirSync
fs.readdirSync = (p) ->
  [isAsar, asarPath, filePath] = splitPath p
  return readdirSync.apply this, arguments unless isAsar

  archive = getOrCreateArchive asarPath
  throw new Error("Invalid package #{asarPath}") unless archive

  entries = archive.readdir filePath
  throw createNotFoundError(asarPath, filePath) unless entries

  entries
|
||||||
|
|
||||||
|
# Override APIs that rely on passing file path instead of content to C++.
# The file is copied out of the archive into a real temporary file, and the
# temporary path is substituted for the original argument at index |arg|.
overrideAPISync = (module, name, arg = 0) ->
  old = module[name]
  module[name] = ->
    p = arguments[arg]
    [isAsar, asarPath, filePath] = splitPath p
    return old.apply this, arguments unless isAsar

    archive = getOrCreateArchive asarPath
    throw new Error("Invalid package #{asarPath}") unless archive

    newPath = archive.copyFileOut filePath
    throw createNotFoundError(asarPath, filePath) unless newPath

    arguments[arg] = newPath
    old.apply this, arguments
|
||||||
|
|
||||||
|
# Async variant: like overrideAPISync, but reports failures through the
# callback (the API's last argument) instead of throwing.
overrideAPI = (module, name, arg = 0) ->
  old = module[name]
  module[name] = ->
    p = arguments[arg]
    [isAsar, asarPath, filePath] = splitPath p
    return old.apply this, arguments unless isAsar

    callback = arguments[arguments.length - 1]
    # No callback means the API is being used synchronously. The previous
    # code called overrideAPISync here, which only re-wrapped module[name]
    # and returned the wrapper without ever invoking the API — inline the
    # sync copy-out path and call the original API directly instead.
    unless typeof callback is 'function'
      archive = getOrCreateArchive asarPath
      throw new Error("Invalid package #{asarPath}") unless archive

      newPath = archive.copyFileOut filePath
      throw createNotFoundError(asarPath, filePath) unless newPath

      arguments[arg] = newPath
      return old.apply this, arguments

    archive = getOrCreateArchive asarPath
    return callback new Error("Invalid package #{asarPath}") unless archive

    newPath = archive.copyFileOut filePath
    return callback createNotFoundError(asarPath, filePath) unless newPath

    arguments[arg] = newPath
    old.apply this, arguments
|
||||||
|
|
||||||
|
# Wrap every API that hands a file path down to native code, grouped by the
# module being patched.
overrideAPI fs, 'open'
overrideAPISync fs, 'openSync'
overrideAPI child_process, 'execFile'
overrideAPISync child_process, 'fork'
overrideAPISync process, 'dlopen', 1
overrideAPISync require('module')._extensions, '.node', 1
|
|
@ -33,3 +33,6 @@ global.clearImmediate = timers.clearImmediate
|
||||||
if process.type is 'browser'
|
if process.type is 'browser'
|
||||||
global.setTimeout = wrapWithActivateUvLoop timers.setTimeout
|
global.setTimeout = wrapWithActivateUvLoop timers.setTimeout
|
||||||
global.setInterval = wrapWithActivateUvLoop timers.setInterval
|
global.setInterval = wrapWithActivateUvLoop timers.setInterval
|
||||||
|
|
||||||
|
# Add support for asar packages.
|
||||||
|
require './asar'
|
||||||
|
|
|
@ -69,6 +69,7 @@ REFERENCE_MODULE(atom_browser_protocol);
|
||||||
REFERENCE_MODULE(atom_browser_global_shortcut);
|
REFERENCE_MODULE(atom_browser_global_shortcut);
|
||||||
REFERENCE_MODULE(atom_browser_tray);
|
REFERENCE_MODULE(atom_browser_tray);
|
||||||
REFERENCE_MODULE(atom_browser_window);
|
REFERENCE_MODULE(atom_browser_window);
|
||||||
|
REFERENCE_MODULE(atom_common_asar);
|
||||||
REFERENCE_MODULE(atom_common_clipboard);
|
REFERENCE_MODULE(atom_common_clipboard);
|
||||||
REFERENCE_MODULE(atom_common_crash_reporter);
|
REFERENCE_MODULE(atom_common_crash_reporter);
|
||||||
REFERENCE_MODULE(atom_common_id_weak_map);
|
REFERENCE_MODULE(atom_common_id_weak_map);
|
||||||
|
|
|
@ -24,8 +24,12 @@ require path.resolve(__dirname, '..', '..', 'common', 'lib', 'init.js')
|
||||||
global.require = require
|
global.require = require
|
||||||
global.module = module
|
global.module = module
|
||||||
|
|
||||||
# Set the __filename to the path of html file if it's file:// protocol.
|
# Emit the 'exit' event when page is unloading.
|
||||||
if window.location.protocol is 'file:'
|
window.addEventListener 'unload', ->
|
||||||
|
process.emit 'exit'
|
||||||
|
|
||||||
|
# Set the __filename to the path of html file if it's file: or asar: protocol.
|
||||||
|
if window.location.protocol in ['file:', 'asar:']
|
||||||
pathname =
|
pathname =
|
||||||
if process.platform is 'win32'
|
if process.platform is 'win32'
|
||||||
window.location.pathname.substr 1
|
window.location.pathname.substr 1
|
||||||
|
|
|
@ -2,6 +2,7 @@
|
||||||
|
|
||||||
* [Quick start](tutorial/quick-start.md)
|
* [Quick start](tutorial/quick-start.md)
|
||||||
* [Application distribution](tutorial/application-distribution.md)
|
* [Application distribution](tutorial/application-distribution.md)
|
||||||
|
* [Application packaging](tutorial/application-packaging.md)
|
||||||
* [Using native node modules](tutorial/using-native-node-modules.md)
|
* [Using native node modules](tutorial/using-native-node-modules.md)
|
||||||
* [Debugging browser process](tutorial/debugging-browser-process.md)
|
* [Debugging browser process](tutorial/debugging-browser-process.md)
|
||||||
* [Using Selenium and WebDriver](tutorial/using-selenium-and-webdriver.md)
|
* [Using Selenium and WebDriver](tutorial/using-selenium-and-webdriver.md)
|
||||||
|
|
|
@ -45,6 +45,10 @@ terminating the application.
|
||||||
See description of `window-all-closed` for the differences between `will-quit`
|
See description of `window-all-closed` for the differences between `will-quit`
|
||||||
and it.
|
and it.
|
||||||
|
|
||||||
|
## Event: quit
|
||||||
|
|
||||||
|
Emitted when application is quitting.
|
||||||
|
|
||||||
## Event: open-file
|
## Event: open-file
|
||||||
|
|
||||||
* `event` Event
|
* `event` Event
|
||||||
|
|
|
@ -27,7 +27,19 @@ Then execute `Atom.app` (or `atom` on Linux, and `atom.exe` on Windows), and
|
||||||
atom-shell will start as your app. The `atom-shell` directory would then be
|
atom-shell will start as your app. The `atom-shell` directory would then be
|
||||||
your distribution that should be delivered to final users.
|
your distribution that should be delivered to final users.
|
||||||
|
|
||||||
## Build with grunt
|
## Packaging your app into a file
|
||||||
|
|
||||||
|
Apart from shipping your app by copying all its sources files, you can also
|
||||||
|
package your app into [asar](https://github.com/atom/asar) archive to avoid
|
||||||
|
exposing your app's source code to users.
|
||||||
|
|
||||||
|
To use the `asar` archive to replace the `app` folder, you need to rename the
|
||||||
|
archive to `app.asar`, and put it under atom-shell's resources directory,
|
||||||
|
atom-shell will then try to read the archive and start from it.
|
||||||
|
|
||||||
|
More details can be found in [Application packaging](application-packaging.md).
|
||||||
|
|
||||||
|
## Building with grunt
|
||||||
|
|
||||||
If you build your application with `grunt` there is a grunt task that can
|
If you build your application with `grunt` there is a grunt task that can
|
||||||
download atom-shell for your current platform automatically:
|
download atom-shell for your current platform automatically:
|
||||||
|
|
139
docs/tutorial/application-packaging.md
Normal file
139
docs/tutorial/application-packaging.md
Normal file
|
@ -0,0 +1,139 @@
|
||||||
|
# Application packaging
|
||||||
|
|
||||||
|
To protect your app's resources and source code from the users, you can choose
|
||||||
|
to package your app into [asar][asar] archive with little changes to your source
|
||||||
|
code.
|
||||||
|
|
||||||
|
## Generating `asar` archive
|
||||||
|
|
||||||
|
The [asar][asar] archive is a simple tar-like format that concatenates files
|
||||||
|
into a single file; atom-shell can read arbitrary files from it without unpacking
|
||||||
|
the whole file.
|
||||||
|
|
||||||
|
The following are the steps to package your app into an `asar` archive:
|
||||||
|
|
||||||
|
### 1. Install asar utility
|
||||||
|
|
||||||
|
```bash
|
||||||
|
$ npm install -g asar
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. Package with `asar pack`
|
||||||
|
|
||||||
|
```bash
|
||||||
|
$ asar pack your-app app.asar
|
||||||
|
```
|
||||||
|
|
||||||
|
## Using `asar` archives
|
||||||
|
|
||||||
|
In atom-shell there are two sets of APIs: Node APIs provided by Node.js, and Web
|
||||||
|
APIs provided by Chromium. Both APIs support reading file from `asar` archives.
|
||||||
|
|
||||||
|
### Node API
|
||||||
|
|
||||||
|
With special patches in atom-shell, Node APIs like `fs.readFile` and `require`
|
||||||
|
treat `asar` archives as virtual directories, and the files in it as normal
|
||||||
|
files in filesystem.
|
||||||
|
|
||||||
|
For example, suppose we have an `example.asar` archive under `/path/to`:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
$ asar list /path/to/example.asar
|
||||||
|
/app.js
|
||||||
|
/file.txt
|
||||||
|
/dir/module.js
|
||||||
|
/static/index.html
|
||||||
|
/static/main.css
|
||||||
|
/static/jquery.min.js
|
||||||
|
```
|
||||||
|
|
||||||
|
Read a file in `asar` archive:
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
var fs = require('fs');
|
||||||
|
fs.readFileSync('/path/to/example.asar/file.txt');
|
||||||
|
```
|
||||||
|
|
||||||
|
List all files under the root of archive:
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
var fs = require('fs');
|
||||||
|
fs.readdirSync('/path/to/example.asar');
|
||||||
|
```
|
||||||
|
|
||||||
|
Use a module from the archive:
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
require('/path/to/example.asar/dir/module.js');
|
||||||
|
```
|
||||||
|
|
||||||
|
### Web API
|
||||||
|
|
||||||
|
In web pages, files in an archive can be requested by using the `asar:` protocol;
|
||||||
|
like node API, `asar` archives are treated as directories.
|
||||||
|
|
||||||
|
For example, to get a file with `$.get`:
|
||||||
|
|
||||||
|
```html
|
||||||
|
<script>
|
||||||
|
var $ = require('./jquery.min.js');
|
||||||
|
$.get('asar:/path/to/example.asar/file.txt', function(data) {
|
||||||
|
console.log(data);
|
||||||
|
});
|
||||||
|
</script>
|
||||||
|
```
|
||||||
|
|
||||||
|
The `asar:` protocol can also be used to request normal files in filesystem,
|
||||||
|
just like the `file:` protocol. But unlike the `file:` protocol, there are no slashes
|
||||||
|
(`//`) after `asar:`.
|
||||||
|
|
||||||
|
You can also display a web page in `asar` archive with `BrowserWindow`:
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
var BrowserWindow = require('browser-window');
|
||||||
|
var win = new BrowserWindow({width: 800, height: 600});
|
||||||
|
win.loadUrl('asar:/path/to/example.asar/static/index.html');
|
||||||
|
```
|
||||||
|
|
||||||
|
## Limitations on Node API
|
||||||
|
|
||||||
|
Even though we tried hard to make `asar` archives in Node API work like
|
||||||
|
directories as much as possible, there are still limitations due to the
|
||||||
|
low-level nature of Node API.
|
||||||
|
|
||||||
|
### Archives are read only
|
||||||
|
|
||||||
|
The archives cannot be modified, so all Node APIs that can modify files will not
|
||||||
|
work with `asar` archives.
|
||||||
|
|
||||||
|
### Working directory can not be set to directories in archive
|
||||||
|
|
||||||
|
Though `asar` archives are treated as directories, there are no actual
|
||||||
|
directories in the filesystem, so you can never set working directory to
|
||||||
|
directories in `asar` archives, passing them to `cwd` option of some APIs will
|
||||||
|
also cause errors.
|
||||||
|
|
||||||
|
### Extra unpacking on some APIs
|
||||||
|
|
||||||
|
Most `fs` APIs can read file or get file's information from `asar` archives
|
||||||
|
without unpacking, but for some APIs that rely on passing the real file path to
|
||||||
|
underlying system calls, atom-shell will extract the needed file into a
|
||||||
|
temporary file and pass the path of the temporary file to the APIs to make them
|
||||||
|
work. This adds a little overhead for those APIs.
|
||||||
|
|
||||||
|
APIs that require extra unpacking are:
|
||||||
|
|
||||||
|
* `child_process.execFile`
|
||||||
|
* `child_process.fork`
|
||||||
|
* `fs.open`
|
||||||
|
* `fs.openSync`
|
||||||
|
* `process.dlopen` - Used by `require` on native modules
|
||||||
|
|
||||||
|
### Fake stat information of `fs.stat`
|
||||||
|
|
||||||
|
The `Stats` object returned by `fs.stat` and its friends on files in `asar`
|
||||||
|
archives are generated by guessing, because those files do not exist on
|
||||||
|
filesystem. So you should not trust the `Stats` object except for getting file
|
||||||
|
size and checking file type.
|
||||||
|
|
||||||
|
[asar]: https://github.com/atom/asar
|
|
@ -8,8 +8,8 @@ a variant of the Node.js runtime which is focused on desktop applications
|
||||||
instead of web servers.
|
instead of web servers.
|
||||||
|
|
||||||
It doesn't mean atom-shell is a JavaScript binding to GUI libraries. Instead,
|
It doesn't mean atom-shell is a JavaScript binding to GUI libraries. Instead,
|
||||||
atom-shell uses web pages as its GUI, so you could also see it as a minimal Chromium
|
atom-shell uses web pages as its GUI, so you could also see it as a minimal
|
||||||
browser, controlled by JavaScript.
|
Chromium browser, controlled by JavaScript.
|
||||||
|
|
||||||
### The browser side
|
### The browser side
|
||||||
|
|
||||||
|
@ -18,9 +18,11 @@ are two types of JavaScript scripts: the server side scripts and the client side
|
||||||
scripts. Server-side JavaScript is that which runs on the Node.js
|
scripts. Server-side JavaScript is that which runs on the Node.js
|
||||||
runtime, while client-side JavaScript runs inside the user's browser.
|
runtime, while client-side JavaScript runs inside the user's browser.
|
||||||
|
|
||||||
In atom-shell we have similar concepts: Since atom-shell displays a GUI by showing
|
In atom-shell we have similar concepts: Since atom-shell displays a GUI by
|
||||||
web pages, we have **scripts that run in the web page**, and also **scripts run by the atom-shell runtime**, which creates those web pages.
|
showing web pages, we have **scripts that run in the web page**, and also
|
||||||
Like Node.js, we call them **client scripts**, and **browser scripts** (meaning the browser replaces the concept of the server here).
|
**scripts run by the atom-shell runtime**, which creates those web pages.
|
||||||
|
Like Node.js, we call them **client scripts**, and **browser scripts**
|
||||||
|
(meaning the browser replaces the concept of the server here).
|
||||||
|
|
||||||
In traditional Node.js applications, communication between server and
|
In traditional Node.js applications, communication between server and
|
||||||
client is usually facilitated via web sockets. In atom-shell, we have provided
|
client is usually facilitated via web sockets. In atom-shell, we have provided
|
||||||
|
@ -30,19 +32,20 @@ support.
|
||||||
|
|
||||||
### Web page and Node.js
|
### Web page and Node.js
|
||||||
|
|
||||||
Normal web pages are designed to not reach outside of the browser, which makes them
|
Normal web pages are designed to not reach outside of the browser, which makes
|
||||||
unsuitable for interacting with native systems. Atom-shell provides Node.js APIs
|
them unsuitable for interacting with native systems. Atom-shell provides Node.js
|
||||||
in web pages so you can access native resources from web pages, just like
|
APIs in web pages so you can access native resources from web pages, just like
|
||||||
[Node-Webkit](https://github.com/rogerwang/node-webkit).
|
[Node-Webkit](https://github.com/rogerwang/node-webkit).
|
||||||
|
|
||||||
But unlike Node-Webkit, you cannot do native GUI related operations in web
|
But unlike Node-Webkit, you cannot do native GUI related operations in web
|
||||||
pages. Instead you need to do them on the browser side by sending messages to it, or
|
pages. Instead you need to do them on the browser side by sending messages to
|
||||||
using the easy [remote](../api/remote.md) module.
|
it, or using the easy [remote](../api/remote.md) module.
|
||||||
|
|
||||||
|
|
||||||
## Write your first atom-shell app
|
## Write your first atom-shell app
|
||||||
|
|
||||||
Generally, an atom-shell app would be structured like this (see the [hello-atom](https://github.com/dougnukem/hello-atom) repo for reference):
|
Generally, an atom-shell app would be structured like this (see the
|
||||||
|
[hello-atom](https://github.com/dougnukem/hello-atom) repo for reference):
|
||||||
|
|
||||||
```text
|
```text
|
||||||
your-app/
|
your-app/
|
||||||
|
@ -51,10 +54,10 @@ your-app/
|
||||||
└── index.html
|
└── index.html
|
||||||
```
|
```
|
||||||
|
|
||||||
The format of `package.json` is exactly the same as that of Node's modules, and the
|
The format of `package.json` is exactly the same as that of Node's modules, and
|
||||||
script specified by the `main` field is the startup script of your app, which
|
the script specified by the `main` field is the startup script of your app,
|
||||||
will run on the browser side. An example of your `package.json` might look like
|
which will run on the browser side. An example of your `package.json` might look
|
||||||
this:
|
like this:
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
|
@ -123,8 +126,8 @@ Finally the `index.html` is the web page you want to show:
|
||||||
|
|
||||||
After you're done writing your app, you can create a distribution by
|
After you're done writing your app, you can create a distribution by
|
||||||
following the [Application distribution](./application-distribution.md) guide
|
following the [Application distribution](./application-distribution.md) guide
|
||||||
and then execute the packaged app.
|
and then execute the packaged app. You can also just use the downloaded
|
||||||
You can also just use the downloaded atom-shell binary to execute your app directly.
|
atom-shell binary to execute your app directly.
|
||||||
|
|
||||||
On Windows:
|
On Windows:
|
||||||
|
|
||||||
|
@ -144,4 +147,5 @@ On Mac OS X:
|
||||||
$ ./Atom.app/Contents/MacOS/Atom your-app/
|
$ ./Atom.app/Contents/MacOS/Atom your-app/
|
||||||
```
|
```
|
||||||
|
|
||||||
`Atom.app` here is part of the atom-shell's release package, you can download it from [here](https://github.com/atom/atom-shell/releases).
|
`Atom.app` here is part of the atom-shell's release package, you can download
|
||||||
|
it from [here](https://github.com/atom/atom-shell/releases).
|
||||||
|
|
375
spec/asar-spec.coffee
Normal file
375
spec/asar-spec.coffee
Normal file
|
@ -0,0 +1,375 @@
|
||||||
|
assert = require 'assert'
|
||||||
|
fs = require 'fs'
|
||||||
|
path = require 'path'
|
||||||
|
|
||||||
|
describe 'asar package', ->
|
||||||
|
fixtures = path.join __dirname, 'fixtures'
|
||||||
|
|
||||||
|
describe 'node api', ->
|
||||||
|
describe 'fs.readFileSync', ->
|
||||||
|
it 'reads a normal file', ->
|
||||||
|
file1 = path.join fixtures, 'asar', 'a.asar', 'file1'
|
||||||
|
assert.equal fs.readFileSync(file1).toString(), 'file1\n'
|
||||||
|
file2 = path.join fixtures, 'asar', 'a.asar', 'file2'
|
||||||
|
assert.equal fs.readFileSync(file2).toString(), 'file2\n'
|
||||||
|
file3 = path.join fixtures, 'asar', 'a.asar', 'file3'
|
||||||
|
assert.equal fs.readFileSync(file3).toString(), 'file3\n'
|
||||||
|
|
||||||
|
it 'reads a linked file', ->
|
||||||
|
p = path.join fixtures, 'asar', 'a.asar', 'link1'
|
||||||
|
assert.equal fs.readFileSync(p).toString(), 'file1\n'
|
||||||
|
|
||||||
|
it 'reads a file from linked directory', ->
|
||||||
|
p = path.join fixtures, 'asar', 'a.asar', 'link2', 'file1'
|
||||||
|
assert.equal fs.readFileSync(p).toString(), 'file1\n'
|
||||||
|
p = path.join fixtures, 'asar', 'a.asar', 'link2', 'link2', 'file1'
|
||||||
|
assert.equal fs.readFileSync(p).toString(), 'file1\n'
|
||||||
|
|
||||||
|
it 'throws ENOENT error when can not find file', ->
|
||||||
|
p = path.join fixtures, 'asar', 'a.asar', 'not-exist'
|
||||||
|
throws = -> fs.readFileSync p
|
||||||
|
assert.throws throws, /ENOENT/
|
||||||
|
|
||||||
|
describe 'fs.readFile', ->
|
||||||
|
it 'reads a normal file', (done) ->
|
||||||
|
p = path.join fixtures, 'asar', 'a.asar', 'file1'
|
||||||
|
fs.readFile p, (err, content) ->
|
||||||
|
assert.equal err, null
|
||||||
|
assert.equal String(content), 'file1\n'
|
||||||
|
done()
|
||||||
|
|
||||||
|
it 'reads a linked file', (done) ->
|
||||||
|
p = path.join fixtures, 'asar', 'a.asar', 'link1'
|
||||||
|
fs.readFile p, (err, content) ->
|
||||||
|
assert.equal err, null
|
||||||
|
assert.equal String(content), 'file1\n'
|
||||||
|
done()
|
||||||
|
|
||||||
|
it 'reads a file from linked directory', (done) ->
|
||||||
|
p = path.join fixtures, 'asar', 'a.asar', 'link2', 'link2', 'file1'
|
||||||
|
fs.readFile p, (err, content) ->
|
||||||
|
assert.equal err, null
|
||||||
|
assert.equal String(content), 'file1\n'
|
||||||
|
done()
|
||||||
|
|
||||||
|
it 'throws ENOENT error when can not find file', (done) ->
|
||||||
|
p = path.join fixtures, 'asar', 'a.asar', 'not-exist'
|
||||||
|
fs.readFile p, (err, content) ->
|
||||||
|
assert.equal err.code, 'ENOENT'
|
||||||
|
done()
|
||||||
|
|
||||||
|
describe 'fs.lstatSync', ->
|
||||||
|
it 'returns information of root', ->
|
||||||
|
p = path.join fixtures, 'asar', 'a.asar'
|
||||||
|
stats = fs.lstatSync p
|
||||||
|
assert.equal stats.isFile(), false
|
||||||
|
assert.equal stats.isDirectory(), true
|
||||||
|
assert.equal stats.isSymbolicLink(), false
|
||||||
|
assert.equal stats.size, 0
|
||||||
|
|
||||||
|
it 'returns information of a normal file', ->
|
||||||
|
for file in ['file1', 'file2', 'file3', path.join('dir1', 'file1'), path.join('link2', 'file1')]
|
||||||
|
p = path.join fixtures, 'asar', 'a.asar', file
|
||||||
|
stats = fs.lstatSync p
|
||||||
|
assert.equal stats.isFile(), true
|
||||||
|
assert.equal stats.isDirectory(), false
|
||||||
|
assert.equal stats.isSymbolicLink(), false
|
||||||
|
assert.equal stats.size, 6
|
||||||
|
|
||||||
|
it 'returns information of a normal directory', ->
|
||||||
|
for file in ['dir1', 'dir2', 'dir3']
|
||||||
|
p = path.join fixtures, 'asar', 'a.asar', file
|
||||||
|
stats = fs.lstatSync p
|
||||||
|
assert.equal stats.isFile(), false
|
||||||
|
assert.equal stats.isDirectory(), true
|
||||||
|
assert.equal stats.isSymbolicLink(), false
|
||||||
|
assert.equal stats.size, 0
|
||||||
|
|
||||||
|
it 'returns information of a linked file', ->
|
||||||
|
for file in ['link1', path.join('dir1', 'link1'), path.join('link2', 'link2')]
|
||||||
|
p = path.join fixtures, 'asar', 'a.asar', file
|
||||||
|
stats = fs.lstatSync p
|
||||||
|
assert.equal stats.isFile(), false
|
||||||
|
assert.equal stats.isDirectory(), false
|
||||||
|
assert.equal stats.isSymbolicLink(), true
|
||||||
|
assert.equal stats.size, 0
|
||||||
|
|
||||||
|
it 'returns information of a linked directory', ->
|
||||||
|
for file in ['link2', path.join('dir1', 'link2'), path.join('link2', 'link2')]
|
||||||
|
p = path.join fixtures, 'asar', 'a.asar', file
|
||||||
|
stats = fs.lstatSync p
|
||||||
|
assert.equal stats.isFile(), false
|
||||||
|
assert.equal stats.isDirectory(), false
|
||||||
|
assert.equal stats.isSymbolicLink(), true
|
||||||
|
assert.equal stats.size, 0
|
||||||
|
|
||||||
|
it 'throws ENOENT error when can not find file', ->
|
||||||
|
for file in ['file4', 'file5', path.join('dir1', 'file4')]
|
||||||
|
p = path.join fixtures, 'asar', 'a.asar', file
|
||||||
|
throws = -> fs.lstatSync p
|
||||||
|
assert.throws throws, /ENOENT/
|
||||||
|
|
||||||
|
describe 'fs.lstat', ->
|
||||||
|
it 'returns information of root', (done) ->
|
||||||
|
p = path.join fixtures, 'asar', 'a.asar'
|
||||||
|
stats = fs.lstat p, (err, stats) ->
|
||||||
|
assert.equal err, null
|
||||||
|
assert.equal stats.isFile(), false
|
||||||
|
assert.equal stats.isDirectory(), true
|
||||||
|
assert.equal stats.isSymbolicLink(), false
|
||||||
|
assert.equal stats.size, 0
|
||||||
|
done()
|
||||||
|
|
||||||
|
it 'returns information of a normal file', (done) ->
|
||||||
|
p = path.join fixtures, 'asar', 'a.asar', 'link2', 'file1'
|
||||||
|
stats = fs.lstat p, (err, stats) ->
|
||||||
|
assert.equal err, null
|
||||||
|
assert.equal stats.isFile(), true
|
||||||
|
assert.equal stats.isDirectory(), false
|
||||||
|
assert.equal stats.isSymbolicLink(), false
|
||||||
|
assert.equal stats.size, 6
|
||||||
|
done()
|
||||||
|
|
||||||
|
it 'returns information of a normal directory', (done) ->
|
||||||
|
p = path.join fixtures, 'asar', 'a.asar', 'dir1'
|
||||||
|
stats = fs.lstat p, (err, stats) ->
|
||||||
|
assert.equal err, null
|
||||||
|
assert.equal stats.isFile(), false
|
||||||
|
assert.equal stats.isDirectory(), true
|
||||||
|
assert.equal stats.isSymbolicLink(), false
|
||||||
|
assert.equal stats.size, 0
|
||||||
|
done()
|
||||||
|
|
||||||
|
it 'returns information of a linked file', (done) ->
|
||||||
|
p = path.join fixtures, 'asar', 'a.asar', 'link2', 'link1'
|
||||||
|
stats = fs.lstat p, (err, stats) ->
|
||||||
|
assert.equal err, null
|
||||||
|
assert.equal stats.isFile(), false
|
||||||
|
assert.equal stats.isDirectory(), false
|
||||||
|
assert.equal stats.isSymbolicLink(), true
|
||||||
|
assert.equal stats.size, 0
|
||||||
|
done()
|
||||||
|
|
||||||
|
it 'returns information of a linked directory', (done) ->
|
||||||
|
p = path.join fixtures, 'asar', 'a.asar', 'link2', 'link2'
|
||||||
|
stats = fs.lstat p, (err, stats) ->
|
||||||
|
assert.equal err, null
|
||||||
|
assert.equal stats.isFile(), false
|
||||||
|
assert.equal stats.isDirectory(), false
|
||||||
|
assert.equal stats.isSymbolicLink(), true
|
||||||
|
assert.equal stats.size, 0
|
||||||
|
done()
|
||||||
|
|
||||||
|
it 'throws ENOENT error when can not find file', (done) ->
|
||||||
|
p = path.join fixtures, 'asar', 'a.asar', 'file4'
|
||||||
|
stats = fs.lstat p, (err, stats) ->
|
||||||
|
assert.equal err.code, 'ENOENT'
|
||||||
|
done()
|
||||||
|
|
||||||
|
describe 'fs.realpathSync', ->
|
||||||
|
it 'returns real path root', ->
|
||||||
|
parent = fs.realpathSync path.join(fixtures, 'asar')
|
||||||
|
p = 'a.asar'
|
||||||
|
r = fs.realpathSync path.join(parent, p)
|
||||||
|
assert.equal r, path.join(parent, p)
|
||||||
|
|
||||||
|
it 'returns real path of a normal file', ->
|
||||||
|
parent = fs.realpathSync path.join(fixtures, 'asar')
|
||||||
|
p = path.join 'a.asar', 'file1'
|
||||||
|
r = fs.realpathSync path.join(parent, p)
|
||||||
|
assert.equal r, path.join(parent, p)
|
||||||
|
|
||||||
|
it 'returns real path of a normal directory', ->
|
||||||
|
parent = fs.realpathSync path.join(fixtures, 'asar')
|
||||||
|
p = path.join 'a.asar', 'dir1'
|
||||||
|
r = fs.realpathSync path.join(parent, p)
|
||||||
|
assert.equal r, path.join(parent, p)
|
||||||
|
|
||||||
|
it 'returns real path of a linked file', ->
|
||||||
|
parent = fs.realpathSync path.join(fixtures, 'asar')
|
||||||
|
p = path.join 'a.asar', 'link2', 'link1'
|
||||||
|
r = fs.realpathSync path.join(parent, p)
|
||||||
|
assert.equal r, path.join(parent, 'a.asar', 'file1')
|
||||||
|
|
||||||
|
it 'returns real path of a linked directory', ->
|
||||||
|
parent = fs.realpathSync path.join(fixtures, 'asar')
|
||||||
|
p = path.join 'a.asar', 'link2', 'link2'
|
||||||
|
r = fs.realpathSync path.join(parent, p)
|
||||||
|
assert.equal r, path.join(parent, 'a.asar', 'dir1')
|
||||||
|
|
||||||
|
it 'throws ENOENT error when can not find file', ->
|
||||||
|
parent = fs.realpathSync path.join(fixtures, 'asar')
|
||||||
|
p = path.join 'a.asar', 'not-exist'
|
||||||
|
throws = -> fs.realpathSync path.join(parent, p)
|
||||||
|
assert.throws throws, /ENOENT/
|
||||||
|
|
||||||
|
describe 'fs.realpath', ->
|
||||||
|
it 'returns real path root', (done) ->
|
||||||
|
parent = fs.realpathSync path.join(fixtures, 'asar')
|
||||||
|
p = 'a.asar'
|
||||||
|
fs.realpath path.join(parent, p), (err, r) ->
|
||||||
|
assert.equal err, null
|
||||||
|
assert.equal r, path.join(parent, p)
|
||||||
|
done()
|
||||||
|
|
||||||
|
it 'returns real path of a normal file', (done) ->
|
||||||
|
parent = fs.realpathSync path.join(fixtures, 'asar')
|
||||||
|
p = path.join 'a.asar', 'file1'
|
||||||
|
fs.realpath path.join(parent, p), (err, r) ->
|
||||||
|
assert.equal err, null
|
||||||
|
assert.equal r, path.join(parent, p)
|
||||||
|
done()
|
||||||
|
|
||||||
|
it 'returns real path of a normal directory', (done) ->
|
||||||
|
parent = fs.realpathSync path.join(fixtures, 'asar')
|
||||||
|
p = path.join 'a.asar', 'dir1'
|
||||||
|
fs.realpath path.join(parent, p), (err, r) ->
|
||||||
|
assert.equal err, null
|
||||||
|
assert.equal r, path.join(parent, p)
|
||||||
|
done()
|
||||||
|
|
||||||
|
it 'returns real path of a linked file', (done) ->
|
||||||
|
parent = fs.realpathSync path.join(fixtures, 'asar')
|
||||||
|
p = path.join 'a.asar', 'link2', 'link1'
|
||||||
|
fs.realpath path.join(parent, p), (err, r) ->
|
||||||
|
assert.equal err, null
|
||||||
|
assert.equal r, path.join(parent, 'a.asar', 'file1')
|
||||||
|
done()
|
||||||
|
|
||||||
|
it 'returns real path of a linked directory', (done) ->
|
||||||
|
parent = fs.realpathSync path.join(fixtures, 'asar')
|
||||||
|
p = path.join 'a.asar', 'link2', 'link2'
|
||||||
|
fs.realpath path.join(parent, p), (err, r) ->
|
||||||
|
assert.equal err, null
|
||||||
|
assert.equal r, path.join(parent, 'a.asar', 'dir1')
|
||||||
|
done()
|
||||||
|
|
||||||
|
it 'throws ENOENT error when can not find file', (done) ->
|
||||||
|
parent = fs.realpathSync path.join(fixtures, 'asar')
|
||||||
|
p = path.join 'a.asar', 'not-exist'
|
||||||
|
fs.realpath path.join(parent, p), (err, stats) ->
|
||||||
|
assert.equal err.code, 'ENOENT'
|
||||||
|
done()
|
||||||
|
|
||||||
|
describe 'fs.readdirSync', ->
|
||||||
|
it 'reads dirs from root', ->
|
||||||
|
p = path.join fixtures, 'asar', 'a.asar'
|
||||||
|
dirs = fs.readdirSync p
|
||||||
|
assert.deepEqual dirs, ['dir1', 'dir2', 'dir3', 'file1', 'file2', 'file3', 'link1', 'link2', 'ping.js']
|
||||||
|
|
||||||
|
it 'reads dirs from a normal dir', ->
|
||||||
|
p = path.join fixtures, 'asar', 'a.asar', 'dir1'
|
||||||
|
dirs = fs.readdirSync p
|
||||||
|
assert.deepEqual dirs, ['file1', 'file2', 'file3', 'link1', 'link2']
|
||||||
|
|
||||||
|
it 'reads dirs from a linked dir', ->
|
||||||
|
p = path.join fixtures, 'asar', 'a.asar', 'link2', 'link2'
|
||||||
|
dirs = fs.readdirSync p
|
||||||
|
assert.deepEqual dirs, ['file1', 'file2', 'file3', 'link1', 'link2']
|
||||||
|
|
||||||
|
it 'throws ENOENT error when can not find file', ->
|
||||||
|
p = path.join fixtures, 'asar', 'a.asar', 'not-exist'
|
||||||
|
throws = -> fs.readdirSync p
|
||||||
|
assert.throws throws, /ENOENT/
|
||||||
|
|
||||||
|
describe 'fs.readdir', ->
|
||||||
|
it 'reads dirs from root', (done) ->
|
||||||
|
p = path.join fixtures, 'asar', 'a.asar'
|
||||||
|
dirs = fs.readdir p, (err, dirs) ->
|
||||||
|
assert.equal err, null
|
||||||
|
assert.deepEqual dirs, ['dir1', 'dir2', 'dir3', 'file1', 'file2', 'file3', 'link1', 'link2', 'ping.js']
|
||||||
|
done()
|
||||||
|
|
||||||
|
it 'reads dirs from a normal dir', (done) ->
|
||||||
|
p = path.join fixtures, 'asar', 'a.asar', 'dir1'
|
||||||
|
dirs = fs.readdir p, (err, dirs) ->
|
||||||
|
assert.equal err, null
|
||||||
|
assert.deepEqual dirs, ['file1', 'file2', 'file3', 'link1', 'link2']
|
||||||
|
done()
|
||||||
|
|
||||||
|
it 'reads dirs from a linked dir', (done) ->
|
||||||
|
p = path.join fixtures, 'asar', 'a.asar', 'link2', 'link2'
|
||||||
|
dirs = fs.readdir p, (err, dirs) ->
|
||||||
|
assert.equal err, null
|
||||||
|
assert.deepEqual dirs, ['file1', 'file2', 'file3', 'link1', 'link2']
|
||||||
|
done()
|
||||||
|
|
||||||
|
it 'throws ENOENT error when can not find file', (done) ->
|
||||||
|
p = path.join fixtures, 'asar', 'a.asar', 'not-exist'
|
||||||
|
fs.readdir p, (err, stats) ->
|
||||||
|
assert.equal err.code, 'ENOENT'
|
||||||
|
done()
|
||||||
|
|
||||||
|
describe 'fs.openSync', ->
|
||||||
|
it 'opens a normal/linked/under-linked-directory file', ->
|
||||||
|
for file in ['file1', 'link1', path.join('link2', 'file1')]
|
||||||
|
p = path.join fixtures, 'asar', 'a.asar', file
|
||||||
|
fd = fs.openSync p, 'r'
|
||||||
|
buffer = new Buffer(6)
|
||||||
|
fs.readSync fd, buffer, 0, 6, 0
|
||||||
|
assert.equal String(buffer), 'file1\n'
|
||||||
|
fs.closeSync fd
|
||||||
|
|
||||||
|
it 'throws ENOENT error when can not find file', ->
|
||||||
|
p = path.join fixtures, 'asar', 'a.asar', 'not-exist'
|
||||||
|
throws = -> fs.openSync p
|
||||||
|
assert.throws throws, /ENOENT/
|
||||||
|
|
||||||
|
describe 'fs.open', ->
|
||||||
|
it 'opens a normal file', (done) ->
|
||||||
|
p = path.join fixtures, 'asar', 'a.asar', 'file1'
|
||||||
|
fs.open p, 'r', (err, fd) ->
|
||||||
|
assert.equal err, null
|
||||||
|
buffer = new Buffer(6)
|
||||||
|
fs.read fd, buffer, 0, 6, 0, (err) ->
|
||||||
|
assert.equal err, null
|
||||||
|
assert.equal String(buffer), 'file1\n'
|
||||||
|
fs.close fd, done
|
||||||
|
|
||||||
|
it 'throws ENOENT error when can not find file', (done) ->
|
||||||
|
p = path.join fixtures, 'asar', 'a.asar', 'not-exist'
|
||||||
|
fs.open p, (err, stats) ->
|
||||||
|
assert.equal err.code, 'ENOENT'
|
||||||
|
done()
|
||||||
|
|
||||||
|
describe 'child_process.fork', ->
|
||||||
|
child_process = require 'child_process'
|
||||||
|
|
||||||
|
it 'opens a normal js file', (done) ->
|
||||||
|
child = child_process.fork path.join(fixtures, 'asar', 'a.asar', 'ping.js')
|
||||||
|
child.on 'message', (msg) ->
|
||||||
|
assert.equal msg, 'message'
|
||||||
|
done()
|
||||||
|
child.send 'message'
|
||||||
|
|
||||||
|
it 'throws ENOENT error when can not find file', ->
|
||||||
|
p = path.join fixtures, 'asar', 'a.asar', 'not-exist'
|
||||||
|
throws = -> child_process.fork p
|
||||||
|
assert.throws throws, /ENOENT/
|
||||||
|
|
||||||
|
describe 'asar protocol', ->
|
||||||
|
it 'can request a file in package', (done) ->
|
||||||
|
p = path.resolve fixtures, 'asar', 'a.asar', 'file1'
|
||||||
|
$.get "asar:#{p}", (data) ->
|
||||||
|
assert.equal data, 'file1\n'
|
||||||
|
done()
|
||||||
|
|
||||||
|
it 'can request a linked file in package', (done) ->
|
||||||
|
p = path.resolve fixtures, 'asar', 'a.asar', 'link2', 'link1'
|
||||||
|
$.get "asar:#{p}", (data) ->
|
||||||
|
assert.equal data, 'file1\n'
|
||||||
|
done()
|
||||||
|
|
||||||
|
it 'can request a file in filesystem', (done) ->
|
||||||
|
p = path.resolve fixtures, 'asar', 'file'
|
||||||
|
$.get "asar:#{p}", (data) ->
|
||||||
|
assert.equal data, 'file\n'
|
||||||
|
done()
|
||||||
|
|
||||||
|
it 'gets 404 when file is not found', (done) ->
|
||||||
|
p = path.resolve fixtures, 'asar', 'a.asar', 'no-exist'
|
||||||
|
$.ajax
|
||||||
|
url: "asar:#{p}"
|
||||||
|
error: (err) ->
|
||||||
|
assert.equal err.status, 404
|
||||||
|
done()
|
BIN
spec/fixtures/asar/a.asar
vendored
Normal file
BIN
spec/fixtures/asar/a.asar
vendored
Normal file
Binary file not shown.
1
spec/fixtures/asar/file
vendored
Normal file
1
spec/fixtures/asar/file
vendored
Normal file
|
@ -0,0 +1 @@
|
||||||
|
file
|
2
vendor/native_mate
vendored
2
vendor/native_mate
vendored
|
@ -1 +1 @@
|
||||||
Subproject commit 12f4e9b7ea0038e58e52839142eff0a4d17069bf
|
Subproject commit c5b39126ee7388acc61a25ac6b5fefb7a2cd6262
|
Loading…
Add table
Add a link
Reference in a new issue