diff --git a/atom.gyp b/atom.gyp
index fe8af55045d0..b98b5e2b64f5 100644
--- a/atom.gyp
+++ b/atom.gyp
@@ -39,6 +39,7 @@
       'atom/common/api/lib/screen.coffee',
       'atom/common/api/lib/shell.coffee',
       'atom/common/lib/init.coffee',
+      'atom/common/lib/asar.coffee',
       'atom/renderer/lib/chrome-api.coffee',
       'atom/renderer/lib/init.coffee',
       'atom/renderer/lib/inspector.coffee',
@@ -121,6 +122,10 @@
       'atom/browser/native_window_observer.h',
       'atom/browser/net/adapter_request_job.cc',
       'atom/browser/net/adapter_request_job.h',
+      'atom/browser/net/asar/asar_protocol_handler.cc',
+      'atom/browser/net/asar/asar_protocol_handler.h',
+      'atom/browser/net/asar/url_request_asar_job.cc',
+      'atom/browser/net/asar/url_request_asar_job.h',
       'atom/browser/net/atom_url_request_job_factory.cc',
       'atom/browser/net/atom_url_request_job_factory.h',
       'atom/browser/net/url_request_string_job.cc',
@@ -174,6 +179,7 @@
       'atom/browser/window_list.h',
       'atom/browser/window_list_observer.h',
       'atom/common/api/api_messages.h',
+      'atom/common/api/atom_api_asar.cc',
       'atom/common/api/atom_api_clipboard.cc',
       'atom/common/api/atom_api_crash_reporter.cc',
       'atom/common/api/atom_api_id_weak_map.cc',
@@ -186,6 +192,10 @@
       'atom/common/api/atom_bindings.h',
       'atom/common/api/object_life_monitor.cc',
       'atom/common/api/object_life_monitor.h',
+      'atom/common/asar/archive.cc',
+      'atom/common/asar/archive.h',
+      'atom/common/asar/scoped_temporary_file.cc',
+      'atom/common/asar/scoped_temporary_file.h',
       'atom/common/common_message_generator.cc',
       'atom/common/common_message_generator.h',
       'atom/common/crash_reporter/crash_reporter.cc',
diff --git a/atom/browser/api/atom_api_app.cc b/atom/browser/api/atom_api_app.cc
index 858e6ef197e8..7331b66a8087 100644
--- a/atom/browser/api/atom_api_app.cc
+++ b/atom/browser/api/atom_api_app.cc
@@ -91,6 +91,10 @@ void App::OnWindowAllClosed() {
   Emit("window-all-closed");
 }
 
+void App::OnQuit() {
+  Emit("quit");
+}
+
 void App::OnOpenFile(bool* prevent_default, const std::string& file_path) {
   base::ListValue args;
   args.AppendString(file_path);
diff --git a/atom/browser/api/atom_api_app.h b/atom/browser/api/atom_api_app.h
index 0fcbf2c0a907..2afa563558f4 100644
--- a/atom/browser/api/atom_api_app.h
+++ b/atom/browser/api/atom_api_app.h
@@ -36,6 +36,7 @@ class App : public mate::EventEmitter,
   // BrowserObserver implementations:
   virtual void OnWillQuit(bool* prevent_default) OVERRIDE;
   virtual void OnWindowAllClosed() OVERRIDE;
+  virtual void OnQuit() OVERRIDE;
   virtual void OnOpenFile(bool* prevent_default,
                           const std::string& file_path) OVERRIDE;
   virtual void OnOpenURL(const std::string& url) OVERRIDE;
diff --git a/atom/browser/atom_browser_context.cc b/atom/browser/atom_browser_context.cc
index d3b40fe1ef99..4c4ac3d384c9 100644
--- a/atom/browser/atom_browser_context.cc
+++ b/atom/browser/atom_browser_context.cc
@@ -6,6 +6,7 @@
 
 #include "atom/browser/atom_browser_main_parts.h"
 #include "atom/browser/net/atom_url_request_job_factory.h"
+#include "atom/browser/net/asar/asar_protocol_handler.h"
 #include "base/threading/sequenced_worker_pool.h"
 #include "base/threading/worker_pool.h"
 #include "chrome/browser/browser_process.h"
@@ -20,6 +21,12 @@ using content::BrowserThread;
 
 namespace atom {
 
+namespace {
+
+const char* kAsarScheme = "asar";
+
+}  // namespace
+
 AtomBrowserContext::AtomBrowserContext()
     : fake_browser_process_(new BrowserProcess),
       job_factory_(new AtomURLRequestJobFactory) {
@@ -44,6 +51,10 @@ net::URLRequestJobFactory* AtomBrowserContext::CreateURLRequestJobFactory(
       url::kFileScheme, new net::FileProtocolHandler(
           BrowserThread::GetBlockingPool()->GetTaskRunnerWithShutdownBehavior(
               base::SequencedWorkerPool::SKIP_ON_SHUTDOWN)));
+  job_factory->SetProtocolHandler(
+      kAsarScheme, new asar::AsarProtocolHandler(
+          BrowserThread::GetBlockingPool()->GetTaskRunnerWithShutdownBehavior(
+              base::SequencedWorkerPool::SKIP_ON_SHUTDOWN)));
 
   // Set up interceptors in the reverse order.
   scoped_ptr<net::URLRequestJobFactory> top_job_factory =
diff --git a/atom/browser/browser.cc b/atom/browser/browser.cc
index afea053c66e2..9f31177d31ae 100644
--- a/atom/browser/browser.cc
+++ b/atom/browser/browser.cc
@@ -37,6 +37,8 @@ void Browser::Quit() {
 }
 
 void Browser::Shutdown() {
+  FOR_EACH_OBSERVER(BrowserObserver, observers_, OnQuit());
+
   is_quiting_ = true;
   base::MessageLoop::current()->Quit();
 }
diff --git a/atom/browser/browser_observer.h b/atom/browser/browser_observer.h
index 0f340b007947..df0243abe583 100644
--- a/atom/browser/browser_observer.h
+++ b/atom/browser/browser_observer.h
@@ -18,6 +18,9 @@ class BrowserObserver {
   // method will not be called, instead it will call OnWillQuit.
   virtual void OnWindowAllClosed() {}
 
+  // The browser is quitting.
+  virtual void OnQuit() {}
+
   // The browser has opened a file by double clicking in Finder or dragging the
   // file to the Dock icon. (OS X only)
   virtual void OnOpenFile(bool* prevent_default,
diff --git a/atom/browser/lib/init.coffee b/atom/browser/lib/init.coffee
index d25179c1ba49..ddfa56169611 100644
--- a/atom/browser/lib/init.coffee
+++ b/atom/browser/lib/init.coffee
@@ -52,20 +52,25 @@ setImmediate ->
       detail: message
       buttons: ['OK']
 
+  # Emit 'exit' event on quit.
+  require('app').on 'quit', ->
+    process.emit 'exit'
+
   # Load the RPC server.
   require './rpc-server.js'
 
   # Now we try to load app's package.json.
   packageJson = null
-  packagePath = path.join process.resourcesPath, 'app'
-  try
-    # First we try to load process.resourcesPath/app
-    packageJson = JSON.parse(fs.readFileSync(path.join(packagePath, 'package.json')))
-  catch error
-    # If not found then we load browser/default_app
-    packagePath = path.join process.resourcesPath, 'default_app'
-    packageJson = JSON.parse(fs.readFileSync(path.join(packagePath, 'package.json')))
+  searchPaths = [ 'app', 'app.asar', 'default_app' ]
+  for packagePath in searchPaths
+    try
+      packagePath = path.join process.resourcesPath, packagePath
+      packageJson = JSON.parse(fs.readFileSync(path.join(packagePath, 'package.json')))
+    catch e
+      continue
+
+  throw new Error("Unable to find a valid app") unless packageJson?
 
   # Set application's version.
   app = require 'app'
   app.setVersion packageJson.version if packageJson.version?
@@ -80,7 +85,7 @@ setImmediate ->
   # Set application's desktop name.
   if packageJson.desktopName?
     app.setDesktopName packageJson.desktopName
-  else
+  else 
     app.setDesktopName '#{app.getName()}.desktop'
 
   # Load the chrome extension support.
diff --git a/atom/browser/net/asar/asar_protocol_handler.cc b/atom/browser/net/asar/asar_protocol_handler.cc
new file mode 100644
index 000000000000..04f0aa94dd3f
--- /dev/null
+++ b/atom/browser/net/asar/asar_protocol_handler.cc
@@ -0,0 +1,91 @@
+// Copyright (c) 2014 GitHub, Inc. All rights reserved.
+// Use of this source code is governed by the MIT license that can be
+// found in the LICENSE file.
+
+#include "atom/browser/net/asar/asar_protocol_handler.h"
+
+#include "atom/browser/net/asar/url_request_asar_job.h"
+#include "atom/common/asar/archive.h"
+#include "net/base/filename_util.h"
+#include "net/base/net_errors.h"
+#include "net/url_request/url_request_error_job.h"
+#include "net/url_request/url_request_file_job.h"
+
+namespace asar {
+
+namespace {
+
+const base::FilePath::CharType kAsarExtension[] = FILE_PATH_LITERAL(".asar");
+
+// Get the relative path in asar archive.
+bool GetAsarPath(const base::FilePath& full_path,
+                 base::FilePath* asar_path,
+                 base::FilePath* relative_path) {
+  base::FilePath iter = full_path;
+  while (true) {
+    base::FilePath dirname = iter.DirName();
+    if (iter.MatchesExtension(kAsarExtension))
+      break;
+    else if (iter == dirname)
+      return false;
+    iter = dirname;
+  }
+
+  base::FilePath tail;
+  if (!iter.AppendRelativePath(full_path, &tail))
+    return false;
+
+  *asar_path = iter;
+  *relative_path = tail;
+  return true;
+}
+
+}  // namespace
+
+AsarProtocolHandler::AsarProtocolHandler(
+    const scoped_refptr<base::TaskRunner>& file_task_runner)
+    : file_task_runner_(file_task_runner) {}
+
+AsarProtocolHandler::~AsarProtocolHandler() {
+}
+
+Archive* AsarProtocolHandler::GetOrCreateAsarArchive(
+    const base::FilePath& path) const {
+  if (!archives_.contains(path)) {
+    scoped_ptr<Archive> archive(new Archive(path));
+    if (!archive->Init())
+      return nullptr;
+
+    archives_.set(path, archive.Pass());
+  }
+
+  return archives_.get(path);
+}
+
+net::URLRequestJob* AsarProtocolHandler::MaybeCreateJob(
+    net::URLRequest* request,
+    net::NetworkDelegate* network_delegate) const {
+  base::FilePath full_path;
+  net::FileURLToFilePath(request->url(), &full_path);
+
+  // Create asar:// job when the path contains "xxx.asar/", otherwise treat the
+  // URL request as file://.
+  base::FilePath asar_path, relative_path;
+  if (!GetAsarPath(full_path, &asar_path, &relative_path))
+    return new net::URLRequestFileJob(request, network_delegate, full_path,
+                                      file_task_runner_);
+
+  Archive* archive = GetOrCreateAsarArchive(asar_path);
+  if (!archive)
+    return new net::URLRequestErrorJob(request, network_delegate,
+                                       net::ERR_FILE_NOT_FOUND);
+
+  return new URLRequestAsarJob(request, network_delegate, archive,
+                               relative_path, file_task_runner_);
+}
+
+bool AsarProtocolHandler::IsSafeRedirectTarget(const GURL& location) const {
+  return false;
+}
+
+}  // namespace asar
diff --git a/atom/browser/net/asar/asar_protocol_handler.h b/atom/browser/net/asar/asar_protocol_handler.h
new file mode 100644
index 000000000000..e88ee9b8691b
--- /dev/null
+++ b/atom/browser/net/asar/asar_protocol_handler.h
@@ -0,0 +1,45 @@
+// Copyright (c) 2014 GitHub, Inc. All rights reserved.
+// Use of this source code is governed by the MIT license that can be
+// found in the LICENSE file.
+
+#ifndef ATOM_BROWSER_NET_ASAR_ASAR_PROTOCOL_HANDLER_H_
+#define ATOM_BROWSER_NET_ASAR_ASAR_PROTOCOL_HANDLER_H_
+
+#include "base/containers/scoped_ptr_hash_map.h"
+#include "base/files/file_path.h"
+#include "base/memory/ref_counted.h"
+#include "net/url_request/url_request_job_factory.h"
+
+namespace base {
+class TaskRunner;
+}
+
+namespace asar {
+
+class Archive;
+
+class AsarProtocolHandler : public net::URLRequestJobFactory::ProtocolHandler {
+ public:
+  explicit AsarProtocolHandler(
+      const scoped_refptr<base::TaskRunner>& file_task_runner);
+  virtual ~AsarProtocolHandler();
+
+  Archive* GetOrCreateAsarArchive(const base::FilePath& path) const;
+
+  // net::URLRequestJobFactory::ProtocolHandler:
+  virtual net::URLRequestJob* MaybeCreateJob(
+      net::URLRequest* request,
+      net::NetworkDelegate* network_delegate) const OVERRIDE;
+  virtual bool IsSafeRedirectTarget(const GURL& location) const OVERRIDE;
+
+ private:
+  const scoped_refptr<base::TaskRunner> file_task_runner_;
+
+  mutable base::ScopedPtrHashMap<base::FilePath, Archive> archives_;
+
+  DISALLOW_COPY_AND_ASSIGN(AsarProtocolHandler);
+};
+
+}  // namespace asar
+
+#endif  // ATOM_BROWSER_NET_ASAR_ASAR_PROTOCOL_HANDLER_H_
diff --git a/atom/browser/net/asar/url_request_asar_job.cc b/atom/browser/net/asar/url_request_asar_job.cc
new file mode 100644
index 000000000000..a66eaf57d717
--- /dev/null
+++ b/atom/browser/net/asar/url_request_asar_job.cc
@@ -0,0 +1,142 @@
+// Copyright (c) 2014 GitHub, Inc. All rights reserved.
+// Use of this source code is governed by the MIT license that can be
+// found in the LICENSE file.
+
+#include "atom/browser/net/asar/url_request_asar_job.h"
+
+#include <string>
+
+#include "net/base/file_stream.h"
+#include "net/base/io_buffer.h"
+#include "net/base/mime_util.h"
+#include "net/base/net_errors.h"
+#include "net/url_request/url_request_status.h"
+
+namespace asar {
+
+URLRequestAsarJob::URLRequestAsarJob(
+    net::URLRequest* request,
+    net::NetworkDelegate* network_delegate,
+    Archive* archive,
+    const base::FilePath& file_path,
+    const scoped_refptr<base::TaskRunner>& file_task_runner)
+    : net::URLRequestJob(request, network_delegate),
+      archive_(archive),
+      file_path_(file_path),
+      stream_(new net::FileStream(file_task_runner)),
+      remaining_bytes_(0),
+      file_task_runner_(file_task_runner),
+      weak_ptr_factory_(this) {}
+
+URLRequestAsarJob::~URLRequestAsarJob() {}
+
+void URLRequestAsarJob::Start() {
+  if (!archive_ || !archive_->GetFileInfo(file_path_, &file_info_)) {
+    NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED,
+                                     net::ERR_FILE_NOT_FOUND));
+    return;
+  }
+
+  remaining_bytes_ = static_cast<int64>(file_info_.size);
+
+  int flags = base::File::FLAG_OPEN |
+              base::File::FLAG_READ |
+              base::File::FLAG_ASYNC;
+  int rv = stream_->Open(archive_->path(), flags,
+                         base::Bind(&URLRequestAsarJob::DidOpen,
+                                    weak_ptr_factory_.GetWeakPtr()));
+  if (rv != net::ERR_IO_PENDING)
+    DidOpen(rv);
+}
+
+void URLRequestAsarJob::Kill() {
+  weak_ptr_factory_.InvalidateWeakPtrs();
+  URLRequestJob::Kill();
+}
+
+bool URLRequestAsarJob::ReadRawData(net::IOBuffer* dest,
+                                    int dest_size,
+                                    int* bytes_read) {
+  if (remaining_bytes_ < dest_size)
+    dest_size = static_cast<int>(remaining_bytes_);
+
+  // If we should copy zero bytes because |remaining_bytes_| is zero, short
+  // circuit here.
+  if (!dest_size) {
+    *bytes_read = 0;
+    return true;
+  }
+
+  int rv = stream_->Read(dest,
+                         dest_size,
+                         base::Bind(&URLRequestAsarJob::DidRead,
+                                    weak_ptr_factory_.GetWeakPtr(),
+                                    make_scoped_refptr(dest)));
+  if (rv >= 0) {
+    // Data is immediately available.
+    *bytes_read = rv;
+    remaining_bytes_ -= rv;
+    DCHECK_GE(remaining_bytes_, 0);
+    return true;
+  }
+
+  // Otherwise, a read error occured.  We may just need to wait...
+  if (rv == net::ERR_IO_PENDING) {
+    SetStatus(net::URLRequestStatus(net::URLRequestStatus::IO_PENDING, 0));
+  } else {
+    NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, rv));
+  }
+  return false;
+}
+
+bool URLRequestAsarJob::GetMimeType(std::string* mime_type) const {
+  return net::GetMimeTypeFromFile(file_path_, mime_type);
+}
+
+void URLRequestAsarJob::DidOpen(int result) {
+  if (result != net::OK) {
+    NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, result));
+    return;
+  }
+
+  int rv = stream_->Seek(net::FROM_BEGIN,
+                         file_info_.offset,
+                         base::Bind(&URLRequestAsarJob::DidSeek,
+                                    weak_ptr_factory_.GetWeakPtr()));
+  if (rv != net::ERR_IO_PENDING) {
+    // stream_->Seek() failed, so pass an intentionally erroneous value
+    // into DidSeek().
+    DidSeek(-1);
+  }
+}
+
+void URLRequestAsarJob::DidSeek(int64 result) {
+  if (result != static_cast<int64>(file_info_.offset)) {
+    NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED,
+                                     net::ERR_REQUEST_RANGE_NOT_SATISFIABLE));
+    return;
+  }
+
+  set_expected_content_size(remaining_bytes_);
+  NotifyHeadersComplete();
+}
+
+void URLRequestAsarJob::DidRead(scoped_refptr<net::IOBuffer> buf, int result) {
+  if (result > 0) {
+    SetStatus(net::URLRequestStatus());  // Clear the IO_PENDING status
+    remaining_bytes_ -= result;
+    DCHECK_GE(remaining_bytes_, 0);
+  }
+
+  buf = NULL;
+
+  if (result == 0) {
+    NotifyDone(net::URLRequestStatus());
+  } else if (result < 0) {
+    NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, result));
+  }
+
+  NotifyReadComplete(result);
+}
+
+}  // namespace asar
diff --git a/atom/browser/net/asar/url_request_asar_job.h b/atom/browser/net/asar/url_request_asar_job.h
new file mode 100644
index 000000000000..f228d4312c74
--- /dev/null
+++ b/atom/browser/net/asar/url_request_asar_job.h
@@ -0,0 +1,72 @@
+// Copyright (c) 2014 GitHub, Inc. All rights reserved.
+// Use of this source code is governed by the MIT license that can be
+// found in the LICENSE file.
+
+#ifndef ATOM_BROWSER_NET_ASAR_URL_REQUEST_ASAR_JOB_H_
+#define ATOM_BROWSER_NET_ASAR_URL_REQUEST_ASAR_JOB_H_
+
+#include <string>
+
+#include "atom/common/asar/archive.h"
+#include "base/files/file_path.h"
+#include "base/memory/ref_counted.h"
+#include "base/memory/weak_ptr.h"
+#include "net/url_request/url_request_job.h"
+
+namespace base {
+class TaskRunner;
+}
+
+namespace net {
+class FileStream;
+}
+
+namespace asar {
+
+class URLRequestAsarJob : public net::URLRequestJob {
+ public:
+  URLRequestAsarJob(net::URLRequest* request,
+                    net::NetworkDelegate* network_delegate,
+                    Archive* archive,
+                    const base::FilePath& file_path,
+                    const scoped_refptr<base::TaskRunner>& file_task_runner);
+
+  // net::URLRequestJob:
+  virtual void Start() OVERRIDE;
+  virtual void Kill() OVERRIDE;
+  virtual bool ReadRawData(net::IOBuffer* buf,
+                           int buf_size,
+                           int* bytes_read) OVERRIDE;
+  virtual bool GetMimeType(std::string* mime_type) const OVERRIDE;
+
+ protected:
+  virtual ~URLRequestAsarJob();
+
+ private:
+  // Callback after opening file on a background thread.
+  void DidOpen(int result);
+
+  // Callback after seeking to the beginning of |byte_range_| in the file
+  // on a background thread.
+  void DidSeek(int64 result);
+
+  // Callback after data is asynchronously read from the file into |buf|.
+  void DidRead(scoped_refptr<net::IOBuffer> buf, int result);
+
+  Archive* archive_;
+  Archive::FileInfo file_info_;
+  base::FilePath file_path_;
+
+  scoped_ptr<net::FileStream> stream_;
+  int64 remaining_bytes_;
+
+  const scoped_refptr<base::TaskRunner> file_task_runner_;
+
+  base::WeakPtrFactory<URLRequestAsarJob> weak_ptr_factory_;
+
+  DISALLOW_COPY_AND_ASSIGN(URLRequestAsarJob);
+};
+
+}  // namespace asar
+
+#endif  // ATOM_BROWSER_NET_ASAR_URL_REQUEST_ASAR_JOB_H_
diff --git a/atom/common/api/atom_api_asar.cc b/atom/common/api/atom_api_asar.cc
new file mode 100644
index 000000000000..5cc2616ac0b3
--- /dev/null
+++ b/atom/common/api/atom_api_asar.cc
@@ -0,0 +1,117 @@
+// Copyright (c) 2014 GitHub, Inc. All rights reserved.
+// Use of this source code is governed by the MIT license that can be
+// found in the LICENSE file.
+
+#include <vector>
+
+#include "atom/common/asar/archive.h"
+#include "atom/common/native_mate_converters/file_path_converter.h"
+#include "native_mate/arguments.h"
+#include "native_mate/dictionary.h"
+#include "native_mate/object_template_builder.h"
+#include "native_mate/wrappable.h"
+
+#include "atom/common/node_includes.h"
+
+namespace {
+
+class Archive : public mate::Wrappable {
+ public:
+  static v8::Handle<v8::Value> Create(v8::Isolate* isolate,
+                                      const base::FilePath& path) {
+    scoped_ptr<asar::Archive> archive(new asar::Archive(path));
+    if (!archive->Init())
+      return v8::False(isolate);
+    return (new Archive(archive.Pass()))->GetWrapper(isolate);
+  }
+
+ protected:
+  explicit Archive(scoped_ptr<asar::Archive> archive)
+      : archive_(archive.Pass()) {}
+
+  // Reads the offset and size of file.
+  v8::Handle<v8::Value> GetFileInfo(v8::Isolate* isolate,
+                                    const base::FilePath& path) {
+    asar::Archive::FileInfo info;
+    if (!archive_ || !archive_->GetFileInfo(path, &info))
+      return v8::False(isolate);
+    mate::Dictionary dict(isolate, v8::Object::New(isolate));
+    dict.Set("size", info.size);
+    dict.Set("offset", info.offset);
+    return dict.GetHandle();
+  }
+
+  // Returns a fake result of fs.stat(path).
+  v8::Handle<v8::Value> Stat(v8::Isolate* isolate,
+                             const base::FilePath& path) {
+    asar::Archive::Stats stats;
+    if (!archive_ || !archive_->Stat(path, &stats))
+      return v8::False(isolate);
+    mate::Dictionary dict(isolate, v8::Object::New(isolate));
+    dict.Set("size", stats.size);
+    dict.Set("offset", stats.offset);
+    dict.Set("isFile", stats.is_file);
+    dict.Set("isDirectory", stats.is_directory);
+    dict.Set("isLink", stats.is_link);
+    return dict.GetHandle();
+  }
+
+  // Returns all files under a directory.
+  v8::Handle<v8::Value> Readdir(v8::Isolate* isolate,
+                                const base::FilePath& path) {
+    std::vector<base::FilePath> files;
+    if (!archive_ || !archive_->Readdir(path, &files))
+      return v8::False(isolate);
+    return mate::ConvertToV8(isolate, files);
+  }
+
+  // Returns the path of file with symbol link resolved.
+  v8::Handle<v8::Value> Realpath(v8::Isolate* isolate,
+                                 const base::FilePath& path) {
+    base::FilePath realpath;
+    if (!archive_ || !archive_->Realpath(path, &realpath))
+      return v8::False(isolate);
+    return mate::ConvertToV8(isolate, realpath);
+  }
+
+  // Copy the file out into a temporary file and returns the new path.
+  v8::Handle<v8::Value> CopyFileOut(v8::Isolate* isolate,
+                                    const base::FilePath& path) {
+    base::FilePath new_path;
+    if (!archive_ || !archive_->CopyFileOut(path, &new_path))
+      return v8::False(isolate);
+    return mate::ConvertToV8(isolate, new_path);
+  }
+
+  // Free the resources used by archive.
+  void Destroy() {
+    archive_.reset();
+  }
+
+  // mate::Wrappable:
+  mate::ObjectTemplateBuilder GetObjectTemplateBuilder(v8::Isolate* isolate) {
+    return mate::ObjectTemplateBuilder(isolate)
+        .SetValue("path", archive_->path())
+        .SetMethod("getFileInfo", &Archive::GetFileInfo)
+        .SetMethod("stat", &Archive::Stat)
+        .SetMethod("readdir", &Archive::Readdir)
+        .SetMethod("realpath", &Archive::Realpath)
+        .SetMethod("copyFileOut", &Archive::CopyFileOut)
+        .SetMethod("destroy", &Archive::Destroy);
+  }
+
+ private:
+  scoped_ptr<asar::Archive> archive_;
+
+  DISALLOW_COPY_AND_ASSIGN(Archive);
+};
+
+void Initialize(v8::Handle<v8::Object> exports, v8::Handle<v8::Value> unused,
+                v8::Handle<v8::Context> context, void* priv) {
+  mate::Dictionary dict(context->GetIsolate(), exports);
+  dict.SetMethod("createArchive", &Archive::Create);
+}
+
+}  // namespace
+
+NODE_MODULE_CONTEXT_AWARE_BUILTIN(atom_common_asar, Initialize)
diff --git a/atom/common/asar/archive.cc b/atom/common/asar/archive.cc
new file mode 100644
index 000000000000..5ff97f267bb6
--- /dev/null
+++ b/atom/common/asar/archive.cc
@@ -0,0 +1,252 @@
+// Copyright (c) 2014 GitHub, Inc. All rights reserved.
+// Use of this source code is governed by the MIT license that can be
+// found in the LICENSE file.
+
+#include "atom/common/asar/archive.h"
+
+#include <string>
+#include <vector>
+
+#include "atom/common/asar/scoped_temporary_file.h"
+#include "base/files/file.h"
+#include "base/logging.h"
+#include "base/pickle.h"
+#include "base/json/json_string_value_serializer.h"
+#include "base/strings/string_number_conversions.h"
+
+namespace asar {
+
+namespace {
+
+#if defined(OS_WIN)
+const char kSeparators[] = "\\/";
+#else
+const char kSeparators[] = "/";
+#endif
+
+bool GetNodeFromPath(std::string path,
+                     const base::DictionaryValue* root,
+                     const base::DictionaryValue** out);
+
+// Gets the "files" from "dir".
+bool GetFilesNode(const base::DictionaryValue* root,
+                  const base::DictionaryValue* dir,
+                  const base::DictionaryValue** out) {
+  // Test for symbol linked directory.
+  std::string link;
+  if (dir->GetStringWithoutPathExpansion("link", &link)) {
+    const base::DictionaryValue* linked_node = NULL;
+    if (!GetNodeFromPath(link, root, &linked_node))
+      return false;
+    dir = linked_node;
+  }
+
+  return dir->GetDictionaryWithoutPathExpansion("files", out);
+}
+
+// Gets sub-file "name" from "dir".
+bool GetChildNode(const base::DictionaryValue* root,
+                  const std::string& name,
+                  const base::DictionaryValue* dir,
+                  const base::DictionaryValue** out) {
+  const base::DictionaryValue* files = NULL;
+  return GetFilesNode(root, dir, &files) &&
+         files->GetDictionaryWithoutPathExpansion(name, out);
+}
+
+// Gets the node of "path" from "root".
+bool GetNodeFromPath(std::string path,
+                     const base::DictionaryValue* root,
+                     const base::DictionaryValue** out) {
+  if (path == "") {
+    *out = root;
+    return true;
+  }
+
+  const base::DictionaryValue* dir = root;
+  for (size_t delimiter_position = path.find_first_of(kSeparators);
+       delimiter_position != std::string::npos;
+       delimiter_position = path.find_first_of(kSeparators)) {
+    const base::DictionaryValue* child = NULL;
+    if (!GetChildNode(root, path.substr(0, delimiter_position), dir, &child))
+      return false;
+
+    dir = child;
+    path.erase(0, delimiter_position + 1);
+  }
+
+  return GetChildNode(root, path, dir, out);
+}
+
+bool FillFileInfoWithNode(Archive::FileInfo* info,
+                          uint32 header_size,
+                          const base::DictionaryValue* node) {
+  std::string offset;
+  if (!node->GetString("offset", &offset))
+    return false;
+  if (!base::StringToUint64(offset, &info->offset))
+    return false;
+
+  int size;
+  if (!node->GetInteger("size", &size))
+    return false;
+
+  info->offset += header_size;
+  info->size = static_cast<uint32>(size);
+  return true;
+}
+
+}  // namespace
+
+Archive::Archive(const base::FilePath& path)
+    : path_(path),
+      header_size_(0) {
+}
+
+Archive::~Archive() {
+}
+
+bool Archive::Init() {
+  base::File file(path_, base::File::FLAG_OPEN | base::File::FLAG_READ);
+  if (!file.IsValid())
+    return false;
+
+  std::vector<char> buf;
+  int len;
+
+  buf.resize(8);
+  len = file.ReadAtCurrentPos(buf.data(), buf.size());
+  if (len != static_cast<int>(buf.size())) {
+    PLOG(ERROR) << "Failed to read header size from " << path_.value();
+    return false;
+  }
+
+  uint32 size;
+  if (!PickleIterator(Pickle(buf.data(), buf.size())).ReadUInt32(&size)) {
+    LOG(ERROR) << "Failed to parse header size from " << path_.value();
+    return false;
+  }
+
+  buf.resize(size);
+  len = file.ReadAtCurrentPos(buf.data(), buf.size());
+  if (len != static_cast<int>(buf.size())) {
+    PLOG(ERROR) << "Failed to read header from " << path_.value();
+    return false;
+  }
+
+  std::string header;
+  if (!PickleIterator(Pickle(buf.data(), buf.size())).ReadString(&header)) {
+    LOG(ERROR) << "Failed to parse header from " << path_.value();
+    return false;
+  }
+
+  std::string error;
+  JSONStringValueSerializer serializer(&header);
+  base::Value* value = serializer.Deserialize(NULL, &error);
+  if (!value || !value->IsType(base::Value::TYPE_DICTIONARY)) {
+    LOG(ERROR) << "Failed to parse header: " << error;
+    return false;
+  }
+
+  header_size_ = 8 + size;
+  header_.reset(static_cast<base::DictionaryValue*>(value));
+  return true;
+}
+
+bool Archive::GetFileInfo(const base::FilePath& path, FileInfo* info) {
+  if (!header_)
+    return false;
+
+  const base::DictionaryValue* node;
+  if (!GetNodeFromPath(path.AsUTF8Unsafe(), header_.get(), &node))
+    return false;
+
+  std::string link;
+  if (node->GetString("link", &link))
+    return GetFileInfo(base::FilePath::FromUTF8Unsafe(link), info);
+
+  return FillFileInfoWithNode(info, header_size_, node);
+}
+
+bool Archive::Stat(const base::FilePath& path, Stats* stats) {
+  if (!header_)
+    return false;
+
+  const base::DictionaryValue* node;
+  if (!GetNodeFromPath(path.AsUTF8Unsafe(), header_.get(), &node))
+    return false;
+
+  if (node->HasKey("link")) {
+    stats->is_file = false;
+    stats->is_link = true;
+    return true;
+  }
+
+  if (node->HasKey("files")) {
+    stats->is_file = false;
+    stats->is_directory = true;
+    return true;
+  }
+
+  return FillFileInfoWithNode(stats, header_size_, node);
+}
+
+bool Archive::Readdir(const base::FilePath& path,
+                      std::vector<base::FilePath>* list) {
+  if (!header_)
+    return false;
+
+  const base::DictionaryValue* node;
+  if (!GetNodeFromPath(path.AsUTF8Unsafe(), header_.get(), &node))
+    return false;
+
+  const base::DictionaryValue* files;
+  if (!GetFilesNode(header_.get(), node, &files))
+    return false;
+
+  base::DictionaryValue::Iterator iter(*files);
+  while (!iter.IsAtEnd()) {
+    list->push_back(base::FilePath::FromUTF8Unsafe(iter.key()));
+    iter.Advance();
+  }
+  return true;
+}
+
+bool Archive::Realpath(const base::FilePath& path, base::FilePath* realpath) {
+  if (!header_)
+    return false;
+
+  const base::DictionaryValue* node;
+  if (!GetNodeFromPath(path.AsUTF8Unsafe(), header_.get(), &node))
+    return false;
+
+  std::string link;
+  if (node->GetString("link", &link)) {
+    *realpath = base::FilePath::FromUTF8Unsafe(link);
+    return true;
+  }
+
+  *realpath = path;
+  return true;
+}
+
+bool Archive::CopyFileOut(const base::FilePath& path, base::FilePath* out) {
+  if (external_files_.contains(path)) {
+    *out = external_files_.get(path)->path();
+    return true;
+  }
+
+  FileInfo info;
+  if (!GetFileInfo(path, &info))
+    return false;
+
+  scoped_ptr<ScopedTemporaryFile> temp_file(new ScopedTemporaryFile);
+  if (!temp_file->InitFromFile(path_, info.offset, info.size))
+    return false;
+
+  *out = temp_file->path();
+  external_files_.set(path, temp_file.Pass());
+  return true;
+}
+
+}  // namespace asar
diff --git a/atom/common/asar/archive.h b/atom/common/asar/archive.h
new file mode 100644
index 000000000000..0058f0269d8a
--- /dev/null
+++ b/atom/common/asar/archive.h
@@ -0,0 +1,76 @@
+// Copyright (c) 2014 GitHub, Inc. All rights reserved.
+// Use of this source code is governed by the MIT license that can be
+// found in the LICENSE file.
+
+#ifndef ATOM_COMMON_ASAR_ARCHIVE_H_
+#define ATOM_COMMON_ASAR_ARCHIVE_H_
+
+#include <vector>
+
+#include "base/containers/scoped_ptr_hash_map.h"
+#include "base/files/file_path.h"
+#include "base/memory/scoped_ptr.h"
+
+namespace base {
+class DictionaryValue;
+}
+
+namespace asar {
+
+class ScopedTemporaryFile;
+
+// This class represents an asar package, and provides methods to read
+// information from it.
+class Archive {
+ public:
+  struct FileInfo {
+    FileInfo() : size(0), offset(0) {}
+    uint32 size;
+    uint64 offset;
+  };
+
+  struct Stats : public FileInfo {
+    Stats() : is_file(true), is_directory(false), is_link(false) {}
+    bool is_file;
+    bool is_directory;
+    bool is_link;
+  };
+
+  explicit Archive(const base::FilePath& path);
+  virtual ~Archive();
+
+  // Read and parse the header.
+  bool Init();
+
+  // Get the info of a file.
+  bool GetFileInfo(const base::FilePath& path, FileInfo* info);
+
+  // Fs.stat(path).
+  bool Stat(const base::FilePath& path, Stats* stats);
+
+  // Fs.readdir(path).
+  bool Readdir(const base::FilePath& path, std::vector<base::FilePath>* files);
+
+  // Fs.realpath(path).
+  bool Realpath(const base::FilePath& path, base::FilePath* realpath);
+
+  // Copy the file into a temporary file, and return the new path.
+  bool CopyFileOut(const base::FilePath& path, base::FilePath* out);
+
+  base::FilePath path() const { return path_; }
+  base::DictionaryValue* header() const { return header_.get(); }
+
+ private:
+  base::FilePath path_;
+  uint32 header_size_;
+  scoped_ptr<base::DictionaryValue> header_;
+
+  // Cached external temporary files.
+  base::ScopedPtrHashMap<base::FilePath, ScopedTemporaryFile> external_files_;
+
+  DISALLOW_COPY_AND_ASSIGN(Archive);
+};
+
+}  // namespace asar
+
+#endif  // ATOM_COMMON_ASAR_ARCHIVE_H_
diff --git a/atom/common/asar/scoped_temporary_file.cc b/atom/common/asar/scoped_temporary_file.cc
new file mode 100644
index 000000000000..799f09d88d21
--- /dev/null
+++ b/atom/common/asar/scoped_temporary_file.cc
@@ -0,0 +1,54 @@
+// Copyright (c) 2014 GitHub, Inc. All rights reserved.
+// Use of this source code is governed by the MIT license that can be
+// found in the LICENSE file.
+
+#include "atom/common/asar/scoped_temporary_file.h"
+
+#include <vector>
+
+#include "base/file_util.h"
+#include "base/threading/thread_restrictions.h"
+
+namespace asar {
+
+ScopedTemporaryFile::ScopedTemporaryFile() {
+}
+
+ScopedTemporaryFile::~ScopedTemporaryFile() {
+  if (!path_.empty()) {
+    base::ThreadRestrictions::ScopedAllowIO allow_io;
+    base::DeleteFile(path_, false);
+  }
+}
+
+bool ScopedTemporaryFile::Init() {
+  if (!path_.empty())
+    return true;
+
+  base::ThreadRestrictions::ScopedAllowIO allow_io;
+  return base::CreateTemporaryFile(&path_);
+}
+
+bool ScopedTemporaryFile::InitFromFile(const base::FilePath& path,
+                                       uint64 offset, uint64 size) {
+  if (!Init())
+    return false;
+
+  base::File src(path, base::File::FLAG_OPEN | base::File::FLAG_READ);
+  if (!src.IsValid())
+    return false;
+
+  std::vector<char> buf(size);
+  int len = src.Read(offset, buf.data(), buf.size());
+  if (len != static_cast<int>(size))
+    return false;
+
+  base::File dest(path_, base::File::FLAG_OPEN | base::File::FLAG_WRITE);
+  if (!dest.IsValid())
+    return false;
+
+  return dest.WriteAtCurrentPos(buf.data(), buf.size()) ==
+         static_cast<int>(size);
+}
+
+}  // namespace asar
diff --git a/atom/common/asar/scoped_temporary_file.h b/atom/common/asar/scoped_temporary_file.h
new file mode 100644
index 000000000000..69e76a05c56e
--- /dev/null
+++ b/atom/common/asar/scoped_temporary_file.h
@@ -0,0 +1,37 @@
+// Copyright (c) 2014 GitHub, Inc. All rights reserved.
+// Use of this source code is governed by the MIT license that can be
+// found in the LICENSE file.
+
+#ifndef ATOM_COMMON_ASAR_SCOPED_TEMPORARY_FILE_H_
+#define ATOM_COMMON_ASAR_SCOPED_TEMPORARY_FILE_H_
+
+#include "base/files/file_path.h"
+
+namespace asar {
+
+// An object representing a temporary file that should be cleaned up when this
+// object goes out of scope. Note that since deletion occurs during the
+// destructor, no further error handling is possible if the directory fails to
+// be deleted. As a result, deletion is not guaranteed by this class.
+class ScopedTemporaryFile {
+ public:
+  ScopedTemporaryFile();
+  virtual ~ScopedTemporaryFile();
+
+  // Init an empty temporary file.
+  bool Init();
+
+  // Init an temporary file and fill it with content of |path|.
+  bool InitFromFile(const base::FilePath& path, uint64 offset, uint64 size);
+
+  base::FilePath path() const { return path_; }
+
+ private:
+  base::FilePath path_;
+
+  DISALLOW_COPY_AND_ASSIGN(ScopedTemporaryFile);
+};
+
+}  // namespace asar
+
+#endif  // ATOM_COMMON_ASAR_SCOPED_TEMPORARY_FILE_H_
diff --git a/atom/common/lib/asar.coffee b/atom/common/lib/asar.coffee
new file mode 100644
index 000000000000..8d92563fc67b
--- /dev/null
+++ b/atom/common/lib/asar.coffee
@@ -0,0 +1,296 @@
+asar = process.atomBinding 'asar'
+child_process = require 'child_process'
+fs = require 'fs'
+path = require 'path'
+util = require 'util'
+
+# Cache asar archive objects.
+cachedArchives = {}
+getOrCreateArchive = (p) ->
+  archive = cachedArchives[p]
+  return archive if archive?
+  archive = asar.createArchive p
+  return false unless archive
+  cachedArchives[p] = archive
+
+# Clean cache on quit.
+process.on 'exit', ->
+  archive.destroy() for p, archive of cachedArchives
+
+# Separate asar package's path from full path.
+splitPath = (p) ->
+  return [false] if typeof p isnt 'string'
+  return [true, p, ''] if p.substr(-5) is '.asar'
+  index = p.lastIndexOf ".asar#{path.sep}"
+  return [false] if index is -1
+  [true, p.substr(0, index + 5), p.substr(index + 6)]
+
+# Convert asar archive's Stats object to fs's Stats object.
+nextInode = 0
+uid = if process.getuid? then process.getuid() else 0
+gid = if process.getgid? then process.getgid() else 0
+asarStatsToFsStats = (stats) ->
+  {
+    dev: 1,
+    ino: ++nextInode,
+    mode: 33188,
+    nlink: 1,
+    uid: uid,
+    gid: gid,
+    rdev: 0,
+    size: stats.size
+    isFile: -> stats.isFile
+    isDirectory: -> stats.isDirectory
+    isSymbolicLink: -> stats.isLink
+    isBlockDevice: -> false
+    isCharacterDevice: -> false
+    isFIFO: -> false
+    isSocket: -> false
+  }
+
+# Create a ENOENT error.
+createNotFoundError = (asarPath, filePath) ->
+  error = new Error("ENOENT, #{filePath} not found in #{asarPath}")
+  error.code = "ENOENT"
+  error.errno = -2
+  error
+
+# Override fs APIs.
+lstatSync = fs.lstatSync
+fs.lstatSync = (p) ->
+  [isAsar, asarPath, filePath] = splitPath p
+  return lstatSync p unless isAsar
+
+  archive = getOrCreateArchive asarPath
+  throw new Error("Invalid package #{asarPath}") unless archive
+
+  stats = archive.stat filePath
+  throw createNotFoundError(asarPath, filePath) unless stats
+
+  asarStatsToFsStats stats
+
+lstat = fs.lstat
+fs.lstat = (p, callback) ->
+  [isAsar, asarPath, filePath] = splitPath p
+  return lstat p, callback unless isAsar
+
+  archive = getOrCreateArchive asarPath
+  return callback new Error("Invalid package #{asarPath}") unless archive
+
+  stats = getOrCreateArchive(asarPath).stat filePath
+  return callback createNotFoundError(asarPath, filePath) unless stats
+
+  process.nextTick -> callback null, asarStatsToFsStats stats
+
+statSync = fs.statSync
+fs.statSync = (p) ->
+  [isAsar, asarPath, filePath] = splitPath p
+  return statSync p unless isAsar
+
+  # Do not distinguish links for now.
+  fs.lstatSync p
+
+stat = fs.stat
+fs.stat = (p, callback) ->
+  [isAsar, asarPath, filePath] = splitPath p
+  return stat p, callback unless isAsar
+
+  # Do not distinguish links for now.
+  process.nextTick -> fs.lstat p, callback
+
+statSyncNoException = fs.statSyncNoException
+fs.statSyncNoException = (p) ->
+  [isAsar, asarPath, filePath] = splitPath p
+  return statSyncNoException p unless isAsar
+
+  archive = getOrCreateArchive asarPath
+  return false unless archive
+  stats = archive.stat filePath
+  return false unless stats
+  asarStatsToFsStats stats
+
+realpathSync = fs.realpathSync
+fs.realpathSync = (p) ->
+  [isAsar, asarPath, filePath] = splitPath p
+  return realpathSync.apply this, arguments unless isAsar
+
+  archive = getOrCreateArchive asarPath
+  throw new Error("Invalid package #{asarPath}") unless archive
+
+  real = archive.realpath filePath
+  throw createNotFoundError(asarPath, filePath) if real is false
+
+  path.join realpathSync(asarPath), real
+
+realpath = fs.realpath
+fs.realpath = (p, cache, callback) ->
+  [isAsar, asarPath, filePath] = splitPath p
+  return realpath.apply this, arguments unless isAsar
+
+  if typeof cache is 'function'
+    callback = cache
+    cache = undefined
+
+  archive = getOrCreateArchive asarPath
+  return callback new Error("Invalid package #{asarPath}") unless archive
+
+  real = archive.realpath filePath
+  return callback createNotFoundError(asarPath, filePath) if real is false
+
+  realpath asarPath, (err, p) ->
+    return callback err if err
+    callback null, path.join(p, real)
+
+exists = fs.exists
+fs.exists = (p, callback) ->
+  [isAsar, asarPath, filePath] = splitPath p
+  return exists p, callback unless isAsar
+
+  archive = getOrCreateArchive asarPath
+  return callback new Error("Invalid package #{asarPath}") unless archive
+
+  process.nextTick -> callback archive.stat(filePath) isnt false
+
+existsSync = fs.existsSync
+fs.existsSync = (p) ->
+  [isAsar, asarPath, filePath] = splitPath p
+  return existsSync p unless isAsar
+
+  archive = getOrCreateArchive asarPath
+  return false unless archive
+
+  archive.stat(filePath) isnt false
+
+open = fs.open
+readFile = fs.readFile
+fs.readFile = (p, options, callback)
-> + [isAsar, asarPath, filePath] = splitPath p + return readFile.apply this, arguments unless isAsar + + if typeof options is 'function' + callback = options + options = undefined + + archive = getOrCreateArchive asarPath + return callback new Error("Invalid package #{asarPath}") unless archive + + info = archive.getFileInfo filePath + return callback createNotFoundError(asarPath, filePath) unless info + + if not options + options = encoding: null, flag: 'r' + else if util.isString options + options = encoding: options, flag: 'r' + else if not util.isObject options + throw new TypeError('Bad arguments') + + flag = options.flag || 'r' + encoding = options.encoding + + buffer = new Buffer(info.size) + open archive.path, flag, (error, fd) -> + return callback error if error + fs.read fd, buffer, 0, info.size, info.offset, (error) -> + fs.close fd, -> + callback error, if encoding then buffer.toString encoding else buffer + +openSync = fs.openSync +readFileSync = fs.readFileSync +fs.readFileSync = (p, options) -> + [isAsar, asarPath, filePath] = splitPath p + return readFileSync.apply this, arguments unless isAsar + + archive = getOrCreateArchive asarPath + throw new Error("Invalid package #{asarPath}") unless archive + + info = archive.getFileInfo filePath + throw createNotFoundError(asarPath, filePath) unless info + + if not options + options = encoding: null, flag: 'r' + else if util.isString options + options = encoding: options, flag: 'r' + else if not util.isObject options + throw new TypeError('Bad arguments') + + flag = options.flag || 'r' + encoding = options.encoding + + buffer = new Buffer(info.size) + fd = openSync archive.path, flag + try + fs.readSync fd, buffer, 0, info.size, info.offset + catch e + throw e + finally + fs.closeSync fd + if encoding then buffer.toString encoding else buffer + +readdir = fs.readdir +fs.readdir = (p, callback) -> + [isAsar, asarPath, filePath] = splitPath p + return readdir.apply this, arguments unless isAsar + + archive = 
getOrCreateArchive asarPath + return callback new Error("Invalid package #{asarPath}") unless archive + + files = archive.readdir filePath + return callback createNotFoundError(asarPath, filePath) unless files + + process.nextTick -> callback null, files + +readdirSync = fs.readdirSync +fs.readdirSync = (p) -> + [isAsar, asarPath, filePath] = splitPath p + return readdirSync.apply this, arguments unless isAsar + + archive = getOrCreateArchive asarPath + throw new Error("Invalid package #{asarPath}") unless archive + + files = archive.readdir filePath + throw createNotFoundError(asarPath, filePath) unless files + + files + +# Override APIs that rely on passing file path instead of content to C++. +overrideAPISync = (module, name, arg = 0) -> + old = module[name] + module[name] = -> + p = arguments[arg] + [isAsar, asarPath, filePath] = splitPath p + return old.apply this, arguments unless isAsar + + archive = getOrCreateArchive asarPath + throw new Error("Invalid package #{asarPath}") unless archive + + newPath = archive.copyFileOut filePath + throw createNotFoundError(asarPath, filePath) unless newPath + + arguments[arg] = newPath + old.apply this, arguments + +overrideAPI = (module, name, arg = 0) -> + old = module[name] + module[name] = -> + p = arguments[arg] + [isAsar, asarPath, filePath] = splitPath p + return old.apply this, arguments unless isAsar + + callback = arguments[arguments.length - 1] + return overrideAPISync module, name, arg unless typeof callback is 'function' + + archive = getOrCreateArchive asarPath + return callback new Error("Invalid package #{asarPath}") unless archive + + newPath = archive.copyFileOut filePath + return callback createNotFoundError(asarPath, filePath) unless newPath + + arguments[arg] = newPath + old.apply this, arguments + +overrideAPI fs, 'open' +overrideAPI child_process, 'execFile' +overrideAPISync process, 'dlopen', 1 +overrideAPISync require('module')._extensions, '.node', 1 +overrideAPISync fs, 'openSync' 
+overrideAPISync child_process, 'fork' diff --git a/atom/common/lib/init.coffee b/atom/common/lib/init.coffee index 0b4b2c224aca..c85fb65a958a 100644 --- a/atom/common/lib/init.coffee +++ b/atom/common/lib/init.coffee @@ -33,3 +33,6 @@ global.clearImmediate = timers.clearImmediate if process.type is 'browser' global.setTimeout = wrapWithActivateUvLoop timers.setTimeout global.setInterval = wrapWithActivateUvLoop timers.setInterval + +# Add support for asar packages. +require './asar' diff --git a/atom/common/node_bindings.cc b/atom/common/node_bindings.cc index a4dec37f3bd5..a6f3659a2276 100644 --- a/atom/common/node_bindings.cc +++ b/atom/common/node_bindings.cc @@ -69,6 +69,7 @@ REFERENCE_MODULE(atom_browser_protocol); REFERENCE_MODULE(atom_browser_global_shortcut); REFERENCE_MODULE(atom_browser_tray); REFERENCE_MODULE(atom_browser_window); +REFERENCE_MODULE(atom_common_asar); REFERENCE_MODULE(atom_common_clipboard); REFERENCE_MODULE(atom_common_crash_reporter); REFERENCE_MODULE(atom_common_id_weak_map); diff --git a/atom/renderer/lib/init.coffee b/atom/renderer/lib/init.coffee index 94d8679ca019..3fb4ceaabf42 100644 --- a/atom/renderer/lib/init.coffee +++ b/atom/renderer/lib/init.coffee @@ -24,8 +24,12 @@ require path.resolve(__dirname, '..', '..', 'common', 'lib', 'init.js') global.require = require global.module = module -# Set the __filename to the path of html file if it's file:// protocol. -if window.location.protocol is 'file:' +# Emit the 'exit' event when page is unloading. +window.addEventListener 'unload', -> + process.emit 'exit' + +# Set the __filename to the path of html file if it's file: or asar: protocol. 
+if window.location.protocol in ['file:', 'asar:'] pathname = if process.platform is 'win32' window.location.pathname.substr 1 diff --git a/docs/README.md b/docs/README.md index db94d57a381a..d5d3468f760d 100644 --- a/docs/README.md +++ b/docs/README.md @@ -2,6 +2,7 @@ * [Quick start](tutorial/quick-start.md) * [Application distribution](tutorial/application-distribution.md) +* [Application packaging](tutorial/application-packaging.md) * [Using native node modules](tutorial/using-native-node-modules.md) * [Debugging browser process](tutorial/debugging-browser-process.md) * [Using Selenium and WebDriver](tutorial/using-selenium-and-webdriver.md) diff --git a/docs/api/app.md b/docs/api/app.md index 0f585f5e521f..b9a837d77bb2 100644 --- a/docs/api/app.md +++ b/docs/api/app.md @@ -45,6 +45,10 @@ terminating the application. See description of `window-all-closed` for the differences between `will-quit` and it. +## Event: quit + +Emitted when application is quitting. + ## Event: open-file * `event` Event diff --git a/docs/tutorial/application-distribution.md b/docs/tutorial/application-distribution.md index cba702c42268..72d78bc9b253 100644 --- a/docs/tutorial/application-distribution.md +++ b/docs/tutorial/application-distribution.md @@ -27,7 +27,19 @@ Then execute `Atom.app` (or `atom` on Linux, and `atom.exe` on Windows), and atom-shell will start as your app. The `atom-shell` directory would then be your distribution that should be delivered to final users. -## Build with grunt +## Packaging your app into a file + +Apart from shipping your app by copying all its sources files, you can also +package your app into [asar](https://github.com/atom/asar) archive to avoid +exposing your app's source code to users. + +To use the `asar` archive to replace the `app` folder, you need to rename the +archive to `app.asar`, and put it under atom-shell's resources directory, +atom-shell will then try read the archive and start from it. 
+ +More details can be found in [Application packaging](application-packaging.md). + +## Building with grunt If you build your application with `grunt` there is a grunt task that can download atom-shell for your current platform automatically: diff --git a/docs/tutorial/application-packaging.md b/docs/tutorial/application-packaging.md new file mode 100644 index 000000000000..0303969d6767 --- /dev/null +++ b/docs/tutorial/application-packaging.md @@ -0,0 +1,139 @@ +# Application packaging + +To protect your app's resources and source code from the users, you can choose +to package your app into an [asar][asar] archive with little changes to your source +code. + +## Generating `asar` archive + +The [asar][asar] archive is a simple tar-like format that concatenates files +into a single file, atom-shell can read arbitrary files in it without unpacking +the whole file. + +The following are the steps to package your app into an `asar` archive: + +### 1. Install asar utility + +```bash +$ npm install -g asar +``` + +### 2. Package with `asar pack` + +```bash +$ asar pack your-app app.asar +``` + +## Using `asar` archives + +In atom-shell there are two sets of APIs: Node APIs provided by Node.js, and Web +APIs provided by Chromium. Both APIs support reading files from `asar` archives. + +### Node API + +With special patches in atom-shell, Node APIs like `fs.readFile` and `require` +treat `asar` archives as virtual directories, and the files in it as normal +files in the filesystem. 
+ +For example, suppose we have an `example.asar` archive under `/path/to`: + +```bash +$ asar list /path/to/example.asar +/app.js +/file.txt +/dir/module.js +/static/index.html +/static/main.css +/static/jquery.min.js +``` + +Read a file in `asar` archive: + +```javascript +var fs = require('fs'); +fs.readFileSync('/path/to/example.asar/file.txt'); +``` + +List all files under the root of archive: + +```javascript +var fs = require('fs'); +fs.readdirSync('/path/to/example.asar'); +``` + +Use a module from the archive: + +```javascript +require('/path/to/example.asar/dir/module.js'); +``` + +### Web API + +In a web page, files in an archive can be requested by using the `asar:` protocol. +Like the Node API, `asar` archives are treated as directories. + +For example, to get a file with `$.get`: + +```html +<script> +var $ = require('./jquery.min.js'); +$.get('asar:/path/to/example.asar/file.txt', function(data) { +  console.log(data); +}); +</script> +``` + +The `asar:` protocol can also be used to request normal files in the filesystem, +just like the `file:` protocol. But unlike the `file:` protocol, there are no slashes +(`//`) after `asar:`. + +You can also display a web page in an `asar` archive with `BrowserWindow`: + +```javascript +var BrowserWindow = require('browser-window'); +var win = new BrowserWindow({width: 800, height: 600}); +win.loadUrl('asar:/path/to/example.asar/static/index.html'); +``` + +## Limitations on Node API + +Even though we tried hard to make `asar` archives in the Node API work like +directories as much as possible, there are still limitations due to the +low-level nature of the Node API. + +### Archives are read only + +The archives can not be modified so all Node APIs that can modify files will not +work with `asar` archives. + +### Working directory can not be set to directories in archive + +Though `asar` archives are treated as directories, there are no actual +directories in the filesystem, so you can never set the working directory to +directories in `asar` archives, passing them to the `cwd` option of some APIs will +also cause errors. 
+ +### Extra unpacking on some APIs + +Most `fs` APIs can read file or get file's information from `asar` archives +without unpacking, but for some APIs that rely on passing the real file path to +underlying system calls, atom-shell will extract the needed file into a +temporary file and pass the path of the temporary file to the APIs to make them +work. This adds a little overhead for those APIs. + +APIs that requires extra unpacking are: + +* `child_process.execFile` +* `child_process.fork` +* `fs.open` +* `fs.openSync` +* `process.dlopen` - Used by `require` on native modules + +### Fake stat information of `fs.stat` + +The `Stats` object returned by `fs.stat` and its friends on files in `asar` +archives are generated by guessing, because those files do not exist on +filesystem. So you should not trust the `Stats` object except for getting file +size and checking file type. + +[asar]: https://github.com/atom/asar diff --git a/docs/tutorial/quick-start.md b/docs/tutorial/quick-start.md index 84e41f08cd78..dca58f679654 100644 --- a/docs/tutorial/quick-start.md +++ b/docs/tutorial/quick-start.md @@ -8,8 +8,8 @@ a variant of the Node.js runtime which is focused on desktop applications instead of web servers. It doesn't mean atom-shell is a JavaScript binding to GUI libraries. Instead, -atom-shell uses web pages as its GUI, so you could also see it as a minimal Chromium -browser, controlled by JavaScript. +atom-shell uses web pages as its GUI, so you could also see it as a minimal +Chromium browser, controlled by JavaScript. ### The browser side @@ -18,9 +18,11 @@ are two types of JavaScript scripts: the server side scripts and the client side scripts. Server-side JavaScript is that which runs on the Node.js runtime, while client-side JavaScript runs inside the user's browser. 
-In atom-shell we have similar concepts: Since atom-shell displays a GUI by showing -web pages, we have **scripts that run in the web page**, and also **scripts run by the atom-shell runtime**, which creates those web pages. -Like Node.js, we call them **client scripts**, and **browser scripts** (meaning the browser replaces the concept of the server here). +In atom-shell we have similar concepts: Since atom-shell displays a GUI by +showing web pages, we have **scripts that run in the web page**, and also +**scripts run by the atom-shell runtime**, which creates those web pages. +Like Node.js, we call them **client scripts**, and **browser scripts** +(meaning the browser replaces the concept of the server here). In traditional Node.js applications, communication between server and client is usually facilitated via web sockets. In atom-shell, we have provided @@ -30,19 +32,20 @@ support. ### Web page and Node.js -Normal web pages are designed to not reach outside of the browser, which makes them -unsuitable for interacting with native systems. Atom-shell provides Node.js APIs -in web pages so you can access native resources from web pages, just like +Normal web pages are designed to not reach outside of the browser, which makes +them unsuitable for interacting with native systems. Atom-shell provides Node.js +APIs in web pages so you can access native resources from web pages, just like [Node-Webkit](https://github.com/rogerwang/node-webkit). But unlike Node-Webkit, you cannot do native GUI related operations in web -pages. Instead you need to do them on the browser side by sending messages to it, or -using the easy [remote](../api/remote.md) module. +pages. Instead you need to do them on the browser side by sending messages to +it, or using the easy [remote](../api/remote.md) module. 
## Write your first atom-shell app -Generally, an atom-shell app would be structured like this (see the [hello-atom](https://github.com/dougnukem/hello-atom) repo for reference): +Generally, an atom-shell app would be structured like this (see the +[hello-atom](https://github.com/dougnukem/hello-atom) repo for reference): ```text your-app/ @@ -51,10 +54,10 @@ your-app/ └── index.html ``` -The format of `package.json` is exactly the same as that of Node's modules, and the -script specified by the `main` field is the startup script of your app, which -will run on the browser side. An example of your `package.json` might look like -this: +The format of `package.json` is exactly the same as that of Node's modules, and +the script specified by the `main` field is the startup script of your app, +which will run on the browser side. An example of your `package.json` might look +like this: ```json { @@ -123,8 +126,8 @@ Finally the `index.html` is the web page you want to show: After you're done writing your app, you can create a distribution by following the [Application distribution](./application-distribution.md) guide -and then execute the packaged app. -You can also just use the downloaded atom-shell binary to execute your app directly. +and then execute the packaged app. You can also just use the downloaded +atom-shell binary to execute your app directly. On Windows: @@ -144,4 +147,5 @@ On Mac OS X: $ ./Atom.app/Contents/MacOS/Atom your-app/ ``` -`Atom.app` here is part of the atom-shell's release package, you can download it from [here](https://github.com/atom/atom-shell/releases). +`Atom.app` here is part of the atom-shell's release package, you can download +it from [here](https://github.com/atom/atom-shell/releases). 
diff --git a/spec/asar-spec.coffee b/spec/asar-spec.coffee new file mode 100644 index 000000000000..d1d4c6d996f7 --- /dev/null +++ b/spec/asar-spec.coffee @@ -0,0 +1,375 @@ +assert = require 'assert' +fs = require 'fs' +path = require 'path' + +describe 'asar package', -> + fixtures = path.join __dirname, 'fixtures' + + describe 'node api', -> + describe 'fs.readFileSync', -> + it 'reads a normal file', -> + file1 = path.join fixtures, 'asar', 'a.asar', 'file1' + assert.equal fs.readFileSync(file1).toString(), 'file1\n' + file2 = path.join fixtures, 'asar', 'a.asar', 'file2' + assert.equal fs.readFileSync(file2).toString(), 'file2\n' + file3 = path.join fixtures, 'asar', 'a.asar', 'file3' + assert.equal fs.readFileSync(file3).toString(), 'file3\n' + + it 'reads a linked file', -> + p = path.join fixtures, 'asar', 'a.asar', 'link1' + assert.equal fs.readFileSync(p).toString(), 'file1\n' + + it 'reads a file from linked directory', -> + p = path.join fixtures, 'asar', 'a.asar', 'link2', 'file1' + assert.equal fs.readFileSync(p).toString(), 'file1\n' + p = path.join fixtures, 'asar', 'a.asar', 'link2', 'link2', 'file1' + assert.equal fs.readFileSync(p).toString(), 'file1\n' + + it 'throws ENOENT error when can not find file', -> + p = path.join fixtures, 'asar', 'a.asar', 'not-exist' + throws = -> fs.readFileSync p + assert.throws throws, /ENOENT/ + + describe 'fs.readFile', -> + it 'reads a normal file', (done) -> + p = path.join fixtures, 'asar', 'a.asar', 'file1' + fs.readFile p, (err, content) -> + assert.equal err, null + assert.equal String(content), 'file1\n' + done() + + it 'reads a linked file', (done) -> + p = path.join fixtures, 'asar', 'a.asar', 'link1' + fs.readFile p, (err, content) -> + assert.equal err, null + assert.equal String(content), 'file1\n' + done() + + it 'reads a file from linked directory', (done) -> + p = path.join fixtures, 'asar', 'a.asar', 'link2', 'link2', 'file1' + fs.readFile p, (err, content) -> + assert.equal err, null + 
assert.equal String(content), 'file1\n' + done() + + it 'throws ENOENT error when can not find file', (done) -> + p = path.join fixtures, 'asar', 'a.asar', 'not-exist' + fs.readFile p, (err, content) -> + assert.equal err.code, 'ENOENT' + done() + + describe 'fs.lstatSync', -> + it 'returns information of root', -> + p = path.join fixtures, 'asar', 'a.asar' + stats = fs.lstatSync p + assert.equal stats.isFile(), false + assert.equal stats.isDirectory(), true + assert.equal stats.isSymbolicLink(), false + assert.equal stats.size, 0 + + it 'returns information of a normal file', -> + for file in ['file1', 'file2', 'file3', path.join('dir1', 'file1'), path.join('link2', 'file1')] + p = path.join fixtures, 'asar', 'a.asar', file + stats = fs.lstatSync p + assert.equal stats.isFile(), true + assert.equal stats.isDirectory(), false + assert.equal stats.isSymbolicLink(), false + assert.equal stats.size, 6 + + it 'returns information of a normal directory', -> + for file in ['dir1', 'dir2', 'dir3'] + p = path.join fixtures, 'asar', 'a.asar', file + stats = fs.lstatSync p + assert.equal stats.isFile(), false + assert.equal stats.isDirectory(), true + assert.equal stats.isSymbolicLink(), false + assert.equal stats.size, 0 + + it 'returns information of a linked file', -> + for file in ['link1', path.join('dir1', 'link1'), path.join('link2', 'link2')] + p = path.join fixtures, 'asar', 'a.asar', file + stats = fs.lstatSync p + assert.equal stats.isFile(), false + assert.equal stats.isDirectory(), false + assert.equal stats.isSymbolicLink(), true + assert.equal stats.size, 0 + + it 'returns information of a linked directory', -> + for file in ['link2', path.join('dir1', 'link2'), path.join('link2', 'link2')] + p = path.join fixtures, 'asar', 'a.asar', file + stats = fs.lstatSync p + assert.equal stats.isFile(), false + assert.equal stats.isDirectory(), false + assert.equal stats.isSymbolicLink(), true + assert.equal stats.size, 0 + + it 'throws ENOENT error when can not find 
file', -> + for file in ['file4', 'file5', path.join('dir1', 'file4')] + p = path.join fixtures, 'asar', 'a.asar', file + throws = -> fs.lstatSync p + assert.throws throws, /ENOENT/ + + describe 'fs.lstat', -> + it 'returns information of root', (done) -> + p = path.join fixtures, 'asar', 'a.asar' + stats = fs.lstat p, (err, stats) -> + assert.equal err, null + assert.equal stats.isFile(), false + assert.equal stats.isDirectory(), true + assert.equal stats.isSymbolicLink(), false + assert.equal stats.size, 0 + done() + + it 'returns information of a normal file', (done) -> + p = path.join fixtures, 'asar', 'a.asar', 'link2', 'file1' + stats = fs.lstat p, (err, stats) -> + assert.equal err, null + assert.equal stats.isFile(), true + assert.equal stats.isDirectory(), false + assert.equal stats.isSymbolicLink(), false + assert.equal stats.size, 6 + done() + + it 'returns information of a normal directory', (done) -> + p = path.join fixtures, 'asar', 'a.asar', 'dir1' + stats = fs.lstat p, (err, stats) -> + assert.equal err, null + assert.equal stats.isFile(), false + assert.equal stats.isDirectory(), true + assert.equal stats.isSymbolicLink(), false + assert.equal stats.size, 0 + done() + + it 'returns information of a linked file', (done) -> + p = path.join fixtures, 'asar', 'a.asar', 'link2', 'link1' + stats = fs.lstat p, (err, stats) -> + assert.equal err, null + assert.equal stats.isFile(), false + assert.equal stats.isDirectory(), false + assert.equal stats.isSymbolicLink(), true + assert.equal stats.size, 0 + done() + + it 'returns information of a linked directory', (done) -> + p = path.join fixtures, 'asar', 'a.asar', 'link2', 'link2' + stats = fs.lstat p, (err, stats) -> + assert.equal err, null + assert.equal stats.isFile(), false + assert.equal stats.isDirectory(), false + assert.equal stats.isSymbolicLink(), true + assert.equal stats.size, 0 + done() + + it 'throws ENOENT error when can not find file', (done) -> + p = path.join fixtures, 'asar', 'a.asar', 
'file4' + stats = fs.lstat p, (err, stats) -> + assert.equal err.code, 'ENOENT' + done() + + describe 'fs.realpathSync', -> + it 'returns real path root', -> + parent = fs.realpathSync path.join(fixtures, 'asar') + p = 'a.asar' + r = fs.realpathSync path.join(parent, p) + assert.equal r, path.join(parent, p) + + it 'returns real path of a normal file', -> + parent = fs.realpathSync path.join(fixtures, 'asar') + p = path.join 'a.asar', 'file1' + r = fs.realpathSync path.join(parent, p) + assert.equal r, path.join(parent, p) + + it 'returns real path of a normal directory', -> + parent = fs.realpathSync path.join(fixtures, 'asar') + p = path.join 'a.asar', 'dir1' + r = fs.realpathSync path.join(parent, p) + assert.equal r, path.join(parent, p) + + it 'returns real path of a linked file', -> + parent = fs.realpathSync path.join(fixtures, 'asar') + p = path.join 'a.asar', 'link2', 'link1' + r = fs.realpathSync path.join(parent, p) + assert.equal r, path.join(parent, 'a.asar', 'file1') + + it 'returns real path of a linked directory', -> + parent = fs.realpathSync path.join(fixtures, 'asar') + p = path.join 'a.asar', 'link2', 'link2' + r = fs.realpathSync path.join(parent, p) + assert.equal r, path.join(parent, 'a.asar', 'dir1') + + it 'throws ENOENT error when can not find file', -> + parent = fs.realpathSync path.join(fixtures, 'asar') + p = path.join 'a.asar', 'not-exist' + throws = -> fs.realpathSync path.join(parent, p) + assert.throws throws, /ENOENT/ + + describe 'fs.realpath', -> + it 'returns real path root', (done) -> + parent = fs.realpathSync path.join(fixtures, 'asar') + p = 'a.asar' + fs.realpath path.join(parent, p), (err, r) -> + assert.equal err, null + assert.equal r, path.join(parent, p) + done() + + it 'returns real path of a normal file', (done) -> + parent = fs.realpathSync path.join(fixtures, 'asar') + p = path.join 'a.asar', 'file1' + fs.realpath path.join(parent, p), (err, r) -> + assert.equal err, null + assert.equal r, path.join(parent, p) + 
done() + + it 'returns real path of a normal directory', (done) -> + parent = fs.realpathSync path.join(fixtures, 'asar') + p = path.join 'a.asar', 'dir1' + fs.realpath path.join(parent, p), (err, r) -> + assert.equal err, null + assert.equal r, path.join(parent, p) + done() + + it 'returns real path of a linked file', (done) -> + parent = fs.realpathSync path.join(fixtures, 'asar') + p = path.join 'a.asar', 'link2', 'link1' + fs.realpath path.join(parent, p), (err, r) -> + assert.equal err, null + assert.equal r, path.join(parent, 'a.asar', 'file1') + done() + + it 'returns real path of a linked directory', (done) -> + parent = fs.realpathSync path.join(fixtures, 'asar') + p = path.join 'a.asar', 'link2', 'link2' + fs.realpath path.join(parent, p), (err, r) -> + assert.equal err, null + assert.equal r, path.join(parent, 'a.asar', 'dir1') + done() + + it 'throws ENOENT error when can not find file', (done) -> + parent = fs.realpathSync path.join(fixtures, 'asar') + p = path.join 'a.asar', 'not-exist' + fs.realpath path.join(parent, p), (err, stats) -> + assert.equal err.code, 'ENOENT' + done() + + describe 'fs.readdirSync', -> + it 'reads dirs from root', -> + p = path.join fixtures, 'asar', 'a.asar' + dirs = fs.readdirSync p + assert.deepEqual dirs, ['dir1', 'dir2', 'dir3', 'file1', 'file2', 'file3', 'link1', 'link2', 'ping.js'] + + it 'reads dirs from a normal dir', -> + p = path.join fixtures, 'asar', 'a.asar', 'dir1' + dirs = fs.readdirSync p + assert.deepEqual dirs, ['file1', 'file2', 'file3', 'link1', 'link2'] + + it 'reads dirs from a linked dir', -> + p = path.join fixtures, 'asar', 'a.asar', 'link2', 'link2' + dirs = fs.readdirSync p + assert.deepEqual dirs, ['file1', 'file2', 'file3', 'link1', 'link2'] + + it 'throws ENOENT error when can not find file', -> + p = path.join fixtures, 'asar', 'a.asar', 'not-exist' + throws = -> fs.readdirSync p + assert.throws throws, /ENOENT/ + + describe 'fs.readdir', -> + it 'reads dirs from root', (done) -> + p = 
path.join fixtures, 'asar', 'a.asar' + dirs = fs.readdir p, (err, dirs) -> + assert.equal err, null + assert.deepEqual dirs, ['dir1', 'dir2', 'dir3', 'file1', 'file2', 'file3', 'link1', 'link2', 'ping.js'] + done() + + it 'reads dirs from a normal dir', (done) -> + p = path.join fixtures, 'asar', 'a.asar', 'dir1' + dirs = fs.readdir p, (err, dirs) -> + assert.equal err, null + assert.deepEqual dirs, ['file1', 'file2', 'file3', 'link1', 'link2'] + done() + + it 'reads dirs from a linked dir', (done) -> + p = path.join fixtures, 'asar', 'a.asar', 'link2', 'link2' + dirs = fs.readdir p, (err, dirs) -> + assert.equal err, null + assert.deepEqual dirs, ['file1', 'file2', 'file3', 'link1', 'link2'] + done() + + it 'throws ENOENT error when can not find file', (done) -> + p = path.join fixtures, 'asar', 'a.asar', 'not-exist' + fs.readdir p, (err, stats) -> + assert.equal err.code, 'ENOENT' + done() + + describe 'fs.openSync', -> + it 'opens a normal/linked/under-linked-directory file', -> + for file in ['file1', 'link1', path.join('link2', 'file1')] + p = path.join fixtures, 'asar', 'a.asar', file + fd = fs.openSync p, 'r' + buffer = new Buffer(6) + fs.readSync fd, buffer, 0, 6, 0 + assert.equal String(buffer), 'file1\n' + fs.closeSync fd + + it 'throws ENOENT error when can not find file', -> + p = path.join fixtures, 'asar', 'a.asar', 'not-exist' + throws = -> fs.openSync p + assert.throws throws, /ENOENT/ + + describe 'fs.open', -> + it 'opens a normal file', (done) -> + p = path.join fixtures, 'asar', 'a.asar', 'file1' + fs.open p, 'r', (err, fd) -> + assert.equal err, null + buffer = new Buffer(6) + fs.read fd, buffer, 0, 6, 0, (err) -> + assert.equal err, null + assert.equal String(buffer), 'file1\n' + fs.close fd, done + + it 'throws ENOENT error when can not find file', (done) -> + p = path.join fixtures, 'asar', 'a.asar', 'not-exist' + fs.open p, (err, stats) -> + assert.equal err.code, 'ENOENT' + done() + + describe 'child_process.fork', -> + child_process = 
require 'child_process' + + it 'opens a normal js file', (done) -> + child = child_process.fork path.join(fixtures, 'asar', 'a.asar', 'ping.js') + child.on 'message', (msg) -> + assert.equal msg, 'message' + done() + child.send 'message' + + it 'throws ENOENT error when can not find file', -> + p = path.join fixtures, 'asar', 'a.asar', 'not-exist' + throws = -> child_process.fork p + assert.throws throws, /ENOENT/ + + describe 'asar protocol', -> + it 'can request a file in package', (done) -> + p = path.resolve fixtures, 'asar', 'a.asar', 'file1' + $.get "asar:#{p}", (data) -> + assert.equal data, 'file1\n' + done() + + it 'can request a linked file in package', (done) -> + p = path.resolve fixtures, 'asar', 'a.asar', 'link2', 'link1' + $.get "asar:#{p}", (data) -> + assert.equal data, 'file1\n' + done() + + it 'can request a file in filesystem', (done) -> + p = path.resolve fixtures, 'asar', 'file' + $.get "asar:#{p}", (data) -> + assert.equal data, 'file\n' + done() + + it 'gets 404 when file is not found', (done) -> + p = path.resolve fixtures, 'asar', 'a.asar', 'no-exist' + $.ajax + url: "asar:#{p}" + error: (err) -> + assert.equal err.status, 404 + done() diff --git a/spec/fixtures/asar/a.asar b/spec/fixtures/asar/a.asar new file mode 100644 index 000000000000..0b74f5639cd8 Binary files /dev/null and b/spec/fixtures/asar/a.asar differ diff --git a/spec/fixtures/asar/file b/spec/fixtures/asar/file new file mode 100644 index 000000000000..f73f3093ff86 --- /dev/null +++ b/spec/fixtures/asar/file @@ -0,0 +1 @@ +file diff --git a/vendor/native_mate b/vendor/native_mate index 12f4e9b7ea00..c5b39126ee73 160000 --- a/vendor/native_mate +++ b/vendor/native_mate @@ -1 +1 @@ -Subproject commit 12f4e9b7ea0038e58e52839142eff0a4d17069bf +Subproject commit c5b39126ee7388acc61a25ac6b5fefb7a2cd6262