// Copyright (c) 2015 GitHub, Inc.
// Use of this source code is governed by the MIT license that can be
// found in the LICENSE file.

#include "atom/browser/net/url_request_fetch_job.h"

#include <algorithm>
#include <string>

#include "atom/browser/api/atom_api_session.h"
#include "atom/browser/atom_browser_context.h"
#include "base/guid.h"
#include "base/memory/ptr_util.h"
#include "base/strings/string_util.h"
#include "brightray/browser/url_request_context_getter.h"
#include "content/browser/streams/stream_context.h"
#include "native_mate/dictionary.h"
#include "net/base/io_buffer.h"
#include "net/base/net_errors.h"
#include "net/http/http_response_headers.h"
#include "net/url_request/url_fetcher.h"
#include "net/url_request/url_fetcher_response_writer.h"
#include "url/url_constants.h"

using content::BrowserThread;
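
// Overview: URLRequestFetchJob asks the JS protocol handler for a dictionary
// describing the request, issues a net::URLFetcher for that URL, pipes the
// fetcher's body into a content::Stream via ResponsePiper, and serves
// ReadRawData() calls from that stream.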

namespace atom {

namespace {

// Convert string to RequestType.
net::URLFetcher::RequestType GetRequestType(const std::string& raw) {
  std::string method = base::ToUpperASCII(raw);
  if (method.empty() || method == "GET")
    return net::URLFetcher::GET;
  else if (method == "POST")
    return net::URLFetcher::POST;
  else if (method == "HEAD")
    return net::URLFetcher::HEAD;
  else if (method == "DELETE")
    return net::URLFetcher::DELETE_REQUEST;
  else if (method == "PUT")
    return net::URLFetcher::PUT;
  else if (method == "PATCH")
    return net::URLFetcher::PATCH;
  else  // Use "GET" as fallback.
    return net::URLFetcher::GET;
}

// Pipe the response writer back to URLRequestFetchJob.
class ResponsePiper : public net::URLFetcherResponseWriter {
 public:
  explicit ResponsePiper(URLRequestFetchJob* job)
      : first_write_(true), job_(job) {}

  // net::URLFetcherResponseWriter:
  int Initialize(const net::CompletionCallback& callback) override {
    return net::OK;
  }
  int Write(net::IOBuffer* buffer,
            int num_bytes,
            const net::CompletionCallback& callback) override {
    if (first_write_) {
      // URLFetcherResponseWriter has no event for when the headers have been
      // read, so we emulate one by hooking into the first write.
      job_->HeadersCompleted();
      first_write_ = false;
    }
    // Pipe the body data into the job's content::Stream.
    job_->stream()->AddData(buffer->data(), num_bytes);
    return num_bytes;
  }
  int Finish(int net_error, const net::CompletionCallback& callback) override {
    return net::OK;
  }

 private:
  bool first_write_;
  URLRequestFetchJob* job_;

  DISALLOW_COPY_AND_ASSIGN(ResponsePiper);
};

}  // namespace

URLRequestFetchJob::URLRequestFetchJob(net::URLRequest* request,
                                       net::NetworkDelegate* network_delegate)
    : JsAsker<net::URLRequestJob>(request, network_delegate),
      pending_buffer_size_(0),
      total_bytes_read_(0) {}
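
// Called on the UI thread (the URLRequestContextGetter below has to be
// created there); its only task is to resolve which request context the
// fetch should use, based on the |session| option from the JS handler.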

void URLRequestFetchJob::BeforeStartInUI(
    v8::Isolate* isolate, v8::Local<v8::Value> value) {
  mate::Dictionary options;
  if (!mate::ConvertFromV8(isolate, value, &options))
    return;

  // When |session| is set to |null|, use a new request context for the fetch
  // job.
  v8::Local<v8::Value> val;
  if (options.Get("session", &val)) {
    if (val->IsNull()) {
      // We have to create the URLRequestContextGetter on the UI thread.
      url_request_context_getter_ = new brightray::URLRequestContextGetter(
          this, nullptr, nullptr, base::FilePath(), true,
          BrowserThread::GetTaskRunnerForThread(BrowserThread::IO),
          BrowserThread::GetTaskRunnerForThread(BrowserThread::FILE),
          nullptr, content::URLRequestInterceptorScopedVector());
    } else {
      mate::Handle<api::Session> session;
      if (mate::ConvertFromV8(isolate, val, &session) && !session.IsEmpty()) {
        AtomBrowserContext* browser_context = session->browser_context();
        url_request_context_getter_ =
            browser_context->url_request_context_getter();
      }
    }
  }
}
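
// Illustrative shape of the dictionary a JS protocol handler may return for
// this job (the keys are the ones BeforeStartInUI() and StartAsync() read;
// values below are examples, not defaults):
//   {
//     url: 'https://example.com/resource',  // must be a valid URL
//     method: 'POST',     // optional, falls back to the original request's method
//     referrer: '',       // optional, falls back to the original request's referrer
//     session: null,      // optional, null means "use a fresh request context"
//     uploadData: { contentType: 'text/plain', data: 'hello' }  // POSTs only
//   }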

void URLRequestFetchJob::StartAsync(std::unique_ptr<base::Value> options) {
  if (!options->IsType(base::Value::Type::DICTIONARY)) {
    NotifyStartError(net::URLRequestStatus(
        net::URLRequestStatus::FAILED, net::ERR_NOT_IMPLEMENTED));
    return;
  }

  std::string url, method, referrer;
  base::DictionaryValue* upload_data = nullptr;
  base::DictionaryValue* dict =
      static_cast<base::DictionaryValue*>(options.get());
  dict->GetString("url", &url);
  dict->GetString("method", &method);
  dict->GetString("referrer", &referrer);
  dict->GetDictionary("uploadData", &upload_data);

  // Check if URL is valid.
  GURL formated_url(url);
  if (!formated_url.is_valid()) {
    NotifyStartError(net::URLRequestStatus(
        net::URLRequestStatus::FAILED, net::ERR_INVALID_URL));
    return;
  }

  // Use |request|'s method if |method| is not specified.
  net::URLFetcher::RequestType request_type;
  if (method.empty())
    request_type = GetRequestType(request()->method());
  else
    request_type = GetRequestType(method);

  fetcher_ = net::URLFetcher::Create(formated_url, request_type, this);
  fetcher_->SaveResponseWithWriter(base::WrapUnique(new ResponsePiper(this)));

  // Use the request context getter passed by the user, if any.
  if (url_request_context_getter_)
    fetcher_->SetRequestContext(url_request_context_getter_.get());
  else
    fetcher_->SetRequestContext(request_context_getter());

  // Use |request|'s referrer if |referrer| is not specified.
  if (referrer.empty())
    fetcher_->SetReferrer(request()->referrer());
  else
    fetcher_->SetReferrer(referrer);

  // Set the data needed for POSTs.
  if (upload_data && request_type == net::URLFetcher::POST) {
    std::string content_type, data;
    upload_data->GetString("contentType", &content_type);
    upload_data->GetString("data", &data);
    fetcher_->SetUploadData(content_type, data);
  }

  // Use |request|'s headers.
  fetcher_->SetExtraRequestHeaders(
      request()->extra_request_headers().ToString());

  // Create a readable stream for the URLFetcher response.
  content::StreamContext* stream_context =
      static_cast<brightray::URLRequestContextGetter*>(request_context_getter())
          ->stream_context();

  if (stream_context) {
    GURL stream_url(std::string(url::kBlobScheme) + ":" +
                    formated_url.GetOrigin().spec() + base::GenerateGUID());
    stream_ =
        new content::Stream(stream_context->registry(), nullptr, stream_url);
    stream_->SetReadObserver(this);
    fetcher_->Start();
  } else {
    NotifyStartError(net::URLRequestStatus(net::URLRequestStatus::CANCELED,
                                           net::ERR_ABORTED));
  }
}

void URLRequestFetchJob::HeadersCompleted() {
  response_info_.reset(new net::HttpResponseInfo);
  response_info_->headers = fetcher_->GetResponseHeaders();
  NotifyHeadersComplete();
}

void URLRequestFetchJob::Kill() {
  JsAsker<URLRequestJob>::Kill();
  ClearStream();
  fetcher_.reset();
}
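
// Reading is a two-step handshake with the content::Stream: when the stream
// is empty, ReadRawData() parks |dest| in |pending_buffer_| and returns
// ERR_IO_PENDING; OnDataAvailable() then drains the stream into the parked
// buffer and signals completion via ReadRawDataComplete().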

void URLRequestFetchJob::OnDataAvailable(content::Stream* stream) {
  if (!pending_buffer_.get())
    return;

  int result = 0;
  auto state = stream_->ReadRawData(pending_buffer_.get(), pending_buffer_size_,
                                    &result);
  if (state == content::Stream::STREAM_ABORTED)
    result = net::ERR_CONNECTION_RESET;

  // Clear the buffers before notifying the read is complete, so that it is
  // safe for the observer to read.
  pending_buffer_ = nullptr;
  pending_buffer_size_ = 0;

  if (result > 0)
    total_bytes_read_ += result;
  ReadRawDataComplete(result);
}

int URLRequestFetchJob::ReadRawData(net::IOBuffer* dest, int dest_size) {
  if (GetResponseCode() == 204) {
    request()->set_received_response_content_length(prefilter_bytes_read());
    return net::OK;
  }

  int bytes_read = 0;
  switch (stream_->ReadRawData(dest, dest_size, &bytes_read)) {
    case content::Stream::STREAM_HAS_DATA:
      total_bytes_read_ += bytes_read;
      return bytes_read;
    case content::Stream::STREAM_COMPLETE:
      return stream_->GetStatus();
    case content::Stream::STREAM_EMPTY:
      pending_buffer_ = dest;
      pending_buffer_size_ = dest_size;
      return net::ERR_IO_PENDING;
    case content::Stream::STREAM_ABORTED:
      // Handle this as connection reset.
      return net::ERR_CONNECTION_RESET;
  }
  NOTREACHED();
  return net::ERR_FAILED;
}

bool URLRequestFetchJob::GetMimeType(std::string* mime_type) const {
  if (!response_info_ || !response_info_->headers)
    return false;

  return response_info_->headers->GetMimeType(mime_type);
}

void URLRequestFetchJob::GetResponseInfo(net::HttpResponseInfo* info) {
  if (response_info_)
    *info = *response_info_;
}

int URLRequestFetchJob::GetResponseCode() const {
  if (!response_info_ || !response_info_->headers)
    return -1;

  return response_info_->headers->response_code();
}

int64_t URLRequestFetchJob::GetTotalReceivedBytes() const {
  int64_t total_received_bytes = 0;
  if (response_info_)
    total_received_bytes = response_info_->headers->raw_headers().size();
  if (stream_.get())
    total_received_bytes += total_bytes_read_;
  return total_received_bytes;
}

void URLRequestFetchJob::OnURLFetchComplete(const net::URLFetcher* source) {
  auto status = fetcher_->GetStatus();
  if (status.is_success()) {
    if (!response_info_) {
      // Header completion is only notified after the first write, so no
      // response object has been constructed yet for HTTP responses with no
      // content (e.g. 204). Notify header completion here instead.
      HeadersCompleted();
      return;
    }
    stream_->Finalize(0);
  } else {
    stream_->Finalize(status.error());
    NotifyStartError(status);
  }
}

void URLRequestFetchJob::ClearStream() {
  // The stream may never have been created, e.g. if the job is killed before
  // StartAsync() has run.
  if (!stream_.get())
    return;
  stream_->RemoveReadObserver(this);
  stream_ = nullptr;
}

}  // namespace atom