Fix compilation errors on OS X

Cheng Zhao 2016-03-08 23:28:53 +09:00
parent 4503aafe64
commit 5fae63a2f5
93 changed files with 242 additions and 317 deletions

View file

@@ -44,6 +44,7 @@ URLRequestAsarJob::URLRequestAsarJob(
     : net::URLRequestJob(request, network_delegate),
       type_(TYPE_ERROR),
       remaining_bytes_(0),
+      range_parse_result_(net::OK),
       weak_ptr_factory_(this) {}
 URLRequestAsarJob::~URLRequestAsarJob() {}
@@ -99,7 +100,7 @@ void URLRequestAsarJob::InitializeFileJob(
 void URLRequestAsarJob::Start() {
   if (type_ == TYPE_ASAR) {
-    remaining_bytes_ = static_cast<int64>(file_info_.size);
+    remaining_bytes_ = static_cast<int64_t>(file_info_.size);
     int flags = base::File::FLAG_OPEN |
                 base::File::FLAG_READ |
@@ -131,18 +132,14 @@ void URLRequestAsarJob::Kill() {
   URLRequestJob::Kill();
 }
-bool URLRequestAsarJob::ReadRawData(net::IOBuffer* dest,
-                                    int dest_size,
-                                    int* bytes_read) {
+int URLRequestAsarJob::ReadRawData(net::IOBuffer* dest, int dest_size) {
   if (remaining_bytes_ < dest_size)
     dest_size = static_cast<int>(remaining_bytes_);
   // If we should copy zero bytes because |remaining_bytes_| is zero, short
   // circuit here.
-  if (!dest_size) {
-    *bytes_read = 0;
-    return true;
-  }
+  if (!dest_size)
+    return 0;
   int rv = stream_->Read(dest,
                          dest_size,
@@ -150,20 +147,11 @@ bool URLRequestAsarJob::ReadRawData(net::IOBuffer* dest,
                                     weak_ptr_factory_.GetWeakPtr(),
                                     make_scoped_refptr(dest)));
   if (rv >= 0) {
     // Data is immediately available.
-    *bytes_read = rv;
     remaining_bytes_ -= rv;
     DCHECK_GE(remaining_bytes_, 0);
-    return true;
   }
-  // Otherwise, a read error occured. We may just need to wait...
-  if (rv == net::ERR_IO_PENDING) {
-    SetStatus(net::URLRequestStatus(net::URLRequestStatus::IO_PENDING, 0));
-  } else {
-    NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, rv));
-  }
-  return false;
+  return rv;
 }
 bool URLRequestAsarJob::IsRedirectResponse(GURL* location,
@@ -214,15 +202,16 @@ void URLRequestAsarJob::SetExtraRequestHeaders(
     const net::HttpRequestHeaders& headers) {
   std::string range_header;
   if (headers.GetHeader(net::HttpRequestHeaders::kRange, &range_header)) {
-    // We only care about "Range" header here.
+    // This job only cares about the Range header. This method stashes the value
+    // for later use in DidOpen(), which is responsible for some of the range
+    // validation as well. NotifyStartError is not legal to call here since
+    // the job has not started.
     std::vector<net::HttpByteRange> ranges;
     if (net::HttpUtil::ParseRangeHeader(range_header, &ranges)) {
       if (ranges.size() == 1) {
         byte_range_ = ranges[0];
       } else {
-        NotifyDone(net::URLRequestStatus(
-            net::URLRequestStatus::FAILED,
-            net::ERR_REQUEST_RANGE_NOT_SATISFIABLE));
+        range_parse_result_ = net::ERR_REQUEST_RANGE_NOT_SATISFIABLE;
       }
     }
   }
@@ -274,7 +263,14 @@ void URLRequestAsarJob::DidFetchMetaInfo(const FileMetaInfo* meta_info) {
 void URLRequestAsarJob::DidOpen(int result) {
   if (result != net::OK) {
-    NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, result));
+    NotifyStartError(net::URLRequestStatus(net::URLRequestStatus::FAILED,
+                                           result));
     return;
   }
+  if (range_parse_result_ != net::OK) {
+    NotifyStartError(net::URLRequestStatus(net::URLRequestStatus::FAILED,
+                                           range_parse_result_));
+    return;
+  }
@@ -289,8 +285,9 @@ void URLRequestAsarJob::DidOpen(int result) {
     }
   } else {
     if (!byte_range_.ComputeBounds(meta_info_.file_size)) {
-      NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED,
-                 net::ERR_REQUEST_RANGE_NOT_SATISFIABLE));
+      NotifyStartError(
+          net::URLRequestStatus(net::URLRequestStatus::FAILED,
+                                net::ERR_REQUEST_RANGE_NOT_SATISFIABLE));
       return;
     }
@@ -315,17 +312,19 @@ void URLRequestAsarJob::DidOpen(int result) {
   }
 }
-void URLRequestAsarJob::DidSeek(int64 result) {
+void URLRequestAsarJob::DidSeek(int64_t result) {
   if (type_ == TYPE_ASAR) {
-    if (result != static_cast<int64>(file_info_.offset)) {
-      NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED,
-                 net::ERR_REQUEST_RANGE_NOT_SATISFIABLE));
+    if (result != static_cast<int64_t>(file_info_.offset)) {
+      NotifyStartError(
+          net::URLRequestStatus(net::URLRequestStatus::FAILED,
+                                net::ERR_REQUEST_RANGE_NOT_SATISFIABLE));
       return;
     }
   } else {
     if (result != byte_range_.first_byte_position()) {
-      NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED,
-                 net::ERR_REQUEST_RANGE_NOT_SATISFIABLE));
+      NotifyStartError(
+          net::URLRequestStatus(net::URLRequestStatus::FAILED,
+                                net::ERR_REQUEST_RANGE_NOT_SATISFIABLE));
       return;
     }
   }
@@ -334,21 +333,14 @@ void URLRequestAsarJob::DidSeek(int64 result) {
 }
 void URLRequestAsarJob::DidRead(scoped_refptr<net::IOBuffer> buf, int result) {
-  if (result > 0) {
-    SetStatus(net::URLRequestStatus());  // Clear the IO_PENDING status
+  if (result >= 0) {
     remaining_bytes_ -= result;
     DCHECK_GE(remaining_bytes_, 0);
   }
   buf = NULL;
-  if (result == 0) {
-    NotifyDone(net::URLRequestStatus());
-  } else if (result < 0) {
-    NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, result));
-  }
-  NotifyReadComplete(result);
+  ReadRawDataComplete(result);
 }
 }  // namespace asar
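
The asar job changes above track the net::URLRequestJob read API refactor: ReadRawData() no longer returns a bool plus a |bytes_read| out-parameter; it returns the result directly. A value >= 0 is the number of bytes read synchronously (0 means EOF), a negative value is a net error, and net::ERR_IO_PENDING means the job will report the outcome later through ReadRawDataComplete(). The sketch below is a minimal illustration of that contract, not Electron code; the class name SketchJob and the stream_ member are assumptions.

    // Minimal sketch of a job written against the new ReadRawData() contract.
    #include "base/bind.h"
    #include "base/memory/scoped_ptr.h"
    #include "base/memory/weak_ptr.h"
    #include "net/base/file_stream.h"
    #include "net/base/io_buffer.h"
    #include "net/url_request/url_request_job.h"

    class SketchJob : public net::URLRequestJob {
     public:
      SketchJob(net::URLRequest* request, net::NetworkDelegate* network_delegate)
          : net::URLRequestJob(request, network_delegate),
            weak_ptr_factory_(this) {}

      int ReadRawData(net::IOBuffer* dest, int dest_size) override {
        int rv = stream_->Read(dest, dest_size,
                               base::Bind(&SketchJob::DidRead,
                                          weak_ptr_factory_.GetWeakPtr()));
        // rv >= 0: bytes copied synchronously (0 == end of stream).
        // rv < 0:  a net error; ERR_IO_PENDING defers completion to DidRead().
        return rv;
      }

     private:
      void DidRead(int result) {
        // One call replaces the old SetStatus()/NotifyDone()/NotifyReadComplete()
        // bookkeeping; it reports both success (result >= 0) and failure (< 0).
        ReadRawDataComplete(result);
      }

      scoped_ptr<net::FileStream> stream_;
      base::WeakPtrFactory<SketchJob> weak_ptr_factory_;
    };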

View file

@@ -54,9 +54,7 @@ class URLRequestAsarJob : public net::URLRequestJob {
   // net::URLRequestJob:
   void Start() override;
   void Kill() override;
-  bool ReadRawData(net::IOBuffer* buf,
-                   int buf_size,
-                   int* bytes_read) override;
+  int ReadRawData(net::IOBuffer* buf, int buf_size) override;
   bool IsRedirectResponse(GURL* location, int* http_status_code) override;
   net::Filter* SetupFilter() const override;
   bool GetMimeType(std::string* mime_type) const override;
@@ -72,7 +70,7 @@ class URLRequestAsarJob : public net::URLRequestJob {
     FileMetaInfo();
     // Size of the file.
-    int64 file_size;
+    int64_t file_size;
     // Mime type associated with the file.
     std::string mime_type;
     // Result returned from GetMimeTypeFromFile(), i.e. flag showing whether
@@ -97,7 +95,7 @@ class URLRequestAsarJob : public net::URLRequestJob {
   // Callback after seeking to the beginning of |byte_range_| in the file
   // on a background thread.
-  void DidSeek(int64 result);
+  void DidSeek(int64_t result);
   // Callback after data is asynchronously read from the file into |buf|.
   void DidRead(scoped_refptr<net::IOBuffer> buf, int result);
@@ -119,7 +117,9 @@ class URLRequestAsarJob : public net::URLRequestJob {
   scoped_refptr<base::TaskRunner> file_task_runner_;
   net::HttpByteRange byte_range_;
-  int64 remaining_bytes_;
+  int64_t remaining_bytes_;
+  net::Error range_parse_result_;
   base::WeakPtrFactory<URLRequestAsarJob> weak_ptr_factory_;
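
The new net::Error range_parse_result_ member backs the deferred error handling seen in the .cc hunks: SetExtraRequestHeaders() runs before the job has started, where NotifyStartError() may not be called, so a bad Range header is only recorded there and reported once DidOpen() runs. A rough sketch of that store-then-report pattern, reusing the placeholder SketchJob class from above:

    // Header parsing happens before Start(); only remember the failure here.
    void SketchJob::SetExtraRequestHeaders(const net::HttpRequestHeaders& headers) {
      std::string range_header;
      if (!headers.GetHeader(net::HttpRequestHeaders::kRange, &range_header))
        return;
      std::vector<net::HttpByteRange> ranges;
      if (net::HttpUtil::ParseRangeHeader(range_header, &ranges)) {
        if (ranges.size() == 1)
          byte_range_ = ranges[0];
        else
          range_parse_result_ = net::ERR_REQUEST_RANGE_NOT_SATISFIABLE;
      }
    }

    // By the time the file has been opened the job has started, so the stored
    // error can finally be surfaced through NotifyStartError().
    void SketchJob::DidOpen(int result) {
      if (range_parse_result_ != net::OK) {
        NotifyStartError(net::URLRequestStatus(net::URLRequestStatus::FAILED,
                                               range_parse_result_));
        return;
      }
      // ... continue with seeking and building response headers ...
    }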

View file

@@ -78,7 +78,7 @@ void ToDictionary(base::DictionaryValue* details, net::URLRequest* request) {
   scoped_ptr<base::ListValue> list(new base::ListValue);
   GetUploadData(list.get(), request);
   if (!list->empty())
-    details->Set("uploadData", list.Pass());
+    details->Set("uploadData", std::move(list));
 }
 void ToDictionary(base::DictionaryValue* details,
@@ -87,7 +87,7 @@ void ToDictionary(base::DictionaryValue* details,
   net::HttpRequestHeaders::Iterator it(headers);
   while (it.GetNext())
     dict->SetString(it.name(), it.value());
-  details->Set("requestHeaders", dict.Pass());
+  details->Set("requestHeaders", std::move(dict));
 }
 void ToDictionary(base::DictionaryValue* details,
@@ -107,10 +107,10 @@ void ToDictionary(base::DictionaryValue* details,
     } else {
       scoped_ptr<base::ListValue> values(new base::ListValue);
       values->AppendString(value);
-      dict->Set(key, values.Pass());
+      dict->Set(key, std::move(values));
     }
   }
-  details->Set("responseHeaders", dict.Pass());
+  details->Set("responseHeaders", std::move(dict));
   details->SetString("statusLine", headers->GetStatusLine());
   details->SetInteger("statusCode", headers->response_code());
 }
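
The remaining hunks are the mechanical half of the commit: Chromium was retiring scoped_ptr<T>::Pass() in favour of std::move() from <utility>, which works for scoped_ptr and std::unique_ptr alike. A small before/after sketch, assuming a dictionary key named "uploadData" as in the hunk above:

    #include <utility>  // std::move
    #include "base/memory/scoped_ptr.h"
    #include "base/values.h"

    void FillDetails(base::DictionaryValue* details) {
      scoped_ptr<base::ListValue> list(new base::ListValue);
      list->AppendString("example");  // illustrative value, not from the commit
      // Before: details->Set("uploadData", list.Pass());
      details->Set("uploadData", std::move(list));  // ownership moves to |details|
    }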

View file

@@ -81,7 +81,7 @@ class JsAsker : public RequestJob {
   void OnResponse(bool success, scoped_ptr<base::Value> value) {
     int error = net::ERR_NOT_IMPLEMENTED;
     if (success && value && !internal::IsErrorOptions(value.get(), &error)) {
-      StartAsync(value.Pass());
+      StartAsync(std::move(value));
     } else {
       RequestJob::NotifyStartError(
           net::URLRequestStatus(net::URLRequestStatus::FAILED, error));

View file

@@ -181,7 +181,7 @@ int URLRequestFetchJob::DataAvailable(net::IOBuffer* buffer, int num_bytes) {
   pending_buffer_ = nullptr;
   pending_buffer_size_ = 0;
-  NotifyReadComplete(bytes_read);
+  ReadRawDataComplete(bytes_read);
   return bytes_read;
 }
@@ -190,18 +190,15 @@ void URLRequestFetchJob::Kill() {
   fetcher_.reset();
 }
-bool URLRequestFetchJob::ReadRawData(net::IOBuffer* dest,
-                                     int dest_size,
-                                     int* bytes_read) {
+int URLRequestFetchJob::ReadRawData(net::IOBuffer* dest, int dest_size) {
   if (GetResponseCode() == 204) {
-    *bytes_read = 0;
     request()->set_received_response_content_length(prefilter_bytes_read());
-    return true;
+    return 0;
   }
   pending_buffer_ = dest;
   pending_buffer_size_ = dest_size;
-  SetStatus(net::URLRequestStatus(net::URLRequestStatus::IO_PENDING, 0));
-  return false;
+  return dest_size;
 }
bool URLRequestFetchJob::GetMimeType(std::string* mime_type) const {
@@ -234,9 +231,10 @@ void URLRequestFetchJob::OnURLFetchComplete(const net::URLFetcher* source) {
   pending_buffer_ = nullptr;
   pending_buffer_size_ = 0;
-  NotifyDone(fetcher_->GetStatus());
-  NotifyReadComplete(0);
+  if (fetcher_->GetStatus().is_success())
+    ReadRawDataComplete(0);
+  else
+    NotifyStartError(fetcher_->GetStatus());
 }
 }  // namespace atom
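
URLRequestFetchJob bridges a net::URLFetcher to the job API: ReadRawData() parks the caller's buffer in pending_buffer_, data arriving from the fetcher is handed back through ReadRawDataComplete(bytes_read) in DataAvailable(), and the final hunk maps overall completion to ReadRawDataComplete(0) on success or NotifyStartError() on failure. The sketch below illustrates the buffer hand-off; only the tail of DataAvailable() appears in the diff above, so the early-return and copy logic are assumptions, and SketchFetchJob is a placeholder name.

    // Sketch of a delegate callback feeding a pending read (names assumed).
    int SketchFetchJob::DataAvailable(net::IOBuffer* buffer, int num_bytes) {
      if (!pending_buffer_.get())
        return 0;  // no read outstanding yet (assumed behaviour)
      int bytes_read = std::min(num_bytes, pending_buffer_size_);
      memcpy(pending_buffer_->data(), buffer->data(), bytes_read);
      // Clear the pending state before completing, mirroring the hunk above.
      pending_buffer_ = nullptr;
      pending_buffer_size_ = 0;
      ReadRawDataComplete(bytes_read);
      return bytes_read;
    }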

View file

@@ -31,9 +31,7 @@ class URLRequestFetchJob : public JsAsker<net::URLRequestJob>,
   // net::URLRequestJob:
   void Kill() override;
-  bool ReadRawData(net::IOBuffer* buf,
-                   int buf_size,
-                   int* bytes_read) override;
+  int ReadRawData(net::IOBuffer* buf, int buf_size) override;
   bool GetMimeType(std::string* mime_type) const override;
   void GetResponseInfo(net::HttpResponseInfo* info) override;
   int GetResponseCode() const override;