Chromium Code Reviews| Index: content/browser/android/url_request_content_job.cc |
| diff --git a/content/browser/android/url_request_content_job.cc b/content/browser/android/url_request_content_job.cc |
| index 594e89f35e974c9dec70466a6cd5532dd367347c..9a70d3850eb0949478e395385face14d3fdd38e9 100644 |
| --- a/content/browser/android/url_request_content_job.cc |
| +++ b/content/browser/android/url_request_content_job.cc |
| @@ -11,7 +11,6 @@ |
| #include "base/task_runner.h" |
| #include "net/base/file_stream.h" |
| #include "net/base/io_buffer.h" |
| -#include "net/base/net_errors.h" |
| #include "net/http/http_util.h" |
| #include "net/url_request/url_request_error_job.h" |
| #include "url/gurl.h" |
| @@ -34,6 +33,7 @@ URLRequestContentJob::URLRequestContentJob( |
| content_path_(content_path), |
| stream_(new net::FileStream(content_task_runner)), |
| content_task_runner_(content_task_runner), |
| + range_parse_result_(net::OK), |
| remaining_bytes_(0), |
| io_pending_(false), |
| weak_ptr_factory_(this) {} |
| @@ -56,44 +56,29 @@ void URLRequestContentJob::Kill() { |
| net::URLRequestJob::Kill(); |
| } |
| -bool URLRequestContentJob::ReadRawData(net::IOBuffer* dest, |
| - int dest_size, |
| - int* bytes_read) { |
| +int URLRequestContentJob::ReadRawData(net::IOBuffer* dest, int dest_size) { |
| DCHECK_GT(dest_size, 0); |
| - DCHECK(bytes_read); |
| DCHECK_GE(remaining_bytes_, 0); |
| if (remaining_bytes_ < dest_size) |
| - dest_size = static_cast<int>(remaining_bytes_); |
| + dest_size = remaining_bytes_; |
| // If we should copy zero bytes because |remaining_bytes_| is zero, short |
| // circuit here. |
| - if (!dest_size) { |
| - *bytes_read = 0; |
| - return true; |
| - } |
| + if (!dest_size) |
| + return 0; |
| - int rv = stream_->Read(dest, |
| - dest_size, |
| - base::Bind(&URLRequestContentJob::DidRead, |
| - weak_ptr_factory_.GetWeakPtr(), |
| - make_scoped_refptr(dest))); |
| - if (rv >= 0) { |
| - // Data is immediately available. |
| - *bytes_read = rv; |
| - remaining_bytes_ -= rv; |
| - DCHECK_GE(remaining_bytes_, 0); |
| - return true; |
| - } |
| - |
| - // Otherwise, a read error occured. We may just need to wait... |
| + int rv = |
| + stream_->Read(dest, dest_size, base::Bind(&URLRequestContentJob::DidRead, |
| + weak_ptr_factory_.GetWeakPtr(), |
| + make_scoped_refptr(dest))); |
| if (rv == net::ERR_IO_PENDING) { |
| io_pending_ = true; |
| - SetStatus(net::URLRequestStatus(net::URLRequestStatus::IO_PENDING, 0)); |
| - } else { |
| - NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, rv)); |
| + } else if (rv > 0) { |
| + remaining_bytes_ -= rv; |
| } |
| - return false; |
| + DCHECK_GE(remaining_bytes_, 0); |
| + return rv; |
| } |
| bool URLRequestContentJob::IsRedirectResponse(GURL* location, |
| @@ -110,6 +95,10 @@ bool URLRequestContentJob::GetMimeType(std::string* mime_type) const { |
| return false; |
| } |
| +// Extracts headers that this job cares about from the supplied request headers. |
| +// Currently this job only cares about the Range header. Note that validation is |
| +// deferred to DidOpen(), because NotifyStartError is not legal to call until |
| +// the job has started. |
|
mmenke
2015/10/28 16:40:42
nit: See other comments about these.
xunjieli
2015/10/28 21:11:52
Done. Sorry, missed this one.
|
| void URLRequestContentJob::SetExtraRequestHeaders( |
| const net::HttpRequestHeaders& headers) { |
| std::string range_header; |
| @@ -123,9 +112,8 @@ void URLRequestContentJob::SetExtraRequestHeaders( |
| byte_range_ = ranges[0]; |
| } else { |
| // We don't support multiple range requests. |
| - NotifyDone(net::URLRequestStatus( |
| - net::URLRequestStatus::FAILED, |
| - net::ERR_REQUEST_RANGE_NOT_SATISFIABLE)); |
| + // Save the failure and report it in DidOpen(). |
| + range_parse_result_ = net::ERR_REQUEST_RANGE_NOT_SATISFIABLE; |
| } |
| } |
| } |
| @@ -162,13 +150,20 @@ void URLRequestContentJob::DidFetchMetaInfo(const ContentMetaInfo* meta_info) { |
| void URLRequestContentJob::DidOpen(int result) { |
| if (result != net::OK) { |
| - NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, result)); |
| + NotifyStartError( |
| + net::URLRequestStatus(net::URLRequestStatus::FAILED, result)); |
| + return; |
| + } |
| + |
| + if (range_parse_result_ != net::OK) { |
| + NotifyStartError(net::URLRequestStatus(net::URLRequestStatus::FAILED, |
| + range_parse_result_)); |
| return; |
| } |
| if (!byte_range_.ComputeBounds(meta_info_.content_size)) { |
| - NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, |
| - net::ERR_REQUEST_RANGE_NOT_SATISFIABLE)); |
| + NotifyStartError(net::URLRequestStatus( |
| + net::URLRequestStatus::FAILED, net::ERR_REQUEST_RANGE_NOT_SATISFIABLE)); |
| return; |
| } |
| @@ -195,8 +190,8 @@ void URLRequestContentJob::DidOpen(int result) { |
| void URLRequestContentJob::DidSeek(int64 result) { |
| if (result != byte_range_.first_byte_position()) { |
| - NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, |
| - net::ERR_REQUEST_RANGE_NOT_SATISFIABLE)); |
| + NotifyStartError(net::URLRequestStatus( |
| + net::URLRequestStatus::FAILED, net::ERR_REQUEST_RANGE_NOT_SATISFIABLE)); |
| return; |
| } |
| @@ -204,24 +199,17 @@ void URLRequestContentJob::DidSeek(int64 result) { |
| NotifyHeadersComplete(); |
| } |
| -void URLRequestContentJob::DidRead( |
| - scoped_refptr<net::IOBuffer> buf, int result) { |
| - if (result > 0) { |
| - SetStatus(net::URLRequestStatus()); // Clear the IO_PENDING status |
| - remaining_bytes_ -= result; |
| - DCHECK_GE(remaining_bytes_, 0); |
| - } |
| - |
| +void URLRequestContentJob::DidRead(scoped_refptr<net::IOBuffer> buf, |
| + int result) { |
| DCHECK(io_pending_); |
| io_pending_ = false; |
| - if (result == 0) { |
| - NotifyDone(net::URLRequestStatus()); |
| - } else if (result < 0) { |
| - NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, result)); |
| + if (result > 0) { |
| + remaining_bytes_ -= result; |
| + DCHECK_GE(remaining_bytes_, 0); |
| } |
| - NotifyReadComplete(result); |
| + ReadRawDataComplete(result); |
| } |
| } // namespace content |