Chromium Code Reviews| Index: net/url_request/url_request_file_job.cc |
| diff --git a/net/url_request/url_request_file_job.cc b/net/url_request/url_request_file_job.cc |
| index b035c0d701b92ad55faf8d4564633fc35ca836c4..845a67eb067242d17d0511a28c80ea741271feab 100644 |
| --- a/net/url_request/url_request_file_job.cc |
| +++ b/net/url_request/url_request_file_job.cc |
| @@ -63,6 +63,7 @@ URLRequestFileJob::URLRequestFileJob( |
| stream_(new FileStream(file_task_runner)), |
| file_task_runner_(file_task_runner), |
| remaining_bytes_(0), |
| + range_parse_result_(net::OK), |
| weak_ptr_factory_(this) {} |
| void URLRequestFileJob::Start() { |
| @@ -83,22 +84,17 @@ void URLRequestFileJob::Kill() { |
| URLRequestJob::Kill(); |
| } |
| -bool URLRequestFileJob::ReadRawData(IOBuffer* dest, |
| - int dest_size, |
| - int* bytes_read) { |
| +int URLRequestFileJob::ReadRawData(IOBuffer* dest, int dest_size) { |
| DCHECK_NE(dest_size, 0); |
| - DCHECK(bytes_read); |
| DCHECK_GE(remaining_bytes_, 0); |
| if (remaining_bytes_ < dest_size) |
| - dest_size = static_cast<int>(remaining_bytes_); |
| + dest_size = remaining_bytes_; |
| // If we should copy zero bytes because |remaining_bytes_| is zero, short |
| // circuit here. |
| - if (!dest_size) { |
| - *bytes_read = 0; |
| - return true; |
| - } |
| + if (!dest_size) |
| + return 0; |
| int rv = stream_->Read(dest, |
| dest_size, |
| @@ -106,20 +102,11 @@ bool URLRequestFileJob::ReadRawData(IOBuffer* dest, |
| weak_ptr_factory_.GetWeakPtr(), |
| make_scoped_refptr(dest))); |
| if (rv >= 0) { |
| - // Data is immediately available. |
| - *bytes_read = rv; |
| remaining_bytes_ -= rv; |
| DCHECK_GE(remaining_bytes_, 0); |
| - return true; |
| } |
| - // Otherwise, a read error occured. We may just need to wait... |
| - if (rv == ERR_IO_PENDING) { |
| - SetStatus(URLRequestStatus(URLRequestStatus::IO_PENDING, 0)); |
| - } else { |
| - NotifyDone(URLRequestStatus(URLRequestStatus::FAILED, rv)); |
| - } |
| - return false; |
| + return rv; |
| } |
| bool URLRequestFileJob::IsRedirectResponse(GURL* location, |
| @@ -175,11 +162,18 @@ bool URLRequestFileJob::GetMimeType(std::string* mime_type) const { |
| return false; |
| } |
| +// Extracts headers that this job cares about from the supplied request headers. |
| +// Currently this job only cares about the Range header. Note that validation is |
| +// deferred to DidOpen, because this method may be called with a |
| +// URLRequest::Delegate call still on the stack, which means NotifyStartError is |
| +// not safe to call (it may reenter the delegate). |
|
Randy Smith (Not in Mondays)
2015/10/22 20:38:45
This comment is useful, but there are several other… [comment truncated in capture]
xunjieli
2015/10/23 13:43:08
Done.
I have a question though. Since SetExtraRequestHeaders… [comment truncated in capture]
Randy Smith (Not in Mondays)
2015/10/26 21:38:03
Hmmm. Good point; I think you're right. Specifically… [comment truncated in capture]
xunjieli
2015/10/27 14:17:21
Done. I've checked with Elly too.
|
| void URLRequestFileJob::SetExtraRequestHeaders( |
| const HttpRequestHeaders& headers) { |
| std::string range_header; |
| if (headers.GetHeader(HttpRequestHeaders::kRange, &range_header)) { |
| - // We only care about "Range" header here. |
| + // We only care about "Range" header here. This method stashes the value for |
| + // later use in DidRead(), which is responsible for some of the range |
|
Randy Smith (Not in Mondays)
2015/10/22 20:38:45
Should this refer to DidOpen() rather than DidRead()?
xunjieli
2015/10/23 13:43:08
Done.
|
| + // validation as well. |
| std::vector<HttpByteRange> ranges; |
| if (HttpUtil::ParseRangeHeader(range_header, &ranges)) { |
| if (ranges.size() == 1) { |
| @@ -189,8 +183,7 @@ void URLRequestFileJob::SetExtraRequestHeaders( |
| // because we need to do multipart encoding here. |
| // TODO(hclam): decide whether we want to support multiple range |
| // requests. |
| - NotifyDone(URLRequestStatus(URLRequestStatus::FAILED, |
| - ERR_REQUEST_RANGE_NOT_SATISFIABLE)); |
| + range_parse_result_ = net::ERR_REQUEST_RANGE_NOT_SATISFIABLE; |
| } |
| } |
| } |
| @@ -251,13 +244,19 @@ void URLRequestFileJob::DidFetchMetaInfo(const FileMetaInfo* meta_info) { |
| void URLRequestFileJob::DidOpen(int result) { |
| if (result != OK) { |
| - NotifyDone(URLRequestStatus(URLRequestStatus::FAILED, result)); |
| + NotifyStartError(URLRequestStatus(URLRequestStatus::FAILED, result)); |
| + return; |
| + } |
| + |
| + if (range_parse_result_ != net::OK) { |
| + NotifyStartError( |
| + URLRequestStatus(URLRequestStatus::FAILED, range_parse_result_)); |
| return; |
| } |
| if (!byte_range_.ComputeBounds(meta_info_.file_size)) { |
| - NotifyDone(URLRequestStatus(URLRequestStatus::FAILED, |
| - ERR_REQUEST_RANGE_NOT_SATISFIABLE)); |
| + NotifyStartError(URLRequestStatus(URLRequestStatus::FAILED, |
| + net::ERR_REQUEST_RANGE_NOT_SATISFIABLE)); |
| return; |
| } |
| @@ -285,8 +284,8 @@ void URLRequestFileJob::DidOpen(int result) { |
| void URLRequestFileJob::DidSeek(int64 result) { |
| OnSeekComplete(result); |
| if (result != byte_range_.first_byte_position()) { |
| - NotifyDone(URLRequestStatus(URLRequestStatus::FAILED, |
| - ERR_REQUEST_RANGE_NOT_SATISFIABLE)); |
| + NotifyStartError(URLRequestStatus(URLRequestStatus::FAILED, |
| + ERR_REQUEST_RANGE_NOT_SATISFIABLE)); |
| return; |
| } |
| @@ -295,8 +294,7 @@ void URLRequestFileJob::DidSeek(int64 result) { |
| } |
| void URLRequestFileJob::DidRead(scoped_refptr<IOBuffer> buf, int result) { |
| - if (result > 0) { |
| - SetStatus(URLRequestStatus()); // Clear the IO_PENDING status |
| + if (result >= 0) { |
| remaining_bytes_ -= result; |
| DCHECK_GE(remaining_bytes_, 0); |
| } |
| @@ -304,13 +302,7 @@ void URLRequestFileJob::DidRead(scoped_refptr<IOBuffer> buf, int result) { |
| OnReadComplete(buf.get(), result); |
| buf = NULL; |
| - if (result == 0) { |
| - NotifyDone(URLRequestStatus()); |
| - } else if (result < 0) { |
| - NotifyDone(URLRequestStatus(URLRequestStatus::FAILED, result)); |
| - } |
| - |
| - NotifyReadComplete(result); |
| + ReadRawDataComplete(result); |
| } |
| } // namespace net |