| Index: content/browser/android/url_request_content_job.cc
|
| diff --git a/content/browser/android/url_request_content_job.cc b/content/browser/android/url_request_content_job.cc
|
| index 1bcbf2106c0ec893dad869818f33bdd8502c1022..6e661a21462bcce9e6a6d99e489cb2efefdd0e87 100644
|
| --- a/content/browser/android/url_request_content_job.cc
|
| +++ b/content/browser/android/url_request_content_job.cc
|
| @@ -11,6 +11,7 @@
|
| #include "base/task_runner.h"
|
| #include "net/base/file_stream.h"
|
| #include "net/base/io_buffer.h"
|
| +#include "net/base/net_errors.h"
|
| #include "net/http/http_util.h"
|
| #include "net/url_request/url_request_error_job.h"
|
| #include "url/gurl.h"
|
| @@ -33,7 +34,6 @@ URLRequestContentJob::URLRequestContentJob(
|
| content_path_(content_path),
|
| stream_(new net::FileStream(content_task_runner)),
|
| content_task_runner_(content_task_runner),
|
| - range_parse_result_(net::OK),
|
| remaining_bytes_(0),
|
| io_pending_(false),
|
| weak_ptr_factory_(this) {}
|
| @@ -56,28 +56,43 @@ void URLRequestContentJob::Kill() {
|
| net::URLRequestJob::Kill();
|
| }
|
|
|
| -int URLRequestContentJob::ReadRawData(net::IOBuffer* dest, int dest_size) {
|
| +bool URLRequestContentJob::ReadRawData(net::IOBuffer* dest,
|
| + int dest_size,
|
| + int* bytes_read) {
|
| DCHECK_GT(dest_size, 0);
|
| + DCHECK(bytes_read);
|
| DCHECK_GE(remaining_bytes_, 0);
|
|
|
| if (remaining_bytes_ < dest_size)
|
| - dest_size = remaining_bytes_;
|
| + dest_size = static_cast<int>(remaining_bytes_);
|
|
|
| // If we should copy zero bytes because |remaining_bytes_| is zero, short
|
| // circuit here.
|
| - if (!dest_size)
|
| - return 0;
|
| + if (!dest_size) {
|
| + *bytes_read = 0;
|
| + return true;
|
| + }
|
|
|
| - int rv = stream_->Read(dest, dest_size,
|
| - base::Bind(&URLRequestContentJob::DidRead,
|
| - weak_ptr_factory_.GetWeakPtr()));
|
| + int rv =
|
| + stream_->Read(dest, dest_size, base::Bind(&URLRequestContentJob::DidRead,
|
| + weak_ptr_factory_.GetWeakPtr(),
|
| + make_scoped_refptr(dest)));
|
| + if (rv >= 0) {
|
| + // Data is immediately available.
|
| + *bytes_read = rv;
|
| + remaining_bytes_ -= rv;
|
| + DCHECK_GE(remaining_bytes_, 0);
|
| + return true;
|
| + }
|
| +
|
| + // Otherwise, a read error occurred. We may just need to wait...
|
| if (rv == net::ERR_IO_PENDING) {
|
| io_pending_ = true;
|
| - } else if (rv > 0) {
|
| - remaining_bytes_ -= rv;
|
| + SetStatus(net::URLRequestStatus(net::URLRequestStatus::IO_PENDING, 0));
|
| + } else {
|
| + NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, rv));
|
| }
|
| - DCHECK_GE(remaining_bytes_, 0);
|
| - return rv;
|
| + return false;
|
| }
|
|
|
| bool URLRequestContentJob::IsRedirectResponse(GURL* location,
|
| @@ -100,16 +115,15 @@ void URLRequestContentJob::SetExtraRequestHeaders(
|
| if (!headers.GetHeader(net::HttpRequestHeaders::kRange, &range_header))
|
| return;
|
|
|
| - // Currently this job only cares about the Range header. Note that validation
|
| - // is deferred to DidOpen(), because NotifyStartError is not legal to call
|
| - // since the job has not started.
|
| + // We only care about the "Range" header here.
|
| std::vector<net::HttpByteRange> ranges;
|
| if (net::HttpUtil::ParseRangeHeader(range_header, &ranges)) {
|
| if (ranges.size() == 1) {
|
| byte_range_ = ranges[0];
|
| } else {
|
| // We don't support multiple range requests.
|
| - range_parse_result_ = net::ERR_REQUEST_RANGE_NOT_SATISFIABLE;
|
| + NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED,
|
| + net::ERR_REQUEST_RANGE_NOT_SATISFIABLE));
|
| }
|
| }
|
| }
|
| @@ -146,20 +160,13 @@ void URLRequestContentJob::DidFetchMetaInfo(const ContentMetaInfo* meta_info) {
|
|
|
| void URLRequestContentJob::DidOpen(int result) {
|
| if (result != net::OK) {
|
| - NotifyStartError(
|
| - net::URLRequestStatus(net::URLRequestStatus::FAILED, result));
|
| - return;
|
| - }
|
| -
|
| - if (range_parse_result_ != net::OK) {
|
| - NotifyStartError(net::URLRequestStatus(net::URLRequestStatus::FAILED,
|
| - range_parse_result_));
|
| + NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, result));
|
| return;
|
| }
|
|
|
| if (!byte_range_.ComputeBounds(meta_info_.content_size)) {
|
| - NotifyStartError(net::URLRequestStatus(
|
| - net::URLRequestStatus::FAILED, net::ERR_REQUEST_RANGE_NOT_SATISFIABLE));
|
| + NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED,
|
| + net::ERR_REQUEST_RANGE_NOT_SATISFIABLE));
|
| return;
|
| }
|
|
|
| @@ -186,8 +193,8 @@ void URLRequestContentJob::DidOpen(int result) {
|
|
|
| void URLRequestContentJob::DidSeek(int64 result) {
|
| if (result != byte_range_.first_byte_position()) {
|
| - NotifyStartError(net::URLRequestStatus(
|
| - net::URLRequestStatus::FAILED, net::ERR_REQUEST_RANGE_NOT_SATISFIABLE));
|
| + NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED,
|
| + net::ERR_REQUEST_RANGE_NOT_SATISFIABLE));
|
| return;
|
| }
|
|
|
| @@ -195,16 +202,24 @@ void URLRequestContentJob::DidSeek(int64 result) {
|
| NotifyHeadersComplete();
|
| }
|
|
|
| -void URLRequestContentJob::DidRead(int result) {
|
| - DCHECK(io_pending_);
|
| - io_pending_ = false;
|
| -
|
| +void URLRequestContentJob::DidRead(scoped_refptr<net::IOBuffer> buf,
|
| + int result) {
|
| if (result > 0) {
|
| + SetStatus(net::URLRequestStatus()); // Clear the IO_PENDING status
|
| remaining_bytes_ -= result;
|
| DCHECK_GE(remaining_bytes_, 0);
|
| }
|
|
|
| - ReadRawDataComplete(result);
|
| + DCHECK(io_pending_);
|
| + io_pending_ = false;
|
| +
|
| + if (result == 0) {
|
| + NotifyDone(net::URLRequestStatus());
|
| + } else if (result < 0) {
|
| + NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, result));
|
| + }
|
| +
|
| + NotifyReadComplete(result);
|
| }
|
|
|
| } // namespace content
|
|
|