| OLD | NEW |
| 1 // Copyright (c) 2014 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2014 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "content/browser/android/url_request_content_job.h" | 5 #include "content/browser/android/url_request_content_job.h" |
| 6 | 6 |
| 7 #include "base/android/content_uri_utils.h" | 7 #include "base/android/content_uri_utils.h" |
| 8 #include "base/bind.h" | 8 #include "base/bind.h" |
| 9 #include "base/files/file_util.h" | 9 #include "base/files/file_util.h" |
| 10 #include "base/message_loop/message_loop.h" | 10 #include "base/message_loop/message_loop.h" |
| 11 #include "base/task_runner.h" | 11 #include "base/task_runner.h" |
| 12 #include "net/base/file_stream.h" | 12 #include "net/base/file_stream.h" |
| 13 #include "net/base/io_buffer.h" | 13 #include "net/base/io_buffer.h" |
| 14 #include "net/base/net_errors.h" | |
| 15 #include "net/http/http_util.h" | 14 #include "net/http/http_util.h" |
| 16 #include "net/url_request/url_request_error_job.h" | 15 #include "net/url_request/url_request_error_job.h" |
| 17 #include "url/gurl.h" | 16 #include "url/gurl.h" |
| 18 | 17 |
| 19 namespace content { | 18 namespace content { |
| 20 | 19 |
| 21 // TODO(qinmin): Refactor this class to reuse the common code in | 20 // TODO(qinmin): Refactor this class to reuse the common code in |
| 22 // url_request_file_job.cc. | 21 // url_request_file_job.cc. |
| 23 URLRequestContentJob::ContentMetaInfo::ContentMetaInfo() | 22 URLRequestContentJob::ContentMetaInfo::ContentMetaInfo() |
| 24 : content_exists(false), | 23 : content_exists(false), |
| 25 content_size(0) { | 24 content_size(0) { |
| 26 } | 25 } |
| 27 | 26 |
| 28 URLRequestContentJob::URLRequestContentJob( | 27 URLRequestContentJob::URLRequestContentJob( |
| 29 net::URLRequest* request, | 28 net::URLRequest* request, |
| 30 net::NetworkDelegate* network_delegate, | 29 net::NetworkDelegate* network_delegate, |
| 31 const base::FilePath& content_path, | 30 const base::FilePath& content_path, |
| 32 const scoped_refptr<base::TaskRunner>& content_task_runner) | 31 const scoped_refptr<base::TaskRunner>& content_task_runner) |
| 33 : net::URLRequestJob(request, network_delegate), | 32 : net::URLRequestJob(request, network_delegate), |
| 34 content_path_(content_path), | 33 content_path_(content_path), |
| 35 stream_(new net::FileStream(content_task_runner)), | 34 stream_(new net::FileStream(content_task_runner)), |
| 36 content_task_runner_(content_task_runner), | 35 content_task_runner_(content_task_runner), |
| | 36 range_parse_result_(net::OK), |
| 37 remaining_bytes_(0), | 37 remaining_bytes_(0), |
| 38 io_pending_(false), | 38 io_pending_(false), |
| 39 weak_ptr_factory_(this) {} | 39 weak_ptr_factory_(this) {} |
| 40 | 40 |
| 41 void URLRequestContentJob::Start() { | 41 void URLRequestContentJob::Start() { |
| 42 ContentMetaInfo* meta_info = new ContentMetaInfo(); | 42 ContentMetaInfo* meta_info = new ContentMetaInfo(); |
| 43 content_task_runner_->PostTaskAndReply( | 43 content_task_runner_->PostTaskAndReply( |
| 44 FROM_HERE, | 44 FROM_HERE, |
| 45 base::Bind(&URLRequestContentJob::FetchMetaInfo, content_path_, | 45 base::Bind(&URLRequestContentJob::FetchMetaInfo, content_path_, |
| 46 base::Unretained(meta_info)), | 46 base::Unretained(meta_info)), |
| 47 base::Bind(&URLRequestContentJob::DidFetchMetaInfo, | 47 base::Bind(&URLRequestContentJob::DidFetchMetaInfo, |
| 48 weak_ptr_factory_.GetWeakPtr(), | 48 weak_ptr_factory_.GetWeakPtr(), |
| 49 base::Owned(meta_info))); | 49 base::Owned(meta_info))); |
| 50 } | 50 } |
| 51 | 51 |
| 52 void URLRequestContentJob::Kill() { | 52 void URLRequestContentJob::Kill() { |
| 53 stream_.reset(); | 53 stream_.reset(); |
| 54 weak_ptr_factory_.InvalidateWeakPtrs(); | 54 weak_ptr_factory_.InvalidateWeakPtrs(); |
| 55 | 55 |
| 56 net::URLRequestJob::Kill(); | 56 net::URLRequestJob::Kill(); |
| 57 } | 57 } |
| 58 | 58 |
| 59 bool URLRequestContentJob::ReadRawData(net::IOBuffer* dest, | 59 int URLRequestContentJob::ReadRawData(net::IOBuffer* dest, int dest_size) { |
| 60 int dest_size, | |
| 61 int* bytes_read) { | |
| 62 DCHECK_GT(dest_size, 0); | 60 DCHECK_GT(dest_size, 0); |
| 63 DCHECK(bytes_read); | |
| 64 DCHECK_GE(remaining_bytes_, 0); | 61 DCHECK_GE(remaining_bytes_, 0); |
| 65 | 62 |
| 66 if (remaining_bytes_ < dest_size) | 63 if (remaining_bytes_ < dest_size) |
| 67 dest_size = static_cast<int>(remaining_bytes_); | 64 dest_size = remaining_bytes_; |
| 68 | 65 |
| 69 // If we should copy zero bytes because |remaining_bytes_| is zero, short | 66 // If we should copy zero bytes because |remaining_bytes_| is zero, short |
| 70 // circuit here. | 67 // circuit here. |
| 71 if (!dest_size) { | 68 if (!dest_size) |
| 72 *bytes_read = 0; | 69 return 0; |
| 73 return true; | |
| 74 } | |
| 75 | 70 |
| 76 int rv = | 71 int rv = stream_->Read(dest, dest_size, |
| 77 stream_->Read(dest, dest_size, base::Bind(&URLRequestContentJob::DidRead, | 72 base::Bind(&URLRequestContentJob::DidRead, |
| 78 weak_ptr_factory_.GetWeakPtr(), | 73 weak_ptr_factory_.GetWeakPtr())); |
| 79 make_scoped_refptr(dest))); | |
| 80 if (rv >= 0) { | |
| 81 // Data is immediately available. | |
| 82 *bytes_read = rv; | |
| 83 remaining_bytes_ -= rv; | |
| 84 DCHECK_GE(remaining_bytes_, 0); | |
| 85 return true; | |
| 86 } | |
| 87 | |
| 88 // Otherwise, a read error occured. We may just need to wait... | |
| 89 if (rv == net::ERR_IO_PENDING) { | 74 if (rv == net::ERR_IO_PENDING) { |
| 90 io_pending_ = true; | 75 io_pending_ = true; |
| 91 SetStatus(net::URLRequestStatus(net::URLRequestStatus::IO_PENDING, 0)); | 76 } else if (rv > 0) { |
| 92 } else { | 77 remaining_bytes_ -= rv; |
| 93 NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, rv)); | |
| 94 } | 78 } |
| 95 return false; | 79 DCHECK_GE(remaining_bytes_, 0); |
| | 80 return rv; |
| 96 } | 81 } |
| 97 | 82 |
| 98 bool URLRequestContentJob::IsRedirectResponse(GURL* location, | 83 bool URLRequestContentJob::IsRedirectResponse(GURL* location, |
| 99 int* http_status_code) { | 84 int* http_status_code) { |
| 100 return false; | 85 return false; |
| 101 } | 86 } |
| 102 | 87 |
| 103 bool URLRequestContentJob::GetMimeType(std::string* mime_type) const { | 88 bool URLRequestContentJob::GetMimeType(std::string* mime_type) const { |
| 104 DCHECK(request_); | 89 DCHECK(request_); |
| 105 if (!meta_info_.mime_type.empty()) { | 90 if (!meta_info_.mime_type.empty()) { |
| 106 *mime_type = meta_info_.mime_type; | 91 *mime_type = meta_info_.mime_type; |
| 107 return true; | 92 return true; |
| 108 } | 93 } |
| 109 return false; | 94 return false; |
| 110 } | 95 } |
| 111 | 96 |
| 112 void URLRequestContentJob::SetExtraRequestHeaders( | 97 void URLRequestContentJob::SetExtraRequestHeaders( |
| 113 const net::HttpRequestHeaders& headers) { | 98 const net::HttpRequestHeaders& headers) { |
| 114 std::string range_header; | 99 std::string range_header; |
| 115 if (!headers.GetHeader(net::HttpRequestHeaders::kRange, &range_header)) | 100 if (!headers.GetHeader(net::HttpRequestHeaders::kRange, &range_header)) |
| 116 return; | 101 return; |
| 117 | 102 |
| 118 // We only care about "Range" header here. | 103 // Currently this job only cares about the Range header. Note that validation |
| | 104 // is deferred to DidOpen(), because NotifyStartError is not legal to call |
| | 105 // since the job has not started. |
| 119 std::vector<net::HttpByteRange> ranges; | 106 std::vector<net::HttpByteRange> ranges; |
| 120 if (net::HttpUtil::ParseRangeHeader(range_header, &ranges)) { | 107 if (net::HttpUtil::ParseRangeHeader(range_header, &ranges)) { |
| 121 if (ranges.size() == 1) { | 108 if (ranges.size() == 1) { |
| 122 byte_range_ = ranges[0]; | 109 byte_range_ = ranges[0]; |
| 123 } else { | 110 } else { |
| 124 // We don't support multiple range requests. | 111 // We don't support multiple range requests. |
| 125 NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, | 112 range_parse_result_ = net::ERR_REQUEST_RANGE_NOT_SATISFIABLE; |
| 126 net::ERR_REQUEST_RANGE_NOT_SATISFIABLE)); | |
| 127 } | 113 } |
| 128 } | 114 } |
| 129 } | 115 } |
| 130 | 116 |
| 131 URLRequestContentJob::~URLRequestContentJob() {} | 117 URLRequestContentJob::~URLRequestContentJob() {} |
| 132 | 118 |
| 133 void URLRequestContentJob::FetchMetaInfo(const base::FilePath& content_path, | 119 void URLRequestContentJob::FetchMetaInfo(const base::FilePath& content_path, |
| 134 ContentMetaInfo* meta_info) { | 120 ContentMetaInfo* meta_info) { |
| 135 base::File::Info file_info; | 121 base::File::Info file_info; |
| 136 meta_info->content_exists = base::GetFileInfo(content_path, &file_info); | 122 meta_info->content_exists = base::GetFileInfo(content_path, &file_info); |
| (...skipping 16 matching lines...) | |
| 153 base::File::FLAG_ASYNC; | 139 base::File::FLAG_ASYNC; |
| 154 int rv = stream_->Open(content_path_, flags, | 140 int rv = stream_->Open(content_path_, flags, |
| 155 base::Bind(&URLRequestContentJob::DidOpen, | 141 base::Bind(&URLRequestContentJob::DidOpen, |
| 156 weak_ptr_factory_.GetWeakPtr())); | 142 weak_ptr_factory_.GetWeakPtr())); |
| 157 if (rv != net::ERR_IO_PENDING) | 143 if (rv != net::ERR_IO_PENDING) |
| 158 DidOpen(rv); | 144 DidOpen(rv); |
| 159 } | 145 } |
| 160 | 146 |
| 161 void URLRequestContentJob::DidOpen(int result) { | 147 void URLRequestContentJob::DidOpen(int result) { |
| 162 if (result != net::OK) { | 148 if (result != net::OK) { |
| 163 NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, result)); | 149 NotifyStartError( |
| | 150 net::URLRequestStatus(net::URLRequestStatus::FAILED, result)); |
| | 151 return; |
| | 152 } |
| | 153 |
| | 154 if (range_parse_result_ != net::OK) { |
| | 155 NotifyStartError(net::URLRequestStatus(net::URLRequestStatus::FAILED, |
| | 156 range_parse_result_)); |
| 164 return; | 157 return; |
| 165 } | 158 } |
| 166 | 159 |
| 167 if (!byte_range_.ComputeBounds(meta_info_.content_size)) { | 160 if (!byte_range_.ComputeBounds(meta_info_.content_size)) { |
| 168 NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, | 161 NotifyStartError(net::URLRequestStatus( |
| 169 net::ERR_REQUEST_RANGE_NOT_SATISFIABLE)); | 162 net::URLRequestStatus::FAILED, net::ERR_REQUEST_RANGE_NOT_SATISFIABLE)); |
| 170 return; | 163 return; |
| 171 } | 164 } |
| 172 | 165 |
| 173 remaining_bytes_ = byte_range_.last_byte_position() - | 166 remaining_bytes_ = byte_range_.last_byte_position() - |
| 174 byte_range_.first_byte_position() + 1; | 167 byte_range_.first_byte_position() + 1; |
| 175 DCHECK_GE(remaining_bytes_, 0); | 168 DCHECK_GE(remaining_bytes_, 0); |
| 176 | 169 |
| 177 if (remaining_bytes_ > 0 && byte_range_.first_byte_position() != 0) { | 170 if (remaining_bytes_ > 0 && byte_range_.first_byte_position() != 0) { |
| 178 int rv = stream_->Seek(byte_range_.first_byte_position(), | 171 int rv = stream_->Seek(byte_range_.first_byte_position(), |
| 179 base::Bind(&URLRequestContentJob::DidSeek, | 172 base::Bind(&URLRequestContentJob::DidSeek, |
| 180 weak_ptr_factory_.GetWeakPtr())); | 173 weak_ptr_factory_.GetWeakPtr())); |
| 181 if (rv != net::ERR_IO_PENDING) { | 174 if (rv != net::ERR_IO_PENDING) { |
| 182 // stream_->Seek() failed, so pass an intentionally erroneous value | 175 // stream_->Seek() failed, so pass an intentionally erroneous value |
| 183 // into DidSeek(). | 176 // into DidSeek(). |
| 184 DidSeek(-1); | 177 DidSeek(-1); |
| 185 } | 178 } |
| 186 } else { | 179 } else { |
| 187 // We didn't need to call stream_->Seek() at all, so we pass to DidSeek() | 180 // We didn't need to call stream_->Seek() at all, so we pass to DidSeek() |
| 188 // the value that would mean seek success. This way we skip the code | 181 // the value that would mean seek success. This way we skip the code |
| 189 // handling seek failure. | 182 // handling seek failure. |
| 190 DidSeek(byte_range_.first_byte_position()); | 183 DidSeek(byte_range_.first_byte_position()); |
| 191 } | 184 } |
| 192 } | 185 } |
| 193 | 186 |
| 194 void URLRequestContentJob::DidSeek(int64 result) { | 187 void URLRequestContentJob::DidSeek(int64 result) { |
| 195 if (result != byte_range_.first_byte_position()) { | 188 if (result != byte_range_.first_byte_position()) { |
| 196 NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, | 189 NotifyStartError(net::URLRequestStatus( |
| 197 net::ERR_REQUEST_RANGE_NOT_SATISFIABLE)); | 190 net::URLRequestStatus::FAILED, net::ERR_REQUEST_RANGE_NOT_SATISFIABLE)); |
| 198 return; | 191 return; |
| 199 } | 192 } |
| 200 | 193 |
| 201 set_expected_content_size(remaining_bytes_); | 194 set_expected_content_size(remaining_bytes_); |
| 202 NotifyHeadersComplete(); | 195 NotifyHeadersComplete(); |
| 203 } | 196 } |
| 204 | 197 |
| 205 void URLRequestContentJob::DidRead(scoped_refptr<net::IOBuffer> buf, | 198 void URLRequestContentJob::DidRead(int result) { |
| 206 int result) { | 199 DCHECK(io_pending_); |
| | 200 io_pending_ = false; |
| | 201 |
| 207 if (result > 0) { | 202 if (result > 0) { |
| 208 SetStatus(net::URLRequestStatus()); // Clear the IO_PENDING status | |
| 209 remaining_bytes_ -= result; | 203 remaining_bytes_ -= result; |
| 210 DCHECK_GE(remaining_bytes_, 0); | 204 DCHECK_GE(remaining_bytes_, 0); |
| 211 } | 205 } |
| 212 | 206 |
| 213 DCHECK(io_pending_); | 207 ReadRawDataComplete(result); |
| 214 io_pending_ = false; | |
| 215 | |
| 216 if (result == 0) { | |
| 217 NotifyDone(net::URLRequestStatus()); | |
| 218 } else if (result < 0) { | |
| 219 NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, result)); | |
| 220 } | |
| 221 | |
| 222 NotifyReadComplete(result); | |
| 223 } | 208 } |
| 224 | 209 |
| 225 } // namespace content | 210 } // namespace content |
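
For reviewers less familiar with the read contract this CL adopts, here is a minimal, self-contained sketch of the control flow shown in the diff: ReadRawData() now returns the number of bytes read synchronously, a negative net error, or net::ERR_IO_PENDING, and an asynchronous read later reports its result through ReadRawDataComplete() instead of SetStatus()/NotifyReadComplete()/NotifyDone(). Everything below (FakeStream, SketchJob, the ERR_IO_PENDING constant) is a hypothetical stand-in, not the real net:: API; it only models the state machine, not Chromium behavior.

```cpp
// Self-contained sketch of the int-returning read contract. FakeStream and
// SketchJob are hypothetical stand-ins, not the real net:: classes.
#include <algorithm>
#include <functional>
#include <iostream>
#include <string>

namespace sketch {

constexpr int ERR_IO_PENDING = -1;  // placeholder value, not a real net error code

// Pretends to be an async stream: Read() either returns data synchronously or
// defers and delivers the result through the callback when Pump() is called.
class FakeStream {
 public:
  explicit FakeStream(std::string data) : data_(std::move(data)) {}

  int Read(char* dest, int dest_size, std::function<void(int)> callback) {
    if (defer_next_read_) {
      defer_next_read_ = false;
      pending_ = [this, dest, dest_size, callback] {
        callback(CopyOut(dest, dest_size));
      };
      return ERR_IO_PENDING;
    }
    return CopyOut(dest, dest_size);  // synchronous result: bytes read (0 = EOF)
  }

  void DeferNextRead() { defer_next_read_ = true; }
  void Pump() {
    if (!pending_) return;
    auto cb = pending_;
    pending_ = nullptr;
    cb();
  }

 private:
  int CopyOut(char* dest, int dest_size) {
    int n = std::min(dest_size, static_cast<int>(data_.size() - offset_));
    data_.copy(dest, n, offset_);
    offset_ += n;
    return n;
  }

  std::string data_;
  std::string::size_type offset_ = 0;
  bool defer_next_read_ = false;
  std::function<void()> pending_;
};

// Mirrors the job's new read path: the int return value carries the synchronous
// result, and DidRead() forwards the asynchronous one (standing in for
// URLRequestJob::ReadRawDataComplete()).
class SketchJob {
 public:
  explicit SketchJob(std::string data) : stream_(std::move(data)) {}

  int ReadRawData(char* dest, int dest_size) {
    int rv = stream_.Read(dest, dest_size,
                          [this](int result) { DidRead(result); });
    if (rv == ERR_IO_PENDING)
      io_pending_ = true;
    return rv;  // >= 0: bytes read now; ERR_IO_PENDING: DidRead() comes later
  }

  void DeferNextRead() { stream_.DeferNextRead(); }
  void Pump() { stream_.Pump(); }

 private:
  void DidRead(int result) {
    io_pending_ = false;
    std::cout << "async read completed: " << result << " bytes\n";
  }

  FakeStream stream_;
  bool io_pending_ = false;
};

}  // namespace sketch

int main() {
  sketch::SketchJob job("hello content job");
  char buf[8];

  std::cout << "sync read: " << job.ReadRawData(buf, sizeof(buf)) << " bytes\n";

  job.DeferNextRead();
  std::cout << "deferred read returned " << job.ReadRawData(buf, sizeof(buf))
            << " (ERR_IO_PENDING)\n";
  job.Pump();  // delivers the deferred result through DidRead()
  return 0;
}
```

Read against this model, the new ReadRawData() collapses the old SetStatus()/NotifyReadComplete()/NotifyDone() bookkeeping into a single return value, and DidRead() only clears io_pending_, updates remaining_bytes_, and hands the raw result to ReadRawDataComplete(), which now handles both success and error notification.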