| OLD | NEW |
| 1 // Copyright (c) 2014 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2014 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "content/browser/android/url_request_content_job.h" | 5 #include "content/browser/android/url_request_content_job.h" |
| 6 | 6 |
| 7 #include "base/android/content_uri_utils.h" | 7 #include "base/android/content_uri_utils.h" |
| 8 #include "base/bind.h" | 8 #include "base/bind.h" |
| 9 #include "base/files/file_util.h" | 9 #include "base/files/file_util.h" |
| 10 #include "base/message_loop/message_loop.h" | 10 #include "base/message_loop/message_loop.h" |
| 11 #include "base/task_runner.h" | 11 #include "base/task_runner.h" |
| 12 #include "net/base/file_stream.h" | 12 #include "net/base/file_stream.h" |
| 13 #include "net/base/io_buffer.h" | 13 #include "net/base/io_buffer.h" |
| 14 #include "net/base/net_errors.h" | |
| 15 #include "net/http/http_util.h" | 14 #include "net/http/http_util.h" |
| 16 #include "net/url_request/url_request_error_job.h" | 15 #include "net/url_request/url_request_error_job.h" |
| 17 #include "url/gurl.h" | 16 #include "url/gurl.h" |
| 18 | 17 |
| 19 namespace content { | 18 namespace content { |
| 20 | 19 |
| 21 // TODO(qinmin): Refactor this class to reuse the common code in | 20 // TODO(qinmin): Refactor this class to reuse the common code in |
| 22 // url_request_file_job.cc. | 21 // url_request_file_job.cc. |
| 23 URLRequestContentJob::ContentMetaInfo::ContentMetaInfo() | 22 URLRequestContentJob::ContentMetaInfo::ContentMetaInfo() |
| 24 : content_exists(false), | 23 : content_exists(false), |
| 25 content_size(0) { | 24 content_size(0) { |
| 26 } | 25 } |
| 27 | 26 |
| 28 URLRequestContentJob::URLRequestContentJob( | 27 URLRequestContentJob::URLRequestContentJob( |
| 29 net::URLRequest* request, | 28 net::URLRequest* request, |
| 30 net::NetworkDelegate* network_delegate, | 29 net::NetworkDelegate* network_delegate, |
| 31 const base::FilePath& content_path, | 30 const base::FilePath& content_path, |
| 32 const scoped_refptr<base::TaskRunner>& content_task_runner) | 31 const scoped_refptr<base::TaskRunner>& content_task_runner) |
| 33 : net::URLRequestJob(request, network_delegate), | 32 : net::URLRequestJob(request, network_delegate), |
| 34 content_path_(content_path), | 33 content_path_(content_path), |
| 35 stream_(new net::FileStream(content_task_runner)), | 34 stream_(new net::FileStream(content_task_runner)), |
| 36 content_task_runner_(content_task_runner), | 35 content_task_runner_(content_task_runner), |
| | 36 range_parse_result_(net::OK), |
| 37 remaining_bytes_(0), | 37 remaining_bytes_(0), |
| 38 io_pending_(false), | 38 io_pending_(false), |
| 39 weak_ptr_factory_(this) {} | 39 weak_ptr_factory_(this) {} |
| 40 | 40 |
| 41 void URLRequestContentJob::Start() { | 41 void URLRequestContentJob::Start() { |
| 42 ContentMetaInfo* meta_info = new ContentMetaInfo(); | 42 ContentMetaInfo* meta_info = new ContentMetaInfo(); |
| 43 content_task_runner_->PostTaskAndReply( | 43 content_task_runner_->PostTaskAndReply( |
| 44 FROM_HERE, | 44 FROM_HERE, |
| 45 base::Bind(&URLRequestContentJob::FetchMetaInfo, content_path_, | 45 base::Bind(&URLRequestContentJob::FetchMetaInfo, content_path_, |
| 46 base::Unretained(meta_info)), | 46 base::Unretained(meta_info)), |
| 47 base::Bind(&URLRequestContentJob::DidFetchMetaInfo, | 47 base::Bind(&URLRequestContentJob::DidFetchMetaInfo, |
| 48 weak_ptr_factory_.GetWeakPtr(), | 48 weak_ptr_factory_.GetWeakPtr(), |
| 49 base::Owned(meta_info))); | 49 base::Owned(meta_info))); |
| 50 } | 50 } |
| 51 | 51 |
| 52 void URLRequestContentJob::Kill() { | 52 void URLRequestContentJob::Kill() { |
| 53 stream_.reset(); | 53 stream_.reset(); |
| 54 weak_ptr_factory_.InvalidateWeakPtrs(); | 54 weak_ptr_factory_.InvalidateWeakPtrs(); |
| 55 | 55 |
| 56 net::URLRequestJob::Kill(); | 56 net::URLRequestJob::Kill(); |
| 57 } | 57 } |
| 58 | 58 |
| 59 bool URLRequestContentJob::ReadRawData(net::IOBuffer* dest, | 59 int URLRequestContentJob::ReadRawData(net::IOBuffer* dest, int dest_size) { |
| 60 int dest_size, | |
| 61 int* bytes_read) { | |
| 62 DCHECK_GT(dest_size, 0); | 60 DCHECK_GT(dest_size, 0); |
| 63 DCHECK(bytes_read); | |
| 64 DCHECK_GE(remaining_bytes_, 0); | 61 DCHECK_GE(remaining_bytes_, 0); |
| 65 | 62 |
| 66 if (remaining_bytes_ < dest_size) | 63 if (remaining_bytes_ < dest_size) |
| 67 dest_size = static_cast<int>(remaining_bytes_); | 64 dest_size = remaining_bytes_; |
| 68 | 65 |
| 69 // If we should copy zero bytes because |remaining_bytes_| is zero, short | 66 // If we should copy zero bytes because |remaining_bytes_| is zero, short |
| 70 // circuit here. | 67 // circuit here. |
| 71 if (!dest_size) { | 68 if (!dest_size) |
| 72 *bytes_read = 0; | 69 return 0; |
| 73 return true; | |
| 74 } | |
| 75 | 70 |
| 76 int rv = stream_->Read(dest, | 71 int rv = stream_->Read(dest, dest_size, |
| 77 dest_size, | |
| 78 base::Bind(&URLRequestContentJob::DidRead, | 72 base::Bind(&URLRequestContentJob::DidRead, |
| 79 weak_ptr_factory_.GetWeakPtr(), | 73 weak_ptr_factory_.GetWeakPtr())); |
| 80 make_scoped_refptr(dest))); | |
| 81 if (rv >= 0) { | |
| 82 // Data is immediately available. | |
| 83 *bytes_read = rv; | |
| 84 remaining_bytes_ -= rv; | |
| 85 DCHECK_GE(remaining_bytes_, 0); | |
| 86 return true; | |
| 87 } | |
| 88 | |
| 89 // Otherwise, a read error occurred. We may just need to wait... | |
| 90 if (rv == net::ERR_IO_PENDING) { | 74 if (rv == net::ERR_IO_PENDING) { |
| 91 io_pending_ = true; | 75 io_pending_ = true; |
| 92 SetStatus(net::URLRequestStatus(net::URLRequestStatus::IO_PENDING, 0)); | 76 } else if (rv > 0) { |
| 93 } else { | 77 remaining_bytes_ -= rv; |
| 94 NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, rv)); | |
| 95 } | 78 } |
| 96 return false; | 79 DCHECK_GE(remaining_bytes_, 0); |
| | 80 return rv; |
| 97 } | 81 } |
| 98 | 82 |
| 99 bool URLRequestContentJob::IsRedirectResponse(GURL* location, | 83 bool URLRequestContentJob::IsRedirectResponse(GURL* location, |
| 100 int* http_status_code) { | 84 int* http_status_code) { |
| 101 return false; | 85 return false; |
| 102 } | 86 } |
| 103 | 87 |
| 104 bool URLRequestContentJob::GetMimeType(std::string* mime_type) const { | 88 bool URLRequestContentJob::GetMimeType(std::string* mime_type) const { |
| 105 DCHECK(request_); | 89 DCHECK(request_); |
| 106 if (!meta_info_.mime_type.empty()) { | 90 if (!meta_info_.mime_type.empty()) { |
| 107 *mime_type = meta_info_.mime_type; | 91 *mime_type = meta_info_.mime_type; |
| 108 return true; | 92 return true; |
| 109 } | 93 } |
| 110 return false; | 94 return false; |
| 111 } | 95 } |
| 112 | 96 |
| 113 void URLRequestContentJob::SetExtraRequestHeaders( | 97 void URLRequestContentJob::SetExtraRequestHeaders( |
| 114 const net::HttpRequestHeaders& headers) { | 98 const net::HttpRequestHeaders& headers) { |
| 115 std::string range_header; | 99 std::string range_header; |
| 116 if (!headers.GetHeader(net::HttpRequestHeaders::kRange, &range_header)) | 100 if (!headers.GetHeader(net::HttpRequestHeaders::kRange, &range_header)) |
| 117 return; | 101 return; |
| 118 | 102 |
| 119 // We only care about "Range" header here. | 103 // Currently this job only cares about the Range header. Note that validation |
| | 104 // is deferred to DidOpen(), because NotifyStartError is not legal to call |
| | 105 // before the job has started. |
| 120 std::vector<net::HttpByteRange> ranges; | 106 std::vector<net::HttpByteRange> ranges; |
| 121 if (net::HttpUtil::ParseRangeHeader(range_header, &ranges)) { | 107 if (net::HttpUtil::ParseRangeHeader(range_header, &ranges)) { |
| 122 if (ranges.size() == 1) { | 108 if (ranges.size() == 1) { |
| 123 byte_range_ = ranges[0]; | 109 byte_range_ = ranges[0]; |
| 124 } else { | 110 } else { |
| 125 // We don't support multiple range requests. | 111 // We don't support multiple range requests. |
| 126 NotifyDone(net::URLRequestStatus( | 112 range_parse_result_ = net::ERR_REQUEST_RANGE_NOT_SATISFIABLE; |
| 127 net::URLRequestStatus::FAILED, | |
| 128 net::ERR_REQUEST_RANGE_NOT_SATISFIABLE)); | |
| 129 } | 113 } |
| 130 } | 114 } |
| 131 } | 115 } |
| 132 | 116 |
| 133 URLRequestContentJob::~URLRequestContentJob() {} | 117 URLRequestContentJob::~URLRequestContentJob() {} |
| 134 | 118 |
| 135 void URLRequestContentJob::FetchMetaInfo(const base::FilePath& content_path, | 119 void URLRequestContentJob::FetchMetaInfo(const base::FilePath& content_path, |
| 136 ContentMetaInfo* meta_info) { | 120 ContentMetaInfo* meta_info) { |
| 137 base::File::Info file_info; | 121 base::File::Info file_info; |
| 138 meta_info->content_exists = base::GetFileInfo(content_path, &file_info); | 122 meta_info->content_exists = base::GetFileInfo(content_path, &file_info); |
| (...skipping 16 matching lines...) |
| 155 base::File::FLAG_ASYNC; | 139 base::File::FLAG_ASYNC; |
| 156 int rv = stream_->Open(content_path_, flags, | 140 int rv = stream_->Open(content_path_, flags, |
| 157 base::Bind(&URLRequestContentJob::DidOpen, | 141 base::Bind(&URLRequestContentJob::DidOpen, |
| 158 weak_ptr_factory_.GetWeakPtr())); | 142 weak_ptr_factory_.GetWeakPtr())); |
| 159 if (rv != net::ERR_IO_PENDING) | 143 if (rv != net::ERR_IO_PENDING) |
| 160 DidOpen(rv); | 144 DidOpen(rv); |
| 161 } | 145 } |
| 162 | 146 |
| 163 void URLRequestContentJob::DidOpen(int result) { | 147 void URLRequestContentJob::DidOpen(int result) { |
| 164 if (result != net::OK) { | 148 if (result != net::OK) { |
| 165 NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, result)); | 149 NotifyStartError( |
| | 150 net::URLRequestStatus(net::URLRequestStatus::FAILED, result)); |
| | 151 return; |
| | 152 } |
| | 153 |
| | 154 if (range_parse_result_ != net::OK) { |
| | 155 NotifyStartError(net::URLRequestStatus(net::URLRequestStatus::FAILED, |
| | 156 range_parse_result_)); |
| 166 return; | 157 return; |
| 167 } | 158 } |
| 168 | 159 |
| 169 if (!byte_range_.ComputeBounds(meta_info_.content_size)) { | 160 if (!byte_range_.ComputeBounds(meta_info_.content_size)) { |
| 170 NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, | 161 NotifyStartError(net::URLRequestStatus( |
| 171 net::ERR_REQUEST_RANGE_NOT_SATISFIABLE)); | 162 net::URLRequestStatus::FAILED, net::ERR_REQUEST_RANGE_NOT_SATISFIABLE)); |
| 172 return; | 163 return; |
| 173 } | 164 } |
| 174 | 165 |
| 175 remaining_bytes_ = byte_range_.last_byte_position() - | 166 remaining_bytes_ = byte_range_.last_byte_position() - |
| 176 byte_range_.first_byte_position() + 1; | 167 byte_range_.first_byte_position() + 1; |
| 177 DCHECK_GE(remaining_bytes_, 0); | 168 DCHECK_GE(remaining_bytes_, 0); |
| 178 | 169 |
| 179 if (remaining_bytes_ > 0 && byte_range_.first_byte_position() != 0) { | 170 if (remaining_bytes_ > 0 && byte_range_.first_byte_position() != 0) { |
| 180 int rv = stream_->Seek(byte_range_.first_byte_position(), | 171 int rv = stream_->Seek(byte_range_.first_byte_position(), |
| 181 base::Bind(&URLRequestContentJob::DidSeek, | 172 base::Bind(&URLRequestContentJob::DidSeek, |
| 182 weak_ptr_factory_.GetWeakPtr())); | 173 weak_ptr_factory_.GetWeakPtr())); |
| 183 if (rv != net::ERR_IO_PENDING) { | 174 if (rv != net::ERR_IO_PENDING) { |
| 184 // stream_->Seek() failed, so pass an intentionally erroneous value | 175 // stream_->Seek() failed, so pass an intentionally erroneous value |
| 185 // into DidSeek(). | 176 // into DidSeek(). |
| 186 DidSeek(-1); | 177 DidSeek(-1); |
| 187 } | 178 } |
| 188 } else { | 179 } else { |
| 189 // We didn't need to call stream_->Seek() at all, so we pass to DidSeek() | 180 // We didn't need to call stream_->Seek() at all, so we pass to DidSeek() |
| 190 // the value that would mean seek success. This way we skip the code | 181 // the value that would mean seek success. This way we skip the code |
| 191 // handling seek failure. | 182 // handling seek failure. |
| 192 DidSeek(byte_range_.first_byte_position()); | 183 DidSeek(byte_range_.first_byte_position()); |
| 193 } | 184 } |
| 194 } | 185 } |
| 195 | 186 |
| 196 void URLRequestContentJob::DidSeek(int64 result) { | 187 void URLRequestContentJob::DidSeek(int64 result) { |
| 197 if (result != byte_range_.first_byte_position()) { | 188 if (result != byte_range_.first_byte_position()) { |
| 198 NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, | 189 NotifyStartError(net::URLRequestStatus( |
| 199 net::ERR_REQUEST_RANGE_NOT_SATISFIABLE)); | 190 net::URLRequestStatus::FAILED, net::ERR_REQUEST_RANGE_NOT_SATISFIABLE)); |
| 200 return; | 191 return; |
| 201 } | 192 } |
| 202 | 193 |
| 203 set_expected_content_size(remaining_bytes_); | 194 set_expected_content_size(remaining_bytes_); |
| 204 NotifyHeadersComplete(); | 195 NotifyHeadersComplete(); |
| 205 } | 196 } |
| 206 | 197 |
| 207 void URLRequestContentJob::DidRead( | 198 void URLRequestContentJob::DidRead(int result) { |
| 208 scoped_refptr<net::IOBuffer> buf, int result) { | 199 DCHECK(io_pending_); |
| | 200 io_pending_ = false; |
| | 201 |
| 209 if (result > 0) { | 202 if (result > 0) { |
| 210 SetStatus(net::URLRequestStatus()); // Clear the IO_PENDING status | |
| 211 remaining_bytes_ -= result; | 203 remaining_bytes_ -= result; |
| 212 DCHECK_GE(remaining_bytes_, 0); | 204 DCHECK_GE(remaining_bytes_, 0); |
| 213 } | 205 } |
| 214 | 206 |
| 215 DCHECK(io_pending_); | 207 ReadRawDataComplete(result); |
| 216 io_pending_ = false; | |
| 217 | |
| 218 if (result == 0) { | |
| 219 NotifyDone(net::URLRequestStatus()); | |
| 220 } else if (result < 0) { | |
| 221 NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, result)); | |
| 222 } | |
| 223 | |
| 224 NotifyReadComplete(result); | |
| 225 } | 208 } |
| 226 | 209 |
| 227 } // namespace content | 210 } // namespace content |
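For readers unfamiliar with the URLRequestJob contract this patch migrates to, here is a minimal sketch of the new read path, assuming only the entry points that appear in the diff above (an int-returning ReadRawData(), net::ERR_IO_PENDING for asynchronous reads, and ReadRawDataComplete() for deferred completion). It is an illustration, not part of the CL; the SketchJob name and its members are hypothetical stand-ins for URLRequestContentJob and its net::FileStream.

```cpp
// Illustrative sketch only. ReadRawData(), ReadRawDataComplete() and
// net::ERR_IO_PENDING come from the diff above; everything named Sketch*
// is hypothetical.
#include "base/bind.h"
#include "base/memory/scoped_ptr.h"
#include "base/memory/weak_ptr.h"
#include "net/base/file_stream.h"
#include "net/base/io_buffer.h"
#include "net/base/net_errors.h"
#include "net/url_request/url_request_job.h"

class SketchJob : public net::URLRequestJob {
 public:
  SketchJob(net::URLRequest* request, net::NetworkDelegate* network_delegate)
      : net::URLRequestJob(request, network_delegate),
        io_pending_(false),
        weak_ptr_factory_(this) {}

  // New contract: return the number of bytes read for a synchronous read
  // (0 meaning end of stream), a negative net error for a synchronous
  // failure, or net::ERR_IO_PENDING when the result will be delivered later
  // via ReadRawDataComplete(). No SetStatus()/NotifyDone() bookkeeping.
  int ReadRawData(net::IOBuffer* dest, int dest_size) override {
    int rv = stream_->Read(dest, dest_size,
                           base::Bind(&SketchJob::DidRead,
                                      weak_ptr_factory_.GetWeakPtr()));
    if (rv == net::ERR_IO_PENDING)
      io_pending_ = true;  // DidRead() will report the result.
    return rv;
  }

 protected:
  ~SketchJob() override {}

 private:
  void DidRead(int result) {
    io_pending_ = false;
    // Exactly one ReadRawDataComplete() per ReadRawData() call that returned
    // ERR_IO_PENDING; the same value conventions apply to |result|.
    ReadRawDataComplete(result);
  }

  scoped_ptr<net::FileStream> stream_;  // Opened elsewhere, e.g. from Start().
  bool io_pending_;
  base::WeakPtrFactory<SketchJob> weak_ptr_factory_;
};
```

The patch applies the same pattern to URLRequestContentJob and, because a job may not report errors before it has started, also moves Range-header failures out of SetExtraRequestHeaders(): the parse result is stashed in range_parse_result_ and surfaced from DidOpen() via NotifyStartError() instead of NotifyDone().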