| OLD | NEW |
| 1 // Copyright (c) 2014 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2014 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "content/browser/android/url_request_content_job.h" | 5 #include "content/browser/android/url_request_content_job.h" |
| 6 | 6 |
| 7 #include "base/android/content_uri_utils.h" | 7 #include "base/android/content_uri_utils.h" |
| 8 #include "base/bind.h" | 8 #include "base/bind.h" |
| 9 #include "base/files/file_util.h" | 9 #include "base/files/file_util.h" |
| 10 #include "base/message_loop/message_loop.h" | 10 #include "base/message_loop/message_loop.h" |
| (...skipping 16 matching lines...) |
| 27 | 27 |
| 28 URLRequestContentJob::URLRequestContentJob( | 28 URLRequestContentJob::URLRequestContentJob( |
| 29 net::URLRequest* request, | 29 net::URLRequest* request, |
| 30 net::NetworkDelegate* network_delegate, | 30 net::NetworkDelegate* network_delegate, |
| 31 const base::FilePath& content_path, | 31 const base::FilePath& content_path, |
| 32 const scoped_refptr<base::TaskRunner>& content_task_runner) | 32 const scoped_refptr<base::TaskRunner>& content_task_runner) |
| 33 : net::URLRequestJob(request, network_delegate), | 33 : net::URLRequestJob(request, network_delegate), |
| 34 content_path_(content_path), | 34 content_path_(content_path), |
| 35 stream_(new net::FileStream(content_task_runner)), | 35 stream_(new net::FileStream(content_task_runner)), |
| 36 content_task_runner_(content_task_runner), | 36 content_task_runner_(content_task_runner), |
| 37 range_parse_result_(net::OK), |
| 37 remaining_bytes_(0), | 38 remaining_bytes_(0), |
| 38 io_pending_(false), | 39 io_pending_(false), |
| 39 weak_ptr_factory_(this) {} | 40 weak_ptr_factory_(this) {} |
| 40 | 41 |
| 41 void URLRequestContentJob::Start() { | 42 void URLRequestContentJob::Start() { |
| 42 ContentMetaInfo* meta_info = new ContentMetaInfo(); | 43 ContentMetaInfo* meta_info = new ContentMetaInfo(); |
| 43 content_task_runner_->PostTaskAndReply( | 44 content_task_runner_->PostTaskAndReply( |
| 44 FROM_HERE, | 45 FROM_HERE, |
| 45 base::Bind(&URLRequestContentJob::FetchMetaInfo, content_path_, | 46 base::Bind(&URLRequestContentJob::FetchMetaInfo, content_path_, |
| 46 base::Unretained(meta_info)), | 47 base::Unretained(meta_info)), |
| 47 base::Bind(&URLRequestContentJob::DidFetchMetaInfo, | 48 base::Bind(&URLRequestContentJob::DidFetchMetaInfo, |
| 48 weak_ptr_factory_.GetWeakPtr(), | 49 weak_ptr_factory_.GetWeakPtr(), |
| 49 base::Owned(meta_info))); | 50 base::Owned(meta_info))); |
| 50 } | 51 } |
| 51 | 52 |
| 52 void URLRequestContentJob::Kill() { | 53 void URLRequestContentJob::Kill() { |
| 53 stream_.reset(); | 54 stream_.reset(); |
| 54 weak_ptr_factory_.InvalidateWeakPtrs(); | 55 weak_ptr_factory_.InvalidateWeakPtrs(); |
| 55 | 56 |
| 56 net::URLRequestJob::Kill(); | 57 net::URLRequestJob::Kill(); |
| 57 } | 58 } |
| 58 | 59 |
| 59 bool URLRequestContentJob::ReadRawData(net::IOBuffer* dest, | 60 int URLRequestContentJob::ReadRawData(net::IOBuffer* dest, int dest_size) { |
| 60 int dest_size, | |
| 61 int* bytes_read) { | |
| 62 DCHECK_GT(dest_size, 0); | 61 DCHECK_GT(dest_size, 0); |
| 63 DCHECK(bytes_read); | |
| 64 DCHECK_GE(remaining_bytes_, 0); | 62 DCHECK_GE(remaining_bytes_, 0); |
| 65 | 63 |
| 66 if (remaining_bytes_ < dest_size) | 64 if (remaining_bytes_ < dest_size) |
| 67 dest_size = static_cast<int>(remaining_bytes_); | 65 dest_size = remaining_bytes_; |
| 68 | 66 |
| 69 // If we should copy zero bytes because |remaining_bytes_| is zero, short | 67 // If we should copy zero bytes because |remaining_bytes_| is zero, short |
| 70 // circuit here. | 68 // circuit here. |
| 71 if (!dest_size) { | 69 if (!dest_size) |
| 72 *bytes_read = 0; | 70 return 0; |
| 73 return true; | |
| 74 } | |
| 75 | 71 |
| 76 int rv = stream_->Read(dest, | 72 int rv = |
| 77 dest_size, | 73 stream_->Read(dest, dest_size, base::Bind(&URLRequestContentJob::DidRead, |
| 78 base::Bind(&URLRequestContentJob::DidRead, | 74 weak_ptr_factory_.GetWeakPtr(), |
| 79 weak_ptr_factory_.GetWeakPtr(), | 75 make_scoped_refptr(dest))); |
| 80 make_scoped_refptr(dest))); | |
| 81 if (rv >= 0) { | |
| 82 // Data is immediately available. | |
| 83 *bytes_read = rv; | |
| 84 remaining_bytes_ -= rv; | |
| 85 DCHECK_GE(remaining_bytes_, 0); | |
| 86 return true; | |
| 87 } | |
| 88 | |
| 89 // Otherwise, a read error occurred. We may just need to wait... | 76 if (rv == net::ERR_IO_PENDING) { |
| 90 if (rv == net::ERR_IO_PENDING) { | 76 if (rv == net::ERR_IO_PENDING) { |
| 91 io_pending_ = true; | 77 io_pending_ = true; |
| 92 SetStatus(net::URLRequestStatus(net::URLRequestStatus::IO_PENDING, 0)); | 78 } else if (rv > 0) { |
| 93 } else { | 79 remaining_bytes_ -= rv; |
| 94 NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, rv)); | |
| 95 } | 80 } |
| 96 return false; | 81 DCHECK_GE(remaining_bytes_, 0); |
| 82 return rv; |
| 97 } | 83 } |
| 98 | 84 |
| 99 bool URLRequestContentJob::IsRedirectResponse(GURL* location, | 85 bool URLRequestContentJob::IsRedirectResponse(GURL* location, |
| 100 int* http_status_code) { | 86 int* http_status_code) { |
| 101 return false; | 87 return false; |
| 102 } | 88 } |
| 103 | 89 |
| 104 bool URLRequestContentJob::GetMimeType(std::string* mime_type) const { | 90 bool URLRequestContentJob::GetMimeType(std::string* mime_type) const { |
| 105 DCHECK(request_); | 91 DCHECK(request_); |
| 106 if (!meta_info_.mime_type.empty()) { | 92 if (!meta_info_.mime_type.empty()) { |
| 107 *mime_type = meta_info_.mime_type; | 93 *mime_type = meta_info_.mime_type; |
| 108 return true; | 94 return true; |
| 109 } | 95 } |
| 110 return false; | 96 return false; |
| 111 } | 97 } |
| 112 | 98 |
| 99 // Extracts the headers that this job cares about from the supplied request |
| 100 // headers. Currently this job only cares about the Range header. Note that |
| 101 // validation is deferred to DidOpen(), because NotifyStartError() is not |
| 102 // legal to call before the job has started. |
| 113 void URLRequestContentJob::SetExtraRequestHeaders( | 103 void URLRequestContentJob::SetExtraRequestHeaders( |
| 114 const net::HttpRequestHeaders& headers) { | 104 const net::HttpRequestHeaders& headers) { |
| 115 std::string range_header; | 105 std::string range_header; |
| 116 if (!headers.GetHeader(net::HttpRequestHeaders::kRange, &range_header)) | 106 if (!headers.GetHeader(net::HttpRequestHeaders::kRange, &range_header)) |
| 117 return; | 107 return; |
| 118 | 108 |
| 119 // We only care about the "Range" header here. | 109 // We only care about the "Range" header here. |
| 120 std::vector<net::HttpByteRange> ranges; | 110 std::vector<net::HttpByteRange> ranges; |
| 121 if (net::HttpUtil::ParseRangeHeader(range_header, &ranges)) { | 111 if (net::HttpUtil::ParseRangeHeader(range_header, &ranges)) { |
| 122 if (ranges.size() == 1) { | 112 if (ranges.size() == 1) { |
| 123 byte_range_ = ranges[0]; | 113 byte_range_ = ranges[0]; |
| 124 } else { | 114 } else { |
| 125 // We don't support multiple range requests. | 115 // We don't support multiple range requests. |
| 126 NotifyDone(net::URLRequestStatus( | 116 // Save the failure and report it in DidOpen(). |
| 127 net::URLRequestStatus::FAILED, | 117 range_parse_result_ = net::ERR_REQUEST_RANGE_NOT_SATISFIABLE; |
| 128 net::ERR_REQUEST_RANGE_NOT_SATISFIABLE)); | |
| 129 } | 118 } |
| 130 } | 119 } |
| 131 } | 120 } |
| 132 | 121 |
| 133 URLRequestContentJob::~URLRequestContentJob() {} | 122 URLRequestContentJob::~URLRequestContentJob() {} |
| 134 | 123 |
| 135 void URLRequestContentJob::FetchMetaInfo(const base::FilePath& content_path, | 124 void URLRequestContentJob::FetchMetaInfo(const base::FilePath& content_path, |
| 136 ContentMetaInfo* meta_info) { | 125 ContentMetaInfo* meta_info) { |
| 137 base::File::Info file_info; | 126 base::File::Info file_info; |
| 138 meta_info->content_exists = base::GetFileInfo(content_path, &file_info); | 127 meta_info->content_exists = base::GetFileInfo(content_path, &file_info); |
| (...skipping 16 matching lines...) |
| 155 base::File::FLAG_ASYNC; | 144 base::File::FLAG_ASYNC; |
| 156 int rv = stream_->Open(content_path_, flags, | 145 int rv = stream_->Open(content_path_, flags, |
| 157 base::Bind(&URLRequestContentJob::DidOpen, | 146 base::Bind(&URLRequestContentJob::DidOpen, |
| 158 weak_ptr_factory_.GetWeakPtr())); | 147 weak_ptr_factory_.GetWeakPtr())); |
| 159 if (rv != net::ERR_IO_PENDING) | 148 if (rv != net::ERR_IO_PENDING) |
| 160 DidOpen(rv); | 149 DidOpen(rv); |
| 161 } | 150 } |
| 162 | 151 |
| 163 void URLRequestContentJob::DidOpen(int result) { | 152 void URLRequestContentJob::DidOpen(int result) { |
| 164 if (result != net::OK) { | 153 if (result != net::OK) { |
| 165 NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, result)); | 154 NotifyStartError( |
| 155 net::URLRequestStatus(net::URLRequestStatus::FAILED, result)); |
| 156 return; |
| 157 } |
| 158 |
| 159 if (range_parse_result_ != net::OK) { |
| 160 NotifyStartError(net::URLRequestStatus(net::URLRequestStatus::FAILED, |
| 161 range_parse_result_)); |
| 166 return; | 162 return; |
| 167 } | 163 } |
| 168 | 164 |
| 169 if (!byte_range_.ComputeBounds(meta_info_.content_size)) { | 165 if (!byte_range_.ComputeBounds(meta_info_.content_size)) { |
| 170 NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, | 166 NotifyStartError(net::URLRequestStatus( |
| 171 net::ERR_REQUEST_RANGE_NOT_SATISFIABLE)); | 167 net::URLRequestStatus::FAILED, net::ERR_REQUEST_RANGE_NOT_SATISFIABLE)); |
| 172 return; | 168 return; |
| 173 } | 169 } |
| 174 | 170 |
| 175 remaining_bytes_ = byte_range_.last_byte_position() - | 171 remaining_bytes_ = byte_range_.last_byte_position() - |
| 176 byte_range_.first_byte_position() + 1; | 172 byte_range_.first_byte_position() + 1; |
| 177 DCHECK_GE(remaining_bytes_, 0); | 173 DCHECK_GE(remaining_bytes_, 0); |
| 178 | 174 |
| 179 if (remaining_bytes_ > 0 && byte_range_.first_byte_position() != 0) { | 175 if (remaining_bytes_ > 0 && byte_range_.first_byte_position() != 0) { |
| 180 int rv = stream_->Seek(byte_range_.first_byte_position(), | 176 int rv = stream_->Seek(byte_range_.first_byte_position(), |
| 181 base::Bind(&URLRequestContentJob::DidSeek, | 177 base::Bind(&URLRequestContentJob::DidSeek, |
| 182 weak_ptr_factory_.GetWeakPtr())); | 178 weak_ptr_factory_.GetWeakPtr())); |
| 183 if (rv != net::ERR_IO_PENDING) { | 179 if (rv != net::ERR_IO_PENDING) { |
| 184 // stream_->Seek() failed, so pass an intentionally erroneous value | 180 // stream_->Seek() failed, so pass an intentionally erroneous value |
| 185 // into DidSeek(). | 181 // into DidSeek(). |
| 186 DidSeek(-1); | 182 DidSeek(-1); |
| 187 } | 183 } |
| 188 } else { | 184 } else { |
| 189 // We didn't need to call stream_->Seek() at all, so we pass to DidSeek() | 185 // We didn't need to call stream_->Seek() at all, so we pass to DidSeek() |
| 190 // the value that would mean seek success. This way we skip the code | 186 // the value that would mean seek success. This way we skip the code |
| 191 // handling seek failure. | 187 // handling seek failure. |
| 192 DidSeek(byte_range_.first_byte_position()); | 188 DidSeek(byte_range_.first_byte_position()); |
| 193 } | 189 } |
| 194 } | 190 } |
| 195 | 191 |
| 196 void URLRequestContentJob::DidSeek(int64 result) { | 192 void URLRequestContentJob::DidSeek(int64 result) { |
| 197 if (result != byte_range_.first_byte_position()) { | 193 if (result != byte_range_.first_byte_position()) { |
| 198 NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, | 194 NotifyStartError(net::URLRequestStatus( |
| 199 net::ERR_REQUEST_RANGE_NOT_SATISFIABLE)); | 195 net::URLRequestStatus::FAILED, net::ERR_REQUEST_RANGE_NOT_SATISFIABLE)); |
| 200 return; | 196 return; |
| 201 } | 197 } |
| 202 | 198 |
| 203 set_expected_content_size(remaining_bytes_); | 199 set_expected_content_size(remaining_bytes_); |
| 204 NotifyHeadersComplete(); | 200 NotifyHeadersComplete(); |
| 205 } | 201 } |
| 206 | 202 |
| 207 void URLRequestContentJob::DidRead( | 203 void URLRequestContentJob::DidRead(scoped_refptr<net::IOBuffer> buf, |
| 208 scoped_refptr<net::IOBuffer> buf, int result) { | 204 int result) { |
| 205 DCHECK(io_pending_); |
| 206 io_pending_ = false; |
| 207 |
| 209 if (result > 0) { | 208 if (result > 0) { |
| 210 SetStatus(net::URLRequestStatus()); // Clear the IO_PENDING status | |
| 211 remaining_bytes_ -= result; | 209 remaining_bytes_ -= result; |
| 212 DCHECK_GE(remaining_bytes_, 0); | 210 DCHECK_GE(remaining_bytes_, 0); |
| 213 } | 211 } |
| 214 | 212 |
| 215 DCHECK(io_pending_); | 213 ReadRawDataComplete(result); |
| 216 io_pending_ = false; | |
| 217 | |
| 218 if (result == 0) { | |
| 219 NotifyDone(net::URLRequestStatus()); | |
| 220 } else if (result < 0) { | |
| 221 NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, result)); | |
| 222 } | |
| 223 | |
| 224 NotifyReadComplete(result); | |
| 225 } | 214 } |
| 226 | 215 |
| 227 } // namespace content | 216 } // namespace content |
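For context only (not part of this CL): the NEW column follows the revised ReadRawData() contract, where the job returns the byte count for a synchronous read, 0 at end of stream, net::ERR_IO_PENDING for an asynchronous read that later calls ReadRawDataComplete(), or a negative net error code. The sketch below illustrates that contract with a hypothetical, minimal synchronous URLRequestJob subclass; the class name InMemoryJob and its members are invented for illustration and are not part of Chromium.

```cpp
// Hypothetical sketch (assumed names; not code from this CL): a trivial job
// that serves an in-memory string synchronously, written against the new
// ReadRawData() contract that URLRequestContentJob adopts above.
#include <algorithm>
#include <cstring>
#include <string>

#include "net/base/io_buffer.h"
#include "net/url_request/url_request_job.h"

class InMemoryJob : public net::URLRequestJob {
 public:
  InMemoryJob(net::URLRequest* request,
              net::NetworkDelegate* network_delegate,
              const std::string& data)
      : net::URLRequestJob(request, network_delegate),
        data_(data),
        offset_(0) {}

  void Start() override {
    // All data is available up front, so headers are complete immediately.
    NotifyHeadersComplete();
  }

  int ReadRawData(net::IOBuffer* dest, int dest_size) override {
    // New contract: return the number of bytes copied; 0 signals EOF.
    // An asynchronous job would instead return net::ERR_IO_PENDING here and
    // invoke ReadRawDataComplete(result) later, as DidRead() does in the CL.
    const int to_copy =
        std::min(dest_size, static_cast<int>(data_.size() - offset_));
    memcpy(dest->data(), data_.data() + offset_, to_copy);
    offset_ += to_copy;
    return to_copy;
  }

 private:
  ~InMemoryJob() override {}

  const std::string data_;
  size_t offset_;
};
```

Under the old contract the same logic needed a bool return, an out-parameter, and explicit SetStatus()/NotifyReadComplete() bookkeeping, which is the code this CL removes from ReadRawData() and DidRead().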