OLD | NEW |
1 // Copyright (c) 2014 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2014 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/browser/android/url_request_content_job.h" | 5 #include "content/browser/android/url_request_content_job.h" |
6 | 6 |
7 #include "base/android/content_uri_utils.h" | 7 #include "base/android/content_uri_utils.h" |
8 #include "base/bind.h" | 8 #include "base/bind.h" |
9 #include "base/files/file_util.h" | 9 #include "base/files/file_util.h" |
10 #include "base/message_loop/message_loop.h" | 10 #include "base/message_loop/message_loop.h" |
11 #include "base/task_runner.h" | 11 #include "base/task_runner.h" |
12 #include "net/base/file_stream.h" | 12 #include "net/base/file_stream.h" |
13 #include "net/base/io_buffer.h" | 13 #include "net/base/io_buffer.h" |
| 14 #include "net/base/net_errors.h" |
14 #include "net/http/http_util.h" | 15 #include "net/http/http_util.h" |
15 #include "net/url_request/url_request_error_job.h" | 16 #include "net/url_request/url_request_error_job.h" |
16 #include "url/gurl.h" | 17 #include "url/gurl.h" |
17 | 18 |
18 namespace content { | 19 namespace content { |
19 | 20 |
20 // TODO(qinmin): Refactor this class to reuse the common code in | 21 // TODO(qinmin): Refactor this class to reuse the common code in |
21 // url_request_file_job.cc. | 22 // url_request_file_job.cc. |
22 URLRequestContentJob::ContentMetaInfo::ContentMetaInfo() | 23 URLRequestContentJob::ContentMetaInfo::ContentMetaInfo() |
23 : content_exists(false), | 24 : content_exists(false), |
24 content_size(0) { | 25 content_size(0) { |
25 } | 26 } |
26 | 27 |
27 URLRequestContentJob::URLRequestContentJob( | 28 URLRequestContentJob::URLRequestContentJob( |
28 net::URLRequest* request, | 29 net::URLRequest* request, |
29 net::NetworkDelegate* network_delegate, | 30 net::NetworkDelegate* network_delegate, |
30 const base::FilePath& content_path, | 31 const base::FilePath& content_path, |
31 const scoped_refptr<base::TaskRunner>& content_task_runner) | 32 const scoped_refptr<base::TaskRunner>& content_task_runner) |
32 : net::URLRequestJob(request, network_delegate), | 33 : net::URLRequestJob(request, network_delegate), |
33 content_path_(content_path), | 34 content_path_(content_path), |
34 stream_(new net::FileStream(content_task_runner)), | 35 stream_(new net::FileStream(content_task_runner)), |
35 content_task_runner_(content_task_runner), | 36 content_task_runner_(content_task_runner), |
36 range_parse_result_(net::OK), | |
37 remaining_bytes_(0), | 37 remaining_bytes_(0), |
38 io_pending_(false), | 38 io_pending_(false), |
39 weak_ptr_factory_(this) {} | 39 weak_ptr_factory_(this) {} |
40 | 40 |
41 void URLRequestContentJob::Start() { | 41 void URLRequestContentJob::Start() { |
42 ContentMetaInfo* meta_info = new ContentMetaInfo(); | 42 ContentMetaInfo* meta_info = new ContentMetaInfo(); |
43 content_task_runner_->PostTaskAndReply( | 43 content_task_runner_->PostTaskAndReply( |
44 FROM_HERE, | 44 FROM_HERE, |
45 base::Bind(&URLRequestContentJob::FetchMetaInfo, content_path_, | 45 base::Bind(&URLRequestContentJob::FetchMetaInfo, content_path_, |
46 base::Unretained(meta_info)), | 46 base::Unretained(meta_info)), |
47 base::Bind(&URLRequestContentJob::DidFetchMetaInfo, | 47 base::Bind(&URLRequestContentJob::DidFetchMetaInfo, |
48 weak_ptr_factory_.GetWeakPtr(), | 48 weak_ptr_factory_.GetWeakPtr(), |
49 base::Owned(meta_info))); | 49 base::Owned(meta_info))); |
50 } | 50 } |
51 | 51 |
52 void URLRequestContentJob::Kill() { | 52 void URLRequestContentJob::Kill() { |
53 stream_.reset(); | 53 stream_.reset(); |
54 weak_ptr_factory_.InvalidateWeakPtrs(); | 54 weak_ptr_factory_.InvalidateWeakPtrs(); |
55 | 55 |
56 net::URLRequestJob::Kill(); | 56 net::URLRequestJob::Kill(); |
57 } | 57 } |
58 | 58 |
59 int URLRequestContentJob::ReadRawData(net::IOBuffer* dest, int dest_size) { | 59 bool URLRequestContentJob::ReadRawData(net::IOBuffer* dest, |
| 60 int dest_size, |
| 61 int* bytes_read) { |
60 DCHECK_GT(dest_size, 0); | 62 DCHECK_GT(dest_size, 0); |
| 63 DCHECK(bytes_read); |
61 DCHECK_GE(remaining_bytes_, 0); | 64 DCHECK_GE(remaining_bytes_, 0); |
62 | 65 |
63 if (remaining_bytes_ < dest_size) | 66 if (remaining_bytes_ < dest_size) |
64 dest_size = remaining_bytes_; | 67 dest_size = static_cast<int>(remaining_bytes_); |
65 | 68 |
66 // If we should copy zero bytes because |remaining_bytes_| is zero, short | 69 // If we should copy zero bytes because |remaining_bytes_| is zero, short |
67 // circuit here. | 70 // circuit here. |
68 if (!dest_size) | 71 if (!dest_size) { |
69 return 0; | 72 *bytes_read = 0; |
| 73 return true; |
| 74 } |
70 | 75 |
71 int rv = stream_->Read(dest, dest_size, | 76 int rv = |
72 base::Bind(&URLRequestContentJob::DidRead, | 77 stream_->Read(dest, dest_size, base::Bind(&URLRequestContentJob::DidRead, |
73 weak_ptr_factory_.GetWeakPtr())); | 78 weak_ptr_factory_.GetWeakPtr(), |
| 79 make_scoped_refptr(dest))); |
| 80 if (rv >= 0) { |
| 81 // Data is immediately available. |
| 82 *bytes_read = rv; |
| 83 remaining_bytes_ -= rv; |
| 84 DCHECK_GE(remaining_bytes_, 0); |
| 85 return true; |
| 86 } |
| 87 |
| 88 // Otherwise, a read error occurred. We may just need to wait... |
74 if (rv == net::ERR_IO_PENDING) { | 89 if (rv == net::ERR_IO_PENDING) { |
75 io_pending_ = true; | 90 io_pending_ = true; |
76 } else if (rv > 0) { | 91 SetStatus(net::URLRequestStatus(net::URLRequestStatus::IO_PENDING, 0)); |
77 remaining_bytes_ -= rv; | 92 } else { |
| 93 NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, rv)); |
78 } | 94 } |
79 DCHECK_GE(remaining_bytes_, 0); | 95 return false; |
80 return rv; | |
81 } | 96 } |
82 | 97 |
83 bool URLRequestContentJob::IsRedirectResponse(GURL* location, | 98 bool URLRequestContentJob::IsRedirectResponse(GURL* location, |
84 int* http_status_code) { | 99 int* http_status_code) { |
85 return false; | 100 return false; |
86 } | 101 } |
87 | 102 |
88 bool URLRequestContentJob::GetMimeType(std::string* mime_type) const { | 103 bool URLRequestContentJob::GetMimeType(std::string* mime_type) const { |
89 DCHECK(request_); | 104 DCHECK(request_); |
90 if (!meta_info_.mime_type.empty()) { | 105 if (!meta_info_.mime_type.empty()) { |
91 *mime_type = meta_info_.mime_type; | 106 *mime_type = meta_info_.mime_type; |
92 return true; | 107 return true; |
93 } | 108 } |
94 return false; | 109 return false; |
95 } | 110 } |
96 | 111 |
97 void URLRequestContentJob::SetExtraRequestHeaders( | 112 void URLRequestContentJob::SetExtraRequestHeaders( |
98 const net::HttpRequestHeaders& headers) { | 113 const net::HttpRequestHeaders& headers) { |
99 std::string range_header; | 114 std::string range_header; |
100 if (!headers.GetHeader(net::HttpRequestHeaders::kRange, &range_header)) | 115 if (!headers.GetHeader(net::HttpRequestHeaders::kRange, &range_header)) |
101 return; | 116 return; |
102 | 117 |
103 // Currently this job only cares about the Range header. Note that validation | 118 // We only care about "Range" header here. |
104 // is deferred to DidOpen(), because NotifyStartError is not legal to call | |
105 // since the job has not started. | |
106 std::vector<net::HttpByteRange> ranges; | 119 std::vector<net::HttpByteRange> ranges; |
107 if (net::HttpUtil::ParseRangeHeader(range_header, &ranges)) { | 120 if (net::HttpUtil::ParseRangeHeader(range_header, &ranges)) { |
108 if (ranges.size() == 1) { | 121 if (ranges.size() == 1) { |
109 byte_range_ = ranges[0]; | 122 byte_range_ = ranges[0]; |
110 } else { | 123 } else { |
111 // We don't support multiple range requests. | 124 // We don't support multiple range requests. |
112 range_parse_result_ = net::ERR_REQUEST_RANGE_NOT_SATISFIABLE; | 125 NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, |
| 126 net::ERR_REQUEST_RANGE_NOT_SATISFIABLE)); |
113 } | 127 } |
114 } | 128 } |
115 } | 129 } |
116 | 130 |
117 URLRequestContentJob::~URLRequestContentJob() {} | 131 URLRequestContentJob::~URLRequestContentJob() {} |
118 | 132 |
119 void URLRequestContentJob::FetchMetaInfo(const base::FilePath& content_path, | 133 void URLRequestContentJob::FetchMetaInfo(const base::FilePath& content_path, |
120 ContentMetaInfo* meta_info) { | 134 ContentMetaInfo* meta_info) { |
121 base::File::Info file_info; | 135 base::File::Info file_info; |
122 meta_info->content_exists = base::GetFileInfo(content_path, &file_info); | 136 meta_info->content_exists = base::GetFileInfo(content_path, &file_info); |
(...skipping 16 matching lines...)
139 base::File::FLAG_ASYNC; | 153 base::File::FLAG_ASYNC; |
140 int rv = stream_->Open(content_path_, flags, | 154 int rv = stream_->Open(content_path_, flags, |
141 base::Bind(&URLRequestContentJob::DidOpen, | 155 base::Bind(&URLRequestContentJob::DidOpen, |
142 weak_ptr_factory_.GetWeakPtr())); | 156 weak_ptr_factory_.GetWeakPtr())); |
143 if (rv != net::ERR_IO_PENDING) | 157 if (rv != net::ERR_IO_PENDING) |
144 DidOpen(rv); | 158 DidOpen(rv); |
145 } | 159 } |
146 | 160 |
147 void URLRequestContentJob::DidOpen(int result) { | 161 void URLRequestContentJob::DidOpen(int result) { |
148 if (result != net::OK) { | 162 if (result != net::OK) { |
149 NotifyStartError( | 163 NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, result)); |
150 net::URLRequestStatus(net::URLRequestStatus::FAILED, result)); | |
151 return; | |
152 } | |
153 | |
154 if (range_parse_result_ != net::OK) { | |
155 NotifyStartError(net::URLRequestStatus(net::URLRequestStatus::FAILED, | |
156 range_parse_result_)); | |
157 return; | 164 return; |
158 } | 165 } |
159 | 166 |
160 if (!byte_range_.ComputeBounds(meta_info_.content_size)) { | 167 if (!byte_range_.ComputeBounds(meta_info_.content_size)) { |
161 NotifyStartError(net::URLRequestStatus( | 168 NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, |
162 net::URLRequestStatus::FAILED, net::ERR_REQUEST_RANGE_NOT_SATISFIABLE)); | 169 net::ERR_REQUEST_RANGE_NOT_SATISFIABLE)); |
163 return; | 170 return; |
164 } | 171 } |
165 | 172 |
166 remaining_bytes_ = byte_range_.last_byte_position() - | 173 remaining_bytes_ = byte_range_.last_byte_position() - |
167 byte_range_.first_byte_position() + 1; | 174 byte_range_.first_byte_position() + 1; |
168 DCHECK_GE(remaining_bytes_, 0); | 175 DCHECK_GE(remaining_bytes_, 0); |
169 | 176 |
170 if (remaining_bytes_ > 0 && byte_range_.first_byte_position() != 0) { | 177 if (remaining_bytes_ > 0 && byte_range_.first_byte_position() != 0) { |
171 int rv = stream_->Seek(byte_range_.first_byte_position(), | 178 int rv = stream_->Seek(byte_range_.first_byte_position(), |
172 base::Bind(&URLRequestContentJob::DidSeek, | 179 base::Bind(&URLRequestContentJob::DidSeek, |
173 weak_ptr_factory_.GetWeakPtr())); | 180 weak_ptr_factory_.GetWeakPtr())); |
174 if (rv != net::ERR_IO_PENDING) { | 181 if (rv != net::ERR_IO_PENDING) { |
175 // stream_->Seek() failed, so pass an intentionally erroneous value | 182 // stream_->Seek() failed, so pass an intentionally erroneous value |
176 // into DidSeek(). | 183 // into DidSeek(). |
177 DidSeek(-1); | 184 DidSeek(-1); |
178 } | 185 } |
179 } else { | 186 } else { |
180 // We didn't need to call stream_->Seek() at all, so we pass to DidSeek() | 187 // We didn't need to call stream_->Seek() at all, so we pass to DidSeek() |
181 // the value that would mean seek success. This way we skip the code | 188 // the value that would mean seek success. This way we skip the code |
182 // handling seek failure. | 189 // handling seek failure. |
183 DidSeek(byte_range_.first_byte_position()); | 190 DidSeek(byte_range_.first_byte_position()); |
184 } | 191 } |
185 } | 192 } |
186 | 193 |
187 void URLRequestContentJob::DidSeek(int64 result) { | 194 void URLRequestContentJob::DidSeek(int64 result) { |
188 if (result != byte_range_.first_byte_position()) { | 195 if (result != byte_range_.first_byte_position()) { |
189 NotifyStartError(net::URLRequestStatus( | 196 NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, |
190 net::URLRequestStatus::FAILED, net::ERR_REQUEST_RANGE_NOT_SATISFIABLE)); | 197 net::ERR_REQUEST_RANGE_NOT_SATISFIABLE)); |
191 return; | 198 return; |
192 } | 199 } |
193 | 200 |
194 set_expected_content_size(remaining_bytes_); | 201 set_expected_content_size(remaining_bytes_); |
195 NotifyHeadersComplete(); | 202 NotifyHeadersComplete(); |
196 } | 203 } |
197 | 204 |
198 void URLRequestContentJob::DidRead(int result) { | 205 void URLRequestContentJob::DidRead(scoped_refptr<net::IOBuffer> buf, |
199 DCHECK(io_pending_); | 206 int result) { |
200 io_pending_ = false; | |
201 | |
202 if (result > 0) { | 207 if (result > 0) { |
| 208 SetStatus(net::URLRequestStatus()); // Clear the IO_PENDING status |
203 remaining_bytes_ -= result; | 209 remaining_bytes_ -= result; |
204 DCHECK_GE(remaining_bytes_, 0); | 210 DCHECK_GE(remaining_bytes_, 0); |
205 } | 211 } |
206 | 212 |
207 ReadRawDataComplete(result); | 213 DCHECK(io_pending_); |
| 214 io_pending_ = false; |
| 215 |
| 216 if (result == 0) { |
| 217 NotifyDone(net::URLRequestStatus()); |
| 218 } else if (result < 0) { |
| 219 NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED, result)); |
| 220 } |
| 221 |
| 222 NotifyReadComplete(result); |
208 } | 223 } |
209 | 224 |
210 } // namespace content | 225 } // namespace content |