OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/test/net/url_request_slow_download_job.h" | 5 #include "content/test/net/url_request_slow_download_job.h" |
6 | 6 |
7 #include "base/bind.h" | 7 #include "base/bind.h" |
8 #include "base/compiler_specific.h" | 8 #include "base/compiler_specific.h" |
9 #include "base/logging.h" | 9 #include "base/logging.h" |
10 #include "base/message_loop/message_loop.h" | 10 #include "base/message_loop/message_loop.h" |
(...skipping 88 matching lines...)
99 net::URLRequest* request, net::NetworkDelegate* network_delegate) | 99 net::URLRequest* request, net::NetworkDelegate* network_delegate) |
100 : net::URLRequestJob(request, network_delegate), | 100 : net::URLRequestJob(request, network_delegate), |
101 bytes_already_sent_(0), | 101 bytes_already_sent_(0), |
102 should_error_download_(false), | 102 should_error_download_(false), |
103 should_finish_download_(false), | 103 should_finish_download_(false), |
104 buffer_size_(0), | 104 buffer_size_(0), |
105 weak_factory_(this) { | 105 weak_factory_(this) { |
106 } | 106 } |
107 | 107 |
108 void URLRequestSlowDownloadJob::StartAsync() { | 108 void URLRequestSlowDownloadJob::StartAsync() { |
109 if (LowerCaseEqualsASCII(kFinishDownloadUrl, request_->url().spec().c_str())) | 109 if (base::LowerCaseEqualsASCII(kFinishDownloadUrl, |
| 110 request_->url().spec().c_str())) |
110 URLRequestSlowDownloadJob::FinishPendingRequests(); | 111 URLRequestSlowDownloadJob::FinishPendingRequests(); |
111 if (LowerCaseEqualsASCII(kErrorDownloadUrl, request_->url().spec().c_str())) | 112 if (base::LowerCaseEqualsASCII(kErrorDownloadUrl, |
| 113 request_->url().spec().c_str())) |
112 URLRequestSlowDownloadJob::ErrorPendingRequests(); | 114 URLRequestSlowDownloadJob::ErrorPendingRequests(); |
113 | 115 |
114 NotifyHeadersComplete(); | 116 NotifyHeadersComplete(); |
115 } | 117 } |
116 | 118 |
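The substantive change in this hunk is the base:: qualification of LowerCaseEqualsASCII. As a rough, standalone illustration of the comparison StartAsync performs on each request URL, here is a hypothetical sketch; it is not the base/ implementation, and the helper name is invented for illustration.

// Hypothetical sketch of a case-insensitive ASCII equality check, in the
// spirit of what StartAsync uses to recognize the finish/error control URLs.
#include <cctype>
#include <string>

bool LowerCaseEqualsAsciiSketch(const std::string& input, const char* lower) {
  // Lower-case |input| one character at a time and compare it against the
  // already-lower-case ASCII string |lower|.
  for (char c : input) {
    if (*lower == '\0' ||
        std::tolower(static_cast<unsigned char>(c)) != *lower)
      return false;
    ++lower;
  }
  return *lower == '\0';  // Both strings must end at the same point.
}
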
117 // ReadRawData and CheckDoneStatus together implement a state | 119 // ReadRawData and CheckDoneStatus together implement a state |
118 // machine. ReadRawData may be called arbitrarily by the network stack. | 120 // machine. ReadRawData may be called arbitrarily by the network stack. |
119 // It responds by: | 121 // It responds by: |
120 // * If there are bytes remaining in the first chunk, they are | 122 // * If there are bytes remaining in the first chunk, they are |
121 // returned. | 123 // returned. |
(...skipping 38 matching lines...)
160 *bytes_written = bytes_to_write; | 162 *bytes_written = bytes_to_write; |
161 bytes_already_sent_ += bytes_to_write; | 163 bytes_already_sent_ += bytes_to_write; |
162 return BUFFER_FILLED; | 164 return BUFFER_FILLED; |
163 } | 165 } |
164 | 166 |
165 return REQUEST_COMPLETE; | 167 return REQUEST_COMPLETE; |
166 } | 168 } |
167 | 169 |
168 bool URLRequestSlowDownloadJob::ReadRawData(net::IOBuffer* buf, int buf_size, | 170 bool URLRequestSlowDownloadJob::ReadRawData(net::IOBuffer* buf, int buf_size, |
169 int* bytes_read) { | 171 int* bytes_read) { |
170 if (LowerCaseEqualsASCII(kFinishDownloadUrl, | 172 if (base::LowerCaseEqualsASCII(kFinishDownloadUrl, |
171 request_->url().spec().c_str()) || | 173 request_->url().spec().c_str()) || |
172 LowerCaseEqualsASCII(kErrorDownloadUrl, | 174 base::LowerCaseEqualsASCII(kErrorDownloadUrl, |
173 request_->url().spec().c_str())) { | 175 request_->url().spec().c_str())) { |
174 VLOG(10) << __FUNCTION__ << " called w/ kFinish/ErrorDownloadUrl."; | 176 VLOG(10) << __FUNCTION__ << " called w/ kFinish/ErrorDownloadUrl."; |
175 *bytes_read = 0; | 177 *bytes_read = 0; |
176 return true; | 178 return true; |
177 } | 179 } |
178 | 180 |
179 VLOG(10) << __FUNCTION__ << " called at position " | 181 VLOG(10) << __FUNCTION__ << " called at position " |
180 << bytes_already_sent_ << " in the stream."; | 182 << bytes_already_sent_ << " in the stream."; |
181 ReadStatus status = FillBufferHelper(buf, buf_size, bytes_read); | 183 ReadStatus status = FillBufferHelper(buf, buf_size, bytes_read); |
182 switch (status) { | 184 switch (status) { |
183 case BUFFER_FILLED: | 185 case BUFFER_FILLED: |
(...skipping 49 matching lines...)
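To make the two-chunk state machine described above easier to follow, here is a simplified, self-contained sketch of the buffer-filling decision. The chunk sizes, the REQUEST_BLOCKED name, and the parameter shape are illustrative assumptions; the real FillBufferHelper also advances bytes_already_sent_ and sits mostly in the lines skipped above.

// Simplified sketch of the FillBufferHelper decision logic (assumptions noted
// above); not the code under review.
#include <algorithm>

enum ReadStatusSketch { BUFFER_FILLED, REQUEST_BLOCKED, REQUEST_COMPLETE };

ReadStatusSketch FillSketch(int bytes_already_sent, bool should_finish,
                            int buf_size, int* bytes_written) {
  const int kFirstChunk = 1024 * 35;   // assumed size for illustration
  const int kSecondChunk = 1024 * 10;  // assumed size for illustration

  if (bytes_already_sent < kFirstChunk) {
    // Still inside the first chunk: hand back as much of it as fits.
    *bytes_written = std::min(kFirstChunk - bytes_already_sent, buf_size);
    return BUFFER_FILLED;
  }
  if (!should_finish) {
    // First chunk exhausted but the finish URL has not been visited yet:
    // the job stalls until the pending request is told to finish.
    return REQUEST_BLOCKED;
  }
  if (bytes_already_sent < kFirstChunk + kSecondChunk) {
    // Finish was requested: emit the rest of the second chunk.
    *bytes_written =
        std::min(kFirstChunk + kSecondChunk - bytes_already_sent, buf_size);
    return BUFFER_FILLED;
  }
  return REQUEST_COMPLETE;  // Everything has been sent.
}
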
233 URLRequestSlowDownloadJob::~URLRequestSlowDownloadJob() { | 235 URLRequestSlowDownloadJob::~URLRequestSlowDownloadJob() { |
234 DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO)); | 236 DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO)); |
235 pending_requests_.Get().erase(this); | 237 pending_requests_.Get().erase(this); |
236 } | 238 } |
237 | 239 |
238 // Private const version. | 240 // Private const version. |
239 void URLRequestSlowDownloadJob::GetResponseInfoConst( | 241 void URLRequestSlowDownloadJob::GetResponseInfoConst( |
240 net::HttpResponseInfo* info) const { | 242 net::HttpResponseInfo* info) const { |
241 // Send back mock headers. | 243 // Send back mock headers. |
242 std::string raw_headers; | 244 std::string raw_headers; |
243 if (LowerCaseEqualsASCII(kFinishDownloadUrl, | 245 if (base::LowerCaseEqualsASCII(kFinishDownloadUrl, |
244 request_->url().spec().c_str()) || | 246 request_->url().spec().c_str()) || |
245 LowerCaseEqualsASCII(kErrorDownloadUrl, | 247 base::LowerCaseEqualsASCII(kErrorDownloadUrl, |
246 request_->url().spec().c_str())) { | 248 request_->url().spec().c_str())) { |
247 raw_headers.append( | 249 raw_headers.append( |
248 "HTTP/1.1 200 OK\n" | 250 "HTTP/1.1 200 OK\n" |
249 "Content-type: text/plain\n"); | 251 "Content-type: text/plain\n"); |
250 } else { | 252 } else { |
251 raw_headers.append( | 253 raw_headers.append( |
252 "HTTP/1.1 200 OK\n" | 254 "HTTP/1.1 200 OK\n" |
253 "Content-type: application/octet-stream\n" | 255 "Content-type: application/octet-stream\n" |
254 "Cache-Control: max-age=0\n"); | 256 "Cache-Control: max-age=0\n"); |
255 | 257 |
256 if (LowerCaseEqualsASCII(kKnownSizeUrl, request_->url().spec().c_str())) { | 258 if (base::LowerCaseEqualsASCII(kKnownSizeUrl, |
| 259 request_->url().spec().c_str())) { |
257 raw_headers.append(base::StringPrintf( | 260 raw_headers.append(base::StringPrintf( |
258 "Content-Length: %d\n", | 261 "Content-Length: %d\n", |
259 kFirstDownloadSize + kSecondDownloadSize)); | 262 kFirstDownloadSize + kSecondDownloadSize)); |
260 } | 263 } |
261 } | 264 } |
262 | 265 |
263 // ParseRawHeaders expects \0 to end each header line. | 266 // ParseRawHeaders expects \0 to end each header line. |
264 ReplaceSubstringsAfterOffset(&raw_headers, 0, "\n", std::string("\0", 1)); | 267 ReplaceSubstringsAfterOffset(&raw_headers, 0, "\n", std::string("\0", 1)); |
265 info->headers = new net::HttpResponseHeaders(raw_headers); | 268 info->headers = new net::HttpResponseHeaders(raw_headers); |
266 } | 269 } |
267 | 270 |
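The ReplaceSubstringsAfterOffset call above exists because the raw-headers parser expects each header line to end with an embedded '\0' rather than '\n'. A standalone sketch of that conversion, using only std::string (the real code uses the base/ string helper):

// Standalone sketch of the '\n' -> '\0' conversion performed above.
#include <string>

std::string NullTerminateHeaderLines(std::string raw_headers) {
  for (char& c : raw_headers) {
    if (c == '\n')
      c = '\0';  // Each header line must end with an embedded NUL.
  }
  return raw_headers;
}

// For example, the finish/error response built above becomes
//   "HTTP/1.1 200 OK\0Content-type: text/plain\0"
// before it is handed to net::HttpResponseHeaders.
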
268 bool URLRequestSlowDownloadJob::GetMimeType(std::string* mime_type) const { | 271 bool URLRequestSlowDownloadJob::GetMimeType(std::string* mime_type) const { |
269 net::HttpResponseInfo info; | 272 net::HttpResponseInfo info; |
270 GetResponseInfoConst(&info); | 273 GetResponseInfoConst(&info); |
271 return info.headers.get() && info.headers->GetMimeType(mime_type); | 274 return info.headers.get() && info.headers->GetMimeType(mime_type); |
272 } | 275 } |
273 | 276 |
274 } // namespace content | 277 } // namespace content |