| OLD | NEW |
| 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "content/test/net/url_request_slow_download_job.h" | 5 #include "content/test/net/url_request_slow_download_job.h" |
| 6 | 6 |
| 7 #include "base/bind.h" | 7 #include "base/bind.h" |
| 8 #include "base/compiler_specific.h" | 8 #include "base/compiler_specific.h" |
| 9 #include "base/logging.h" | 9 #include "base/logging.h" |
| 10 #include "base/message_loop/message_loop.h" | 10 #include "base/message_loop/message_loop.h" |
| (...skipping 88 matching lines...) | |
| 99 net::URLRequest* request, net::NetworkDelegate* network_delegate) | 99 net::URLRequest* request, net::NetworkDelegate* network_delegate) |
| 100 : net::URLRequestJob(request, network_delegate), | 100 : net::URLRequestJob(request, network_delegate), |
| 101 bytes_already_sent_(0), | 101 bytes_already_sent_(0), |
| 102 should_error_download_(false), | 102 should_error_download_(false), |
| 103 should_finish_download_(false), | 103 should_finish_download_(false), |
| 104 buffer_size_(0), | 104 buffer_size_(0), |
| 105 weak_factory_(this) { | 105 weak_factory_(this) { |
| 106 } | 106 } |
| 107 | 107 |
| 108 void URLRequestSlowDownloadJob::StartAsync() { | 108 void URLRequestSlowDownloadJob::StartAsync() { |
| 109 if (base::LowerCaseEqualsASCII(kFinishDownloadUrl, | 109 if (LowerCaseEqualsASCII(kFinishDownloadUrl, request_->url().spec().c_str())) |
| 110 request_->url().spec().c_str())) | |
| 111 URLRequestSlowDownloadJob::FinishPendingRequests(); | 110 URLRequestSlowDownloadJob::FinishPendingRequests(); |
| 112 if (base::LowerCaseEqualsASCII(kErrorDownloadUrl, | 111 if (LowerCaseEqualsASCII(kErrorDownloadUrl, request_->url().spec().c_str())) |
| 113 request_->url().spec().c_str())) | |
| 114 URLRequestSlowDownloadJob::ErrorPendingRequests(); | 112 URLRequestSlowDownloadJob::ErrorPendingRequests(); |
| 115 | 113 |
| 116 NotifyHeadersComplete(); | 114 NotifyHeadersComplete(); |
| 117 } | 115 } |
| 118 | 116 |
| 119 // ReadRawData and CheckDoneStatus together implement a state | 117 // ReadRawData and CheckDoneStatus together implement a state |
| 120 // machine. ReadRawData may be called arbitrarily by the network stack. | 118 // machine. ReadRawData may be called arbitrarily by the network stack. |
| 121 // It responds by: | 119 // It responds by: |
| 122 // * If there are bytes remaining in the first chunk, they are | 120 // * If there are bytes remaining in the first chunk, they are |
| 123 // returned. | 121 // returned. |
| (...skipping 38 matching lines...) | |
| 162 *bytes_written = bytes_to_write; | 160 *bytes_written = bytes_to_write; |
| 163 bytes_already_sent_ += bytes_to_write; | 161 bytes_already_sent_ += bytes_to_write; |
| 164 return BUFFER_FILLED; | 162 return BUFFER_FILLED; |
| 165 } | 163 } |
| 166 | 164 |
| 167 return REQUEST_COMPLETE; | 165 return REQUEST_COMPLETE; |
| 168 } | 166 } |
| 169 | 167 |
| 170 bool URLRequestSlowDownloadJob::ReadRawData(net::IOBuffer* buf, int buf_size, | 168 bool URLRequestSlowDownloadJob::ReadRawData(net::IOBuffer* buf, int buf_size, |
| 171 int* bytes_read) { | 169 int* bytes_read) { |
| 172 if (base::LowerCaseEqualsASCII(kFinishDownloadUrl, | 170 if (LowerCaseEqualsASCII(kFinishDownloadUrl, |
| 173 request_->url().spec().c_str()) || | 171 request_->url().spec().c_str()) || |
| 174 base::LowerCaseEqualsASCII(kErrorDownloadUrl, | 172 LowerCaseEqualsASCII(kErrorDownloadUrl, |
| 175 request_->url().spec().c_str())) { | 173 request_->url().spec().c_str())) { |
| 176 VLOG(10) << __FUNCTION__ << " called w/ kFinish/ErrorDownloadUrl."; | 174 VLOG(10) << __FUNCTION__ << " called w/ kFinish/ErrorDownloadUrl."; |
| 177 *bytes_read = 0; | 175 *bytes_read = 0; |
| 178 return true; | 176 return true; |
| 179 } | 177 } |
| 180 | 178 |
| 181 VLOG(10) << __FUNCTION__ << " called at position " | 179 VLOG(10) << __FUNCTION__ << " called at position " |
| 182 << bytes_already_sent_ << " in the stream."; | 180 << bytes_already_sent_ << " in the stream."; |
| 183 ReadStatus status = FillBufferHelper(buf, buf_size, bytes_read); | 181 ReadStatus status = FillBufferHelper(buf, buf_size, bytes_read); |
| 184 switch (status) { | 182 switch (status) { |
| 185 case BUFFER_FILLED: | 183 case BUFFER_FILLED: |
| (...skipping 49 matching lines...) | |
| 235 URLRequestSlowDownloadJob::~URLRequestSlowDownloadJob() { | 233 URLRequestSlowDownloadJob::~URLRequestSlowDownloadJob() { |
| 236 DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO)); | 234 DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO)); |
| 237 pending_requests_.Get().erase(this); | 235 pending_requests_.Get().erase(this); |
| 238 } | 236 } |
| 239 | 237 |
| 240 // Private const version. | 238 // Private const version. |
| 241 void URLRequestSlowDownloadJob::GetResponseInfoConst( | 239 void URLRequestSlowDownloadJob::GetResponseInfoConst( |
| 242 net::HttpResponseInfo* info) const { | 240 net::HttpResponseInfo* info) const { |
| 243 // Send back mock headers. | 241 // Send back mock headers. |
| 244 std::string raw_headers; | 242 std::string raw_headers; |
| 245 if (base::LowerCaseEqualsASCII(kFinishDownloadUrl, | 243 if (LowerCaseEqualsASCII(kFinishDownloadUrl, |
| 246 request_->url().spec().c_str()) || | 244 request_->url().spec().c_str()) || |
| 247 base::LowerCaseEqualsASCII(kErrorDownloadUrl, | 245 LowerCaseEqualsASCII(kErrorDownloadUrl, |
| 248 request_->url().spec().c_str())) { | 246 request_->url().spec().c_str())) { |
| 249 raw_headers.append( | 247 raw_headers.append( |
| 250 "HTTP/1.1 200 OK\n" | 248 "HTTP/1.1 200 OK\n" |
| 251 "Content-type: text/plain\n"); | 249 "Content-type: text/plain\n"); |
| 252 } else { | 250 } else { |
| 253 raw_headers.append( | 251 raw_headers.append( |
| 254 "HTTP/1.1 200 OK\n" | 252 "HTTP/1.1 200 OK\n" |
| 255 "Content-type: application/octet-stream\n" | 253 "Content-type: application/octet-stream\n" |
| 256 "Cache-Control: max-age=0\n"); | 254 "Cache-Control: max-age=0\n"); |
| 257 | 255 |
| 258 if (base::LowerCaseEqualsASCII(kKnownSizeUrl, | 256 if (LowerCaseEqualsASCII(kKnownSizeUrl, request_->url().spec().c_str())) { |
| 259 request_->url().spec().c_str())) { | |
| 260 raw_headers.append(base::StringPrintf( | 257 raw_headers.append(base::StringPrintf( |
| 261 "Content-Length: %d\n", | 258 "Content-Length: %d\n", |
| 262 kFirstDownloadSize + kSecondDownloadSize)); | 259 kFirstDownloadSize + kSecondDownloadSize)); |
| 263 } | 260 } |
| 264 } | 261 } |
| 265 | 262 |
| 266 // ParseRawHeaders expects \0 to end each header line. | 263 // ParseRawHeaders expects \0 to end each header line. |
| 267 ReplaceSubstringsAfterOffset(&raw_headers, 0, "\n", std::string("\0", 1)); | 264 ReplaceSubstringsAfterOffset(&raw_headers, 0, "\n", std::string("\0", 1)); |
| 268 info->headers = new net::HttpResponseHeaders(raw_headers); | 265 info->headers = new net::HttpResponseHeaders(raw_headers); |
| 269 } | 266 } |
| 270 | 267 |
| 271 bool URLRequestSlowDownloadJob::GetMimeType(std::string* mime_type) const { | 268 bool URLRequestSlowDownloadJob::GetMimeType(std::string* mime_type) const { |
| 272 net::HttpResponseInfo info; | 269 net::HttpResponseInfo info; |
| 273 GetResponseInfoConst(&info); | 270 GetResponseInfoConst(&info); |
| 274 return info.headers.get() && info.headers->GetMimeType(mime_type); | 271 return info.headers.get() && info.headers->GetMimeType(mime_type); |
| 275 } | 272 } |
| 276 | 273 |
| 277 } // namespace content | 274 } // namespace content |
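Side note on the header-building pattern in GetResponseInfoConst above: net::HttpResponseHeaders expects its raw-headers input to use '\0' to end each header line, which is why the code builds the block with '\n' separators and then swaps each one for an embedded NUL via ReplaceSubstringsAfterOffset(&raw_headers, 0, "\n", std::string("\0", 1)). The std::string("\0", 1) form matters; a bare "\0" literal would construct an empty string and simply delete the separators. Below is a minimal standalone sketch of that substitution using only the standard library (no Chromium APIs assumed), not the actual implementation:

```cpp
#include <iostream>
#include <string>

int main() {
  std::string raw_headers =
      "HTTP/1.1 200 OK\n"
      "Content-type: application/octet-stream\n"
      "Cache-Control: max-age=0\n";

  // Replace every '\n' with a single embedded NUL byte, mirroring the
  // ReplaceSubstringsAfterOffset() call in GetResponseInfoConst.
  std::string::size_type pos = 0;
  while ((pos = raw_headers.find('\n', pos)) != std::string::npos) {
    raw_headers.replace(pos, 1, std::string("\0", 1));
    ++pos;
  }

  // The separators are now NUL bytes; the overall length is unchanged.
  std::cout << "header block length: " << raw_headers.size() << '\n';
  return 0;
}
```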