OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/test/net/url_request_slow_download_job.h" | 5 #include "content/test/net/url_request_slow_download_job.h" |
6 | 6 |
7 #include "base/bind.h" | 7 #include "base/bind.h" |
8 #include "base/compiler_specific.h" | 8 #include "base/compiler_specific.h" |
9 #include "base/logging.h" | 9 #include "base/logging.h" |
10 #include "base/message_loop.h" | 10 #include "base/message_loop.h" |
11 #include "base/string_util.h" | 11 #include "base/string_util.h" |
12 #include "base/stringprintf.h" | 12 #include "base/stringprintf.h" |
13 #include "content/public/browser/browser_thread.h" | 13 #include "content/public/browser/browser_thread.h" |
14 #include "googleurl/src/gurl.h" | 14 #include "googleurl/src/gurl.h" |
15 #include "net/base/io_buffer.h" | 15 #include "net/base/io_buffer.h" |
| 16 #include "net/base/net_errors.h" |
16 #include "net/http/http_response_headers.h" | 17 #include "net/http/http_response_headers.h" |
17 #include "net/url_request/url_request.h" | 18 #include "net/url_request/url_request.h" |
18 #include "net/url_request/url_request_filter.h" | 19 #include "net/url_request/url_request_filter.h" |
19 | 20 |
20 namespace content { | 21 namespace content { |
21 | 22 |
22 const char URLRequestSlowDownloadJob::kUnknownSizeUrl[] = | 23 const char URLRequestSlowDownloadJob::kUnknownSizeUrl[] = |
23 "http://url.handled.by.slow.download/download-unknown-size"; | 24 "http://url.handled.by.slow.download/download-unknown-size"; |
24 const char URLRequestSlowDownloadJob::kKnownSizeUrl[] = | 25 const char URLRequestSlowDownloadJob::kKnownSizeUrl[] = |
25 "http://url.handled.by.slow.download/download-known-size"; | 26 "http://url.handled.by.slow.download/download-known-size"; |
26 const char URLRequestSlowDownloadJob::kFinishDownloadUrl[] = | 27 const char URLRequestSlowDownloadJob::kFinishDownloadUrl[] = |
27 "http://url.handled.by.slow.download/download-finish"; | 28 "http://url.handled.by.slow.download/download-finish"; |
| 29 const char URLRequestSlowDownloadJob::kErrorDownloadUrl[] = |
| 30 "http://url.handled.by.slow.download/download-error"; |
28 | 31 |
29 const int URLRequestSlowDownloadJob::kFirstDownloadSize = 1024 * 35; | 32 const int URLRequestSlowDownloadJob::kFirstDownloadSize = 1024 * 35; |
30 const int URLRequestSlowDownloadJob::kSecondDownloadSize = 1024 * 10; | 33 const int URLRequestSlowDownloadJob::kSecondDownloadSize = 1024 * 10; |
31 | 34 |
32 // static | 35 // static |
33 base::LazyInstance<URLRequestSlowDownloadJob::SlowJobsSet>::Leaky | 36 base::LazyInstance<URLRequestSlowDownloadJob::SlowJobsSet>::Leaky |
34 URLRequestSlowDownloadJob::pending_requests_ = LAZY_INSTANCE_INITIALIZER; | 37 URLRequestSlowDownloadJob::pending_requests_ = LAZY_INSTANCE_INITIALIZER; |
35 | 38 |
36 void URLRequestSlowDownloadJob::Start() { | 39 void URLRequestSlowDownloadJob::Start() { |
37 MessageLoop::current()->PostTask( | 40 MessageLoop::current()->PostTask( |
38 FROM_HERE, | 41 FROM_HERE, |
39 base::Bind(&URLRequestSlowDownloadJob::StartAsync, | 42 base::Bind(&URLRequestSlowDownloadJob::StartAsync, |
40 weak_factory_.GetWeakPtr())); | 43 weak_factory_.GetWeakPtr())); |
41 } | 44 } |
42 | 45 |
43 // static | 46 // static |
44 void URLRequestSlowDownloadJob::AddUrlHandler() { | 47 void URLRequestSlowDownloadJob::AddUrlHandler() { |
45 net::URLRequestFilter* filter = net::URLRequestFilter::GetInstance(); | 48 net::URLRequestFilter* filter = net::URLRequestFilter::GetInstance(); |
46 filter->AddUrlHandler(GURL(kUnknownSizeUrl), | 49 filter->AddUrlHandler(GURL(kUnknownSizeUrl), |
47 &URLRequestSlowDownloadJob::Factory); | 50 &URLRequestSlowDownloadJob::Factory); |
48 filter->AddUrlHandler(GURL(kKnownSizeUrl), | 51 filter->AddUrlHandler(GURL(kKnownSizeUrl), |
49 &URLRequestSlowDownloadJob::Factory); | 52 &URLRequestSlowDownloadJob::Factory); |
50 filter->AddUrlHandler(GURL(kFinishDownloadUrl), | 53 filter->AddUrlHandler(GURL(kFinishDownloadUrl), |
51 &URLRequestSlowDownloadJob::Factory); | 54 &URLRequestSlowDownloadJob::Factory); |
| 55 filter->AddUrlHandler(GURL(kErrorDownloadUrl), |
| 56 &URLRequestSlowDownloadJob::Factory); |
52 } | 57 } |
53 | 58 |
54 // static | 59 // static |
55 net::URLRequestJob* URLRequestSlowDownloadJob::Factory( | 60 net::URLRequestJob* URLRequestSlowDownloadJob::Factory( |
56 net::URLRequest* request, | 61 net::URLRequest* request, |
57 net::NetworkDelegate* network_delegate, | 62 net::NetworkDelegate* network_delegate, |
58 const std::string& scheme) { | 63 const std::string& scheme) { |
59 DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO)); | 64 DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO)); |
60 URLRequestSlowDownloadJob* job = new URLRequestSlowDownloadJob( | 65 URLRequestSlowDownloadJob* job = new URLRequestSlowDownloadJob( |
61 request, network_delegate); | 66 request, network_delegate); |
62 if (request->url().spec() != kFinishDownloadUrl) | 67 if (request->url().spec() != kFinishDownloadUrl && |
| 68 request->url().spec() != kErrorDownloadUrl) |
63 pending_requests_.Get().insert(job); | 69 pending_requests_.Get().insert(job); |
64 return job; | 70 return job; |
65 } | 71 } |
66 | 72 |
67 // static | 73 // static |
68 size_t URLRequestSlowDownloadJob::NumberOutstandingRequests() { | 74 size_t URLRequestSlowDownloadJob::NumberOutstandingRequests() { |
69 DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO)); | 75 DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO)); |
70 return pending_requests_.Get().size(); | 76 return pending_requests_.Get().size(); |
71 } | 77 } |
72 | 78 |
73 // static | 79 // static |
74 void URLRequestSlowDownloadJob::FinishPendingRequests() { | 80 void URLRequestSlowDownloadJob::FinishPendingRequests() { |
75 typedef std::set<URLRequestSlowDownloadJob*> JobList; | 81 typedef std::set<URLRequestSlowDownloadJob*> JobList; |
76 DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO)); | 82 DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO)); |
77 for (JobList::iterator it = pending_requests_.Get().begin(); it != | 83 for (JobList::iterator it = pending_requests_.Get().begin(); it != |
78 pending_requests_.Get().end(); ++it) { | 84 pending_requests_.Get().end(); ++it) { |
79 (*it)->set_should_finish_download(); | 85 (*it)->set_should_finish_download(); |
80 } | 86 } |
81 } | 87 } |
82 | 88 |
| 89 void URLRequestSlowDownloadJob::ErrorPendingRequests() { |
| 90 typedef std::set<URLRequestSlowDownloadJob*> JobList; |
| 91 DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO)); |
| 92 for (JobList::iterator it = pending_requests_.Get().begin(); it != |
| 93 pending_requests_.Get().end(); ++it) { |
| 94 (*it)->set_should_error_download(); |
| 95 } |
| 96 } |
| 97 |
83 URLRequestSlowDownloadJob::URLRequestSlowDownloadJob( | 98 URLRequestSlowDownloadJob::URLRequestSlowDownloadJob( |
84 net::URLRequest* request, net::NetworkDelegate* network_delegate) | 99 net::URLRequest* request, net::NetworkDelegate* network_delegate) |
85 : net::URLRequestJob(request, network_delegate), | 100 : net::URLRequestJob(request, network_delegate), |
86 bytes_already_sent_(0), | 101 bytes_already_sent_(0), |
| 102 should_error_download_(false), |
87 should_finish_download_(false), | 103 should_finish_download_(false), |
88 buffer_size_(0), | 104 buffer_size_(0), |
89 ALLOW_THIS_IN_INITIALIZER_LIST(weak_factory_(this)) { | 105 ALLOW_THIS_IN_INITIALIZER_LIST(weak_factory_(this)) { |
90 } | 106 } |
91 | 107 |
92 void URLRequestSlowDownloadJob::StartAsync() { | 108 void URLRequestSlowDownloadJob::StartAsync() { |
93 if (LowerCaseEqualsASCII(kFinishDownloadUrl, request_->url().spec().c_str())) | 109 if (LowerCaseEqualsASCII(kFinishDownloadUrl, request_->url().spec().c_str())) |
94 URLRequestSlowDownloadJob::FinishPendingRequests(); | 110 URLRequestSlowDownloadJob::FinishPendingRequests(); |
| 111 if (LowerCaseEqualsASCII(kErrorDownloadUrl, request_->url().spec().c_str())) |
| 112 URLRequestSlowDownloadJob::ErrorPendingRequests(); |
95 | 113 |
96 NotifyHeadersComplete(); | 114 NotifyHeadersComplete(); |
97 } | 115 } |
98 | 116 |
99 // ReadRawData and CheckDoneStatus together implement a state | 117 // ReadRawData and CheckDoneStatus together implement a state |
100 // machine. ReadRawData may be called arbitrarily by the network stack. | 118 // machine. ReadRawData may be called arbitrarily by the network stack. |
101 // It responds by: | 119 // It responds by: |
102 // * If there are bytes remaining in the first chunk, they are | 120 // * If there are bytes remaining in the first chunk, they are |
103 // returned. | 121 // returned. |
104 // [No bytes remaining in first chunk. ] | 122 // [No bytes remaining in first chunk. ] |
(...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
143 bytes_already_sent_ += bytes_to_write; | 161 bytes_already_sent_ += bytes_to_write; |
144 return BUFFER_FILLED; | 162 return BUFFER_FILLED; |
145 } | 163 } |
146 | 164 |
147 return REQUEST_COMPLETE; | 165 return REQUEST_COMPLETE; |
148 } | 166 } |
149 | 167 |
150 bool URLRequestSlowDownloadJob::ReadRawData(net::IOBuffer* buf, int buf_size, | 168 bool URLRequestSlowDownloadJob::ReadRawData(net::IOBuffer* buf, int buf_size, |
151 int* bytes_read) { | 169 int* bytes_read) { |
152 if (LowerCaseEqualsASCII(kFinishDownloadUrl, | 170 if (LowerCaseEqualsASCII(kFinishDownloadUrl, |
| 171 request_->url().spec().c_str()) || |
| 172 LowerCaseEqualsASCII(kErrorDownloadUrl, |
153 request_->url().spec().c_str())) { | 173 request_->url().spec().c_str())) { |
154 VLOG(10) << __FUNCTION__ << " called w/ kFinishDownloadUrl."; | 174 VLOG(10) << __FUNCTION__ << " called w/ kFinish/ErrorDownloadUrl."; |
155 *bytes_read = 0; | 175 *bytes_read = 0; |
156 return true; | 176 return true; |
157 } | 177 } |
158 | 178 |
159 VLOG(10) << __FUNCTION__ << " called at position " | 179 VLOG(10) << __FUNCTION__ << " called at position " |
160 << bytes_already_sent_ << " in the stream."; | 180 << bytes_already_sent_ << " in the stream."; |
161 ReadStatus status = FillBufferHelper(buf, buf_size, bytes_read); | 181 ReadStatus status = FillBufferHelper(buf, buf_size, bytes_read); |
162 switch (status) { | 182 switch (status) { |
163 case BUFFER_FILLED: | 183 case BUFFER_FILLED: |
164 return true; | 184 return true; |
(...skipping 18 matching lines...) Expand all Loading... |
183 void URLRequestSlowDownloadJob::CheckDoneStatus() { | 203 void URLRequestSlowDownloadJob::CheckDoneStatus() { |
184 if (should_finish_download_) { | 204 if (should_finish_download_) { |
185 VLOG(10) << __FUNCTION__ << " called w/ should_finish_download_ set."; | 205 VLOG(10) << __FUNCTION__ << " called w/ should_finish_download_ set."; |
186 DCHECK(NULL != buffer_); | 206 DCHECK(NULL != buffer_); |
187 int bytes_written = 0; | 207 int bytes_written = 0; |
188 ReadStatus status = FillBufferHelper(buffer_, buffer_size_, &bytes_written); | 208 ReadStatus status = FillBufferHelper(buffer_, buffer_size_, &bytes_written); |
189 DCHECK_EQ(BUFFER_FILLED, status); | 209 DCHECK_EQ(BUFFER_FILLED, status); |
190 buffer_ = NULL; // Release the reference. | 210 buffer_ = NULL; // Release the reference. |
191 SetStatus(net::URLRequestStatus()); | 211 SetStatus(net::URLRequestStatus()); |
192 NotifyReadComplete(bytes_written); | 212 NotifyReadComplete(bytes_written); |
| 213 } else if (should_error_download_) { |
 | 214 VLOG(10) << __FUNCTION__ << " called w/ should_error_download_ set."; |
| 215 NotifyDone(net::URLRequestStatus( |
| 216 net::URLRequestStatus::FAILED, net::ERR_CONNECTION_RESET)); |
193 } else { | 217 } else { |
194 MessageLoop::current()->PostDelayedTask( | 218 MessageLoop::current()->PostDelayedTask( |
195 FROM_HERE, | 219 FROM_HERE, |
196 base::Bind(&URLRequestSlowDownloadJob::CheckDoneStatus, | 220 base::Bind(&URLRequestSlowDownloadJob::CheckDoneStatus, |
197 weak_factory_.GetWeakPtr()), | 221 weak_factory_.GetWeakPtr()), |
198 base::TimeDelta::FromMilliseconds(100)); | 222 base::TimeDelta::FromMilliseconds(100)); |
199 } | 223 } |
200 } | 224 } |
201 | 225 |
202 // Public virtual version. | 226 // Public virtual version. |
203 void URLRequestSlowDownloadJob::GetResponseInfo(net::HttpResponseInfo* info) { | 227 void URLRequestSlowDownloadJob::GetResponseInfo(net::HttpResponseInfo* info) { |
204 // Forward to private const version. | 228 // Forward to private const version. |
205 GetResponseInfoConst(info); | 229 GetResponseInfoConst(info); |
206 } | 230 } |
207 | 231 |
208 URLRequestSlowDownloadJob::~URLRequestSlowDownloadJob() { | 232 URLRequestSlowDownloadJob::~URLRequestSlowDownloadJob() { |
209 DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO)); | 233 DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO)); |
210 pending_requests_.Get().erase(this); | 234 pending_requests_.Get().erase(this); |
211 } | 235 } |
212 | 236 |
213 // Private const version. | 237 // Private const version. |
214 void URLRequestSlowDownloadJob::GetResponseInfoConst( | 238 void URLRequestSlowDownloadJob::GetResponseInfoConst( |
215 net::HttpResponseInfo* info) const { | 239 net::HttpResponseInfo* info) const { |
216 // Send back mock headers. | 240 // Send back mock headers. |
217 std::string raw_headers; | 241 std::string raw_headers; |
218 if (LowerCaseEqualsASCII(kFinishDownloadUrl, | 242 if (LowerCaseEqualsASCII(kFinishDownloadUrl, |
| 243 request_->url().spec().c_str()) || |
| 244 LowerCaseEqualsASCII(kErrorDownloadUrl, |
219 request_->url().spec().c_str())) { | 245 request_->url().spec().c_str())) { |
220 raw_headers.append( | 246 raw_headers.append( |
221 "HTTP/1.1 200 OK\n" | 247 "HTTP/1.1 200 OK\n" |
222 "Content-type: text/plain\n"); | 248 "Content-type: text/plain\n"); |
223 } else { | 249 } else { |
224 raw_headers.append( | 250 raw_headers.append( |
225 "HTTP/1.1 200 OK\n" | 251 "HTTP/1.1 200 OK\n" |
226 "Content-type: application/octet-stream\n" | 252 "Content-type: application/octet-stream\n" |
227 "Cache-Control: max-age=0\n"); | 253 "Cache-Control: max-age=0\n"); |
228 | 254 |
229 if (LowerCaseEqualsASCII(kKnownSizeUrl, request_->url().spec().c_str())) { | 255 if (LowerCaseEqualsASCII(kKnownSizeUrl, request_->url().spec().c_str())) { |
230 raw_headers.append(base::StringPrintf( | 256 raw_headers.append(base::StringPrintf( |
231 "Content-Length: %d\n", | 257 "Content-Length: %d\n", |
232 kFirstDownloadSize + kSecondDownloadSize)); | 258 kFirstDownloadSize + kSecondDownloadSize)); |
233 } | 259 } |
234 } | 260 } |
235 | 261 |
236 // ParseRawHeaders expects \0 to end each header line. | 262 // ParseRawHeaders expects \0 to end each header line. |
237 ReplaceSubstringsAfterOffset(&raw_headers, 0, "\n", std::string("\0", 1)); | 263 ReplaceSubstringsAfterOffset(&raw_headers, 0, "\n", std::string("\0", 1)); |
238 info->headers = new net::HttpResponseHeaders(raw_headers); | 264 info->headers = new net::HttpResponseHeaders(raw_headers); |
239 } | 265 } |
240 | 266 |
241 bool URLRequestSlowDownloadJob::GetMimeType(std::string* mime_type) const { | 267 bool URLRequestSlowDownloadJob::GetMimeType(std::string* mime_type) const { |
242 net::HttpResponseInfo info; | 268 net::HttpResponseInfo info; |
243 GetResponseInfoConst(&info); | 269 GetResponseInfoConst(&info); |
244 return info.headers && info.headers->GetMimeType(mime_type); | 270 return info.headers && info.headers->GetMimeType(mime_type); |
245 } | 271 } |
246 | 272 |
247 } // namespace content | 273 } // namespace content |
OLD | NEW |