// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "net/test/url_request/url_request_slow_download_job.h"

#include "base/bind.h"
#include "base/compiler_specific.h"
#include "base/logging.h"
#include "base/message_loop/message_loop.h"
#include "base/strings/string_util.h"
#include "base/strings/stringprintf.h"
#include "net/base/io_buffer.h"
#include "net/base/net_errors.h"
#include "net/http/http_response_headers.h"
#include "net/url_request/url_request.h"
#include "net/url_request/url_request_filter.h"
#include "url/gurl.h"

namespace net {

const char URLRequestSlowDownloadJob::kUnknownSizeUrl[] =
    "http://url.handled.by.slow.download/download-unknown-size";
const char URLRequestSlowDownloadJob::kKnownSizeUrl[] =
    "http://url.handled.by.slow.download/download-known-size";
const char URLRequestSlowDownloadJob::kFinishDownloadUrl[] =
    "http://url.handled.by.slow.download/download-finish";
const char URLRequestSlowDownloadJob::kErrorDownloadUrl[] =
    "http://url.handled.by.slow.download/download-error";

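// The first kFirstDownloadSize bytes are streamed as soon as a job starts;
// the remaining kSecondDownloadSize bytes are sent only after
// kFinishDownloadUrl has been requested (see FillBufferHelper below).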
const int URLRequestSlowDownloadJob::kFirstDownloadSize = 1024 * 35;
const int URLRequestSlowDownloadJob::kSecondDownloadSize = 1024 * 10;

// static
base::LazyInstance<URLRequestSlowDownloadJob::SlowJobsSet>::Leaky
    URLRequestSlowDownloadJob::pending_requests_ = LAZY_INSTANCE_INITIALIZER;

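// Defers the real work to StartAsync() on the current message loop so that
// the job starts asynchronously, like a real network job.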
void URLRequestSlowDownloadJob::Start() {
  base::MessageLoop::current()->PostTask(
      FROM_HERE, base::Bind(&URLRequestSlowDownloadJob::StartAsync,
                            weak_factory_.GetWeakPtr()));
}

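// Registers this job as the handler for the four test URLs so that any
// URLRequest for them is serviced by URLRequestSlowDownloadJob.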
// static
void URLRequestSlowDownloadJob::AddUrlHandler() {
  URLRequestFilter* filter = URLRequestFilter::GetInstance();
  filter->AddUrlHandler(GURL(kUnknownSizeUrl),
                        &URLRequestSlowDownloadJob::Factory);
  filter->AddUrlHandler(GURL(kKnownSizeUrl),
                        &URLRequestSlowDownloadJob::Factory);
  filter->AddUrlHandler(GURL(kFinishDownloadUrl),
                        &URLRequestSlowDownloadJob::Factory);
  filter->AddUrlHandler(GURL(kErrorDownloadUrl),
                        &URLRequestSlowDownloadJob::Factory);
}

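// Creates a job for |request|. Jobs for the download URLs are tracked in
// pending_requests_ so that the control URLs can later finish or error them;
// jobs for the control URLs themselves are not tracked.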
// static
URLRequestJob* URLRequestSlowDownloadJob::Factory(
    URLRequest* request,
    NetworkDelegate* network_delegate,
    const std::string& scheme) {
  URLRequestSlowDownloadJob* job =
      new URLRequestSlowDownloadJob(request, network_delegate);
  if (request->url().spec() != kFinishDownloadUrl &&
      request->url().spec() != kErrorDownloadUrl)
    pending_requests_.Get().insert(job);
  return job;
}

// static
size_t URLRequestSlowDownloadJob::NumberOutstandingRequests() {
  return pending_requests_.Get().size();
}

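// Marks every outstanding job so that its next CheckDoneStatus() pass sends
// the second chunk and completes the response.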
// static
void URLRequestSlowDownloadJob::FinishPendingRequests() {
  typedef std::set<URLRequestSlowDownloadJob*> JobList;
  for (JobList::iterator it = pending_requests_.Get().begin();
       it != pending_requests_.Get().end(); ++it) {
    (*it)->set_should_finish_download();
  }
}

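// Marks every outstanding job so that its next CheckDoneStatus() pass fails
// the request with ERR_CONNECTION_RESET.
// static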
void URLRequestSlowDownloadJob::ErrorPendingRequests() {
  typedef std::set<URLRequestSlowDownloadJob*> JobList;
  for (JobList::iterator it = pending_requests_.Get().begin();
       it != pending_requests_.Get().end(); ++it) {
    (*it)->set_should_error_download();
  }
}

URLRequestSlowDownloadJob::URLRequestSlowDownloadJob(
    URLRequest* request,
    NetworkDelegate* network_delegate)
    : URLRequestJob(request, network_delegate),
      bytes_already_sent_(0),
      should_error_download_(false),
      should_finish_download_(false),
      buffer_size_(0),
      weak_factory_(this) {
}

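// Handles the control URLs before reporting headers: visiting the finish or
// error URL flips the corresponding flag on every outstanding download job.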
void URLRequestSlowDownloadJob::StartAsync() {
  if (LowerCaseEqualsASCII(kFinishDownloadUrl, request_->url().spec().c_str()))
    URLRequestSlowDownloadJob::FinishPendingRequests();
  if (LowerCaseEqualsASCII(kErrorDownloadUrl, request_->url().spec().c_str()))
    URLRequestSlowDownloadJob::ErrorPendingRequests();

  NotifyHeadersComplete();
}

// ReadRawData and CheckDoneStatus together implement a state machine.
// ReadRawData may be called arbitrarily by the network stack. It responds by:
//   * If there are bytes remaining in the first chunk, they are returned.
//   [No bytes remaining in the first chunk.]
//   * If should_finish_download_ is not set, it returns IO_PENDING and starts
//     calling CheckDoneStatus on a regular timer.
//   [should_finish_download_ set.]
//   * If there are bytes remaining in the second chunk, they are filled.
//   * Otherwise, *bytes_read is set to 0 to indicate the end of the request.
// CheckDoneStatus is called on a regular basis, in the specific case where we
// have transmitted all of the first chunk and none of the second. Once
// should_finish_download_ becomes set, it "completes" the ReadRawData call
// that originally scheduled the repeated CheckDoneStatus() calls.
//
// FillBufferHelper does the actual work of figuring out where in the state
// machine we are and how the buffer should be filled. It returns an enum
// indicating the state of the read.
URLRequestSlowDownloadJob::ReadStatus
URLRequestSlowDownloadJob::FillBufferHelper(IOBuffer* buf,
                                            int buf_size,
                                            int* bytes_written) {
  if (bytes_already_sent_ < kFirstDownloadSize) {
    int bytes_to_write =
        std::min(kFirstDownloadSize - bytes_already_sent_, buf_size);
    for (int i = 0; i < bytes_to_write; ++i) {
      buf->data()[i] = '*';
    }
    *bytes_written = bytes_to_write;
    bytes_already_sent_ += bytes_to_write;
    return BUFFER_FILLED;
  }

  if (!should_finish_download_)
    return REQUEST_BLOCKED;

  if (bytes_already_sent_ < kFirstDownloadSize + kSecondDownloadSize) {
    int bytes_to_write =
        std::min(kFirstDownloadSize + kSecondDownloadSize - bytes_already_sent_,
                 buf_size);
    for (int i = 0; i < bytes_to_write; ++i) {
      buf->data()[i] = '*';
    }
    *bytes_written = bytes_to_write;
    bytes_already_sent_ += bytes_to_write;
    return BUFFER_FILLED;
  }

  return REQUEST_COMPLETE;
}

bool URLRequestSlowDownloadJob::ReadRawData(IOBuffer* buf,
                                            int buf_size,
                                            int* bytes_read) {
  if (LowerCaseEqualsASCII(kFinishDownloadUrl,
                           request_->url().spec().c_str()) ||
      LowerCaseEqualsASCII(kErrorDownloadUrl, request_->url().spec().c_str())) {
    VLOG(10) << __FUNCTION__ << " called w/ kFinish/ErrorDownloadUrl.";
    *bytes_read = 0;
    return true;
  }

  VLOG(10) << __FUNCTION__ << " called at position " << bytes_already_sent_
           << " in the stream.";
  ReadStatus status = FillBufferHelper(buf, buf_size, bytes_read);
  switch (status) {
    case BUFFER_FILLED:
      return true;
    case REQUEST_BLOCKED:
      buffer_ = buf;
      buffer_size_ = buf_size;
      SetStatus(URLRequestStatus(URLRequestStatus::IO_PENDING, 0));
      base::MessageLoop::current()->PostDelayedTask(
          FROM_HERE, base::Bind(&URLRequestSlowDownloadJob::CheckDoneStatus,
                                weak_factory_.GetWeakPtr()),
          base::TimeDelta::FromMilliseconds(100));
      return false;
    case REQUEST_COMPLETE:
      *bytes_read = 0;
      return true;
  }
  NOTREACHED();
  return true;
}

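// Runs every 100 ms while a read is blocked after the first chunk: completes
// the pending read when the download should finish, fails the request when it
// should error out, and otherwise re-posts itself to keep polling.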
void URLRequestSlowDownloadJob::CheckDoneStatus() {
  if (should_finish_download_) {
    VLOG(10) << __FUNCTION__ << " called w/ should_finish_download_ set.";
    DCHECK(NULL != buffer_.get());
    int bytes_written = 0;
    ReadStatus status =
        FillBufferHelper(buffer_.get(), buffer_size_, &bytes_written);
    DCHECK_EQ(BUFFER_FILLED, status);
    buffer_ = NULL;  // Release the reference.
    SetStatus(URLRequestStatus());
    NotifyReadComplete(bytes_written);
  } else if (should_error_download_) {
    VLOG(10) << __FUNCTION__ << " called w/ should_error_download_ set.";
    NotifyDone(
        URLRequestStatus(URLRequestStatus::FAILED, ERR_CONNECTION_RESET));
  } else {
    base::MessageLoop::current()->PostDelayedTask(
        FROM_HERE, base::Bind(&URLRequestSlowDownloadJob::CheckDoneStatus,
                              weak_factory_.GetWeakPtr()),
        base::TimeDelta::FromMilliseconds(100));
  }
}

// Public virtual version.
void URLRequestSlowDownloadJob::GetResponseInfo(HttpResponseInfo* info) {
  // Forward to private const version.
  GetResponseInfoConst(info);
}

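// Removes this job from pending_requests_ so FinishPendingRequests() and
// ErrorPendingRequests() never touch a destroyed job.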
URLRequestSlowDownloadJob::~URLRequestSlowDownloadJob() {
  pending_requests_.Get().erase(this);
}

// Private const version.
void URLRequestSlowDownloadJob::GetResponseInfoConst(
    HttpResponseInfo* info) const {
  // Send back mock headers.
  std::string raw_headers;
  if (LowerCaseEqualsASCII(kFinishDownloadUrl,
                           request_->url().spec().c_str()) ||
      LowerCaseEqualsASCII(kErrorDownloadUrl, request_->url().spec().c_str())) {
    raw_headers.append(
        "HTTP/1.1 200 OK\n"
        "Content-type: text/plain\n");
  } else {
    raw_headers.append(
        "HTTP/1.1 200 OK\n"
        "Content-type: application/octet-stream\n"
        "Cache-Control: max-age=0\n");

    if (LowerCaseEqualsASCII(kKnownSizeUrl, request_->url().spec().c_str())) {
      raw_headers.append(base::StringPrintf(
          "Content-Length: %d\n", kFirstDownloadSize + kSecondDownloadSize));
    }
  }

  // ParseRawHeaders expects \0 to end each header line.
  ReplaceSubstringsAfterOffset(&raw_headers, 0, "\n", std::string("\0", 1));
  info->headers = new HttpResponseHeaders(raw_headers);
}

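// Reports the MIME type from the same mock headers generated for the
// response.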
bool URLRequestSlowDownloadJob::GetMimeType(std::string* mime_type) const {
  HttpResponseInfo info;
  GetResponseInfoConst(&info);
  return info.headers.get() && info.headers->GetMimeType(mime_type);
}

}  // namespace net