OLD | NEW |
---|---|
1 // Copyright 2015 The Chromium Authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include "net/cert/cert_net_fetcher.h" | |
6 | |
7 #include "base/logging.h" | |
8 #include "base/numerics/safe_math.h" | |
9 #include "base/stl_util.h" | |
10 #include "base/timer/timer.h" | |
11 #include "net/base/load_flags.h" | |
12 #include "net/url_request/redirect_info.h" | |
13 #include "net/url_request/url_request_context.h" | |
14 | |
15 // TODO(eroman): Add support for POST parameters. | |
16 // TODO(eroman): Add controls for bypassing the cache. | |
17 // TODO(eroman): Add a maximum number of in-flight jobs/requests. | |
18 | |
19 namespace net { | |
20 | |
21 namespace { | |
22 | |
23 // The error returned when the response body exceeded the size limit. | |
24 const Error kNetErrorResponseTooLarge = ERR_FILE_TOO_BIG; | |
25 | |
26 // The error returned when the URL could not be fetched because it was not an | |
27 // allowed scheme (http). | |
28 const Error kNetErrorNotHttpUrl = ERR_DISALLOWED_URL_SCHEME; | |
29 | |
30 // The error returned when the URL fetch did not complete in time. | |
31 const Error kNetErrorTimedOut = ERR_TIMED_OUT; | |
32 | |
33 // The error returned when the response was HTTP, but it did not have a | |
34 // status of 200/OK. | |
35 // TODO(eroman): Use a more specific error code. | |
36 const Error kNetErrorNot200HttpResponse = ERR_FAILED; | |
37 | |
38 // The size of the buffer used for reading the response body of the URLRequest. | |
39 const int kReadBufferSizeInBytes = 4096; | |
40 | |
41 // The maximum size in bytes for the response body when fetching a CRL. | |
42 const int kMaxResponseSizeInBytesForCrl = 5 * 1024 * 1024; | |
43 | |
44 // The maximum size in bytes for the response body when fetching an AIA URL | |
45 // (caIssuers/OCSP). | |
46 const int kMaxResponseSizeInBytesForAia = 64 * 1024; | |
47 | |
48 // The default timeout in seconds for fetch requests. | |
49 const int kTimeoutSeconds = 15; | |
50 | |
51 // Policy for which URLs are allowed to be fetched. This is called both for the | |
52 // initial URL and for each redirect. Returns OK on success or a net error | |
53 // code on failure. | |
54 Error CanFetchUrl(const GURL& url) { | |
55 if (!url.SchemeIs("http")) | |
56 return kNetErrorNotHttpUrl; | |
57 return OK; | |
58 } | |
59 | |
60 base::TimeDelta GetTimeout(int timeout_milliseconds) { | |
61 if (timeout_milliseconds == CertNetFetcher::DEFAULT) | |
62 return base::TimeDelta::FromSeconds(kTimeoutSeconds); | |
63 return base::TimeDelta::FromMilliseconds(timeout_milliseconds); | |
64 } | |
65 | |
66 size_t GetMaxResponseBytes(int max_response_bytes, | |
67 size_t default_max_response_bytes) { | |
68 if (max_response_bytes == CertNetFetcher::DEFAULT) | |
69 return default_max_response_bytes; | |
70 | |
71 // Ensure that the specified limit is not negative, and cannot result in an | |
72 // overflow while reading. | |
73 base::CheckedNumeric<size_t> check(max_response_bytes); | |
74 check += kReadBufferSizeInBytes; | |
75 DCHECK(check.IsValid()); | |
76 | |
77 return max_response_bytes; | |
78 } | |
79 | |
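As a point of reference for the overflow comment in GetMaxResponseBytes() above: the DCHECK guards the later size computation in ConsumeBytesRead(), where at most kReadBufferSizeInBytes more bytes are added to a body that is already within the limit. Below is a minimal standalone sketch of the same guard written against plain standard C++ rather than base::CheckedNumeric; the constant and helper names are illustrative, not part of this CL.

#include <cassert>
#include <cstddef>
#include <limits>

// Illustrative constant mirroring kReadBufferSizeInBytes above.
constexpr size_t kReadBufferSize = 4096;

// Returns true when |max_response_bytes| is non-negative and
// max_response_bytes + kReadBufferSize cannot wrap around size_t, which is
// what the base::CheckedNumeric check above verifies.
bool IsValidMaxResponseBytes(int max_response_bytes) {
  if (max_response_bytes < 0)
    return false;
  return static_cast<size_t>(max_response_bytes) <=
         std::numeric_limits<size_t>::max() - kReadBufferSize;
}

int main() {
  assert(IsValidMaxResponseBytes(64 * 1024));  // the AIA default is fine
  assert(!IsValidMaxResponseBytes(-1));        // negative limits are invalid
  return 0;
}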
80 enum HttpMethod { | |
81 HTTP_METHOD_GET, | |
82 HTTP_METHOD_POST, | |
83 }; | |
84 | |
85 } // namespace | |
86 | |
87 // CertNetFetcher::Request tracks an outstanding call to Fetch(). | |
88 struct CertNetFetcher::Request { | |
89 Request(FetchCallback callback, Job* job) : callback(callback), job(job) {} | |
90 | |
91 // The callback to invoke when the request has completed. | |
92 FetchCallback callback; | |
93 | |
94 // A non-owned pointer to the job that is executing the request (and in effect | |
95 // owns |this|). | |
96 Job* job; | |
97 | |
98 private: | |
99 DISALLOW_COPY_AND_ASSIGN(Request); | |
100 }; | |
101 | |
102 struct CertNetFetcher::RequestParams { | |
103 RequestParams(); | |
104 | |
105 bool operator<(const RequestParams& other) const; | |
106 | |
107 GURL url; | |
108 HttpMethod http_method; | |
109 size_t max_response_bytes; | |
110 | |
111 // If set to a value <= 0, this means "no timeout". | |
112 base::TimeDelta timeout; | |
113 | |
114 // IMPORTANT: When adding fields to this structure, update operator<(). | |
115 | |
116 private: | |
117 DISALLOW_COPY_AND_ASSIGN(RequestParams); | |
118 }; | |
119 | |
120 CertNetFetcher::RequestParams::RequestParams() | |
121 : http_method(HTTP_METHOD_GET), max_response_bytes(0) { | |
122 } | |
123 | |
124 bool CertNetFetcher::RequestParams::operator<( | |
125 const RequestParams& other) const { | |
126 if (url != other.url) | |
127 return url < other.url; | |
128 if (http_method != other.http_method) | |
129 return http_method < other.http_method; | |
130 if (max_response_bytes != other.max_response_bytes) | |
131 return max_response_bytes < other.max_response_bytes; | |
132 if (timeout != other.timeout) | |
133 return timeout < other.timeout; | |
134 return false; | |
Ryan Sleevi (2015/03/25 06:16:37): Normally when writing conditional comparators, I l…
eroman (2015/03/26 03:50:49): Done.
135 } | |
136 | |
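Regarding the (truncated) reviewer note above about conditional comparators: a common alternative to the chained if/return form is the std::tie idiom. Here is a minimal standalone sketch of that idiom; ExampleParams and its plain std::string/int fields are stand-ins for RequestParams, GURL and base::TimeDelta, not code from this CL.

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <string>
#include <tuple>

struct ExampleParams {
  std::string url;            // stand-in for GURL
  int http_method;            // stand-in for the HttpMethod enum
  size_t max_response_bytes;
  int64_t timeout_ms;         // stand-in for base::TimeDelta

  bool operator<(const ExampleParams& other) const {
    // std::tuple::operator< compares lexicographically, which is exactly
    // what the chained if/return comparisons above compute by hand.
    return std::tie(url, http_method, max_response_bytes, timeout_ms) <
           std::tie(other.url, other.http_method, other.max_response_bytes,
                    other.timeout_ms);
  }
};

int main() {
  ExampleParams a{"http://crl.example/a.crl", 0, 64 * 1024, 15000};
  ExampleParams b{"http://crl.example/b.crl", 0, 64 * 1024, 15000};
  assert(a < b && !(b < a));  // ordered by url first, like RequestParams
  return 0;
}

Because the tuple comparison is lexicographic, adding a field to the struct only requires extending both std::tie calls rather than adding another if/return pair.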
137 // CertNetFetcher::Job tracks an outstanding URLRequest as well as all of the | |
138 // pending requests for it. | |
139 class CertNetFetcher::Job : public URLRequest::Delegate { | |
140 public: | |
141 Job(scoped_ptr<RequestParams> request_params, CertNetFetcher* parent); | |
142 ~Job() override; | |
143 | |
144 // Cancels the job and all requests attached to it. No callbacks will be | |
145 // invoked following cancellation. | |
146 void Cancel(); | |
147 | |
148 const RequestParams& request_params() const { return *request_params_; } | |
149 | |
150 // Attaches a request to the job. When the job completes it will invoke | |
151 // |callback|. | |
152 RequestId AddRequest(const FetchCallback& callback); | |
153 | |
154 // Removes |request| from the job and deletes it. | |
155 void CancelRequest(RequestId request); | |
156 | |
157 // Creates and starts a URLRequest for the job. After the request has | |
158 // completed, OnJobCompleted() will be invoked and all the registered requests | |
159 // notified of completion. | |
160 void StartURLRequest(URLRequestContext* context); | |
161 | |
162 private: | |
163 // The pointers in RequestList are owned by the Job. | |
164 using RequestList = std::vector<Request*>; | |
165 | |
166 // Implementation of URLRequest::Delegate | |
167 void OnReceivedRedirect(URLRequest* request, | |
168 const RedirectInfo& redirect_info, | |
169 bool* defer_redirect) override; | |
170 void OnResponseStarted(URLRequest* request) override; | |
171 void OnReadCompleted(URLRequest* request, int bytes_read) override; | |
172 | |
173 // Clears the URLRequest and timer. Helper for doing work common to | |
174 // cancellation and job completion. | |
175 void Stop(); | |
176 | |
177 // Reads as much data as available from the |request|. | |
178 void ReadBody(URLRequest* request); | |
179 | |
180 // Helper to copy the partial bytes read from the read IOBuffer to an | |
181 // aggregated buffer. | |
182 bool ConsumeBytesRead(URLRequest* request, int num_bytes); | |
183 | |
184 // Called once the job has exceeded its deadline. | |
185 void OnTimeout(); | |
186 | |
187 // Called when the URLRequest has completed (either success or failure). | |
188 void OnUrlRequestCompleted(URLRequest* request); | |
189 | |
190 // Called when the Job has completed. The job may finish in response to a | |
191 // timeout, an invalid URL, or the URLRequest completing. By the time this | |
192 // method is called, the response variables have been assigned | |
193 // (result_net_error_ et al). | |
194 void OnJobCompleted(); | |
195 | |
196 // The requests attached to this job. | |
197 RequestList requests_; | |
198 | |
199 // The input parameters for starting a URLRequest. | |
200 scoped_ptr<RequestParams> request_params_; | |
201 | |
202 // The URLRequest response information. | |
203 std::vector<uint8_t> response_body_; | |
204 Error result_net_error_; | |
205 | |
206 scoped_ptr<URLRequest> url_request_; | |
207 scoped_refptr<IOBuffer> read_buffer_; | |
208 | |
209 // Used to time out the job when the URLRequest takes too long. This timer | |
210 // is also used for notifying a failure to start the URLRequest. | |
211 base::OneShotTimer<Job> timer_; | |
212 | |
213 // Non-owned pointer to the CertNetFetcher that created this job. | |
214 CertNetFetcher* parent_; | |
215 | |
216 DISALLOW_COPY_AND_ASSIGN(Job); | |
217 }; | |
218 | |
219 CertNetFetcher::Job::Job(scoped_ptr<RequestParams> request_params, | |
220 CertNetFetcher* parent) | |
221 : request_params_(request_params.Pass()), | |
222 result_net_error_(ERR_IO_PENDING), | |
223 parent_(parent) { | |
224 } | |
225 | |
226 CertNetFetcher::Job::~Job() { | |
227 Cancel(); | |
228 } | |
229 | |
230 void CertNetFetcher::Job::Cancel() { | |
231 parent_ = NULL; | |
Ryan Sleevi (2015/03/25 06:16:37): nullptr
eroman (2015/03/26 03:50:49): Done.
232 STLDeleteElements(&requests_); | |
233 Stop(); | |
234 } | |
235 | |
236 CertNetFetcher::RequestId CertNetFetcher::Job::AddRequest( | |
237 const FetchCallback& callback) { | |
238 requests_.push_back(new Request(callback, this)); | |
239 return requests_.back(); | |
240 } | |
241 | |
242 void CertNetFetcher::Job::CancelRequest(RequestId request) { | |
243 scoped_ptr<Job> delete_this; | |
244 | |
245 RequestList::iterator it = | |
246 std::find(requests_.begin(), requests_.end(), request); | |
247 DCHECK(it != requests_.end()); | |
Ryan Sleevi (2015/03/25 06:16:37): Should this be a CHECK() instead? Just 248 makes…
eroman (2015/03/26 03:50:49): Done.
248 requests_.erase(it); | |
249 delete request; | |
250 | |
251 // If there are no longer any requests attached to the job then | |
252 // cancel and delete it. | |
253 if (requests_.empty() && parent_) | |
254 delete_this = parent_->RemoveJob(this); | |
255 } | |
256 | |
257 void CertNetFetcher::Job::StartURLRequest(URLRequestContext* context) { | |
258 Error error = CanFetchUrl(request_params_->url); | |
259 if (error != OK) { | |
260 result_net_error_ = error; | |
261 // The CertNetFetcher's API contract is that requests always complete | |
262 // asynchronously. Use the timer class so the task is easily cancelled. | |
263 timer_.Start(FROM_HERE, base::TimeDelta(), this, &Job::OnJobCompleted); | |
264 return; | |
265 } | |
266 | |
267 // Start the URLRequest. | |
268 read_buffer_ = new IOBuffer(kReadBufferSizeInBytes); | |
269 url_request_ = context->CreateRequest(request_params_->url, DEFAULT_PRIORITY, | |
270 this, NULL); | |
Ryan Sleevi (2015/03/25 06:16:37): nullptr
eroman (2015/03/26 03:50:49): Done.
271 if (request_params_->http_method == HTTP_METHOD_POST) | |
272 url_request_->set_method("POST"); | |
273 url_request_->SetLoadFlags(LOAD_DO_NOT_SAVE_COOKIES | | |
274 LOAD_DO_NOT_SEND_COOKIES); | |
275 url_request_->Start(); | |
276 | |
277 // Start a timer to limit how long the job runs for. | |
278 if (request_params_->timeout > base::TimeDelta()) | |
279 timer_.Start(FROM_HERE, request_params_->timeout, this, &Job::OnTimeout); | |
280 } | |
281 | |
282 void CertNetFetcher::Job::OnReceivedRedirect(URLRequest* request, | |
283 const RedirectInfo& redirect_info, | |
284 bool* defer_redirect) { | |
285 DCHECK_EQ(url_request_.get(), request); | |
286 | |
287 // Ensure that the new URL matches the policy. | |
288 Error error = CanFetchUrl(redirect_info.new_url); | |
289 if (error != OK) { | |
290 request->CancelWithError(error); | |
291 OnUrlRequestCompleted(request); | |
292 return; | |
293 } | |
294 } | |
295 | |
296 void CertNetFetcher::Job::OnResponseStarted(URLRequest* request) { | |
297 DCHECK_EQ(url_request_.get(), request); | |
298 | |
299 if (!request->status().is_success()) { | |
300 OnUrlRequestCompleted(request); | |
301 return; | |
302 } | |
303 | |
304 // In practice all URLs fetched are HTTP, but check anyway as a defensive | |
305 // measure in case the policy is ever changed. | |
306 if (request->GetResponseCode() != 200) { | |
307 request->CancelWithError(kNetErrorNot200HttpResponse); | |
308 OnUrlRequestCompleted(request); | |
309 return; | |
310 } | |
311 | |
312 ReadBody(request); | |
313 } | |
314 | |
315 void CertNetFetcher::Job::OnReadCompleted(URLRequest* request, int bytes_read) { | |
316 DCHECK_EQ(url_request_.get(), request); | |
317 | |
318 // Keep reading the response body. | |
319 if (ConsumeBytesRead(request, bytes_read)) | |
320 ReadBody(request); | |
321 } | |
322 | |
323 void CertNetFetcher::Job::Stop() { | |
324 timer_.Stop(); | |
325 url_request_.reset(); | |
326 } | |
327 | |
328 void CertNetFetcher::Job::ReadBody(URLRequest* request) { | |
329 // Read as many bytes as are available synchronously. | |
330 int num_bytes; | |
331 while ( | |
332 request->Read(read_buffer_.get(), kReadBufferSizeInBytes, &num_bytes)) { | |
333 if (!ConsumeBytesRead(request, num_bytes)) | |
334 return; | |
335 } | |
336 | |
337 // Check whether the read failed synchronously. | |
338 if (!request->status().is_io_pending()) | |
339 OnUrlRequestCompleted(request); | |
340 return; | |
341 } | |
342 | |
343 bool CertNetFetcher::Job::ConsumeBytesRead(URLRequest* request, int num_bytes) { | |
344 if (num_bytes <= 0) { | |
345 // Error while reading, or EOF. | |
346 OnUrlRequestCompleted(request); | |
347 return false; | |
348 } | |
349 | |
350 // Enforce maximum size bound. | |
351 if (num_bytes + response_body_.size() > request_params_->max_response_bytes) { | |
352 request->CancelWithError(kNetErrorResponseTooLarge); | |
353 OnUrlRequestCompleted(request); | |
354 return false; | |
355 } | |
356 | |
357 // Append the data to |response_body_|. | |
358 response_body_.reserve(response_body_.size() + num_bytes); | |
359 response_body_.insert(response_body_.end(), read_buffer_->data(), | |
360 read_buffer_->data() + num_bytes); | |
361 return true; | |
362 } | |
363 | |
364 void CertNetFetcher::Job::OnTimeout() { | |
365 result_net_error_ = kNetErrorTimedOut; | |
366 url_request_->CancelWithError(result_net_error_); | |
367 OnJobCompleted(); | |
368 } | |
369 | |
370 void CertNetFetcher::Job::OnUrlRequestCompleted(URLRequest* request) { | |
371 DCHECK_EQ(request, url_request_.get()); | |
372 | |
373 if (request->status().is_success()) | |
374 result_net_error_ = OK; | |
375 else | |
376 result_net_error_ = static_cast<Error>(request->status().error()); | |
377 | |
378 OnJobCompleted(); | |
379 } | |
380 | |
381 void CertNetFetcher::Job::OnJobCompleted() { | |
382 // Stop the timer and clear the URLRequest. | |
383 Stop(); | |
384 | |
385 // Invoking the callbacks is subtle as state may be mutated while iterating | |
386 // through the callbacks: | |
387 // | |
388 // * The parent CertNetFetcher may be deleted | |
389 // * Requests in this job may be cancelled | |
390 | |
391 scoped_ptr<Job> delete_this = parent_->RemoveJob(this); | |
392 parent_->SetCurrentlyCompletingJob(this); | |
393 | |
394 while (!requests_.empty()) { | |
395 scoped_ptr<Request> request(requests_.front()); | |
396 requests_.erase(requests_.begin()); | |
397 request->callback.Run(result_net_error_, response_body_); | |
398 } | |
399 | |
400 if (parent_) | |
401 parent_->SetCurrentlyCompletingJob(NULL); | |
Ryan Sleevi (2015/03/25 06:16:37): nullptr
eroman (2015/03/26 03:50:49): Done.
402 } | |
403 | |
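The "invoking the callbacks is subtle" comment in OnJobCompleted() above describes a pop-before-run dispatch loop. Below is a standalone sketch of that pattern, using std::function and std::unique_ptr as stand-ins for the CL's Request/FetchCallback types; the names are illustrative, not part of this CL.

#include <functional>
#include <iostream>
#include <memory>
#include <vector>

int main() {
  std::vector<std::unique_ptr<std::function<void()>>> pending;
  auto add = [&pending](std::function<void()> fn) {
    pending.push_back(std::make_unique<std::function<void()>>(std::move(fn)));
  };

  add([] { std::cout << "callback 1\n"; });
  add([&pending] {
    // Simulates a callback that cancels the remaining requests.
    pending.clear();
    std::cout << "callback 2 cancelled the rest\n";
  });
  add([] { std::cout << "callback 3 (never runs)\n"; });

  // Pop each callback before invoking it, as OnJobCompleted() does, so a
  // callback that mutates |pending| never invalidates an iterator or
  // reference held by this loop.
  while (!pending.empty()) {
    std::unique_ptr<std::function<void()>> cb = std::move(pending.front());
    pending.erase(pending.begin());
    (*cb)();
  }
  return 0;
}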
404 CertNetFetcher::CertNetFetcher(URLRequestContext* context) | |
405 : currently_completing_job_(NULL), context_(context) { | |
Ryan Sleevi (2015/03/25 06:16:37): nullptr
eroman (2015/03/26 03:50:49): Done.
406 } | |
407 | |
408 CertNetFetcher::~CertNetFetcher() { | |
409 STLDeleteElements(&jobs_); | |
410 if (currently_completing_job_) | |
411 currently_completing_job_->Cancel(); | |
412 } | |
413 | |
414 void CertNetFetcher::CancelRequest(RequestId request) { | |
415 DCHECK(thread_checker_.CalledOnValidThread()); | |
416 | |
417 request->job->CancelRequest(request); | |
418 } | |
419 | |
420 CertNetFetcher::RequestId CertNetFetcher::FetchCaIssuers( | |
421 const GURL& url, | |
422 int timeout_milliseconds, | |
423 int max_response_bytes, | |
424 const FetchCallback& callback) { | |
425 scoped_ptr<RequestParams> request_params(new RequestParams); | |
426 | |
427 request_params->url = url; | |
428 request_params->http_method = HTTP_METHOD_GET; | |
429 request_params->timeout = GetTimeout(timeout_milliseconds); | |
430 request_params->max_response_bytes = | |
431 GetMaxResponseBytes(max_response_bytes, kMaxResponseSizeInBytesForAia); | |
432 | |
433 return Fetch(request_params.Pass(), callback); | |
434 } | |
435 | |
436 CertNetFetcher::RequestId CertNetFetcher::FetchCrl( | |
437 const GURL& url, | |
438 int timeout_milliseconds, | |
439 int max_response_bytes, | |
440 const FetchCallback& callback) { | |
441 scoped_ptr<RequestParams> request_params(new RequestParams); | |
442 | |
443 request_params->url = url; | |
444 request_params->http_method = HTTP_METHOD_GET; | |
445 request_params->timeout = GetTimeout(timeout_milliseconds); | |
446 request_params->max_response_bytes = | |
447 GetMaxResponseBytes(max_response_bytes, kMaxResponseSizeInBytesForCrl); | |
448 | |
449 return Fetch(request_params.Pass(), callback); | |
450 } | |
451 | |
452 CertNetFetcher::RequestId CertNetFetcher::FetchOcsp( | |
453 const GURL& url, | |
454 int timeout_milliseconds, | |
455 int max_response_bytes, | |
456 const FetchCallback& callback) { | |
457 scoped_ptr<RequestParams> request_params(new RequestParams); | |
458 | |
459 request_params->url = url; | |
460 request_params->http_method = HTTP_METHOD_GET; | |
461 request_params->timeout = GetTimeout(timeout_milliseconds); | |
462 request_params->max_response_bytes = | |
463 GetMaxResponseBytes(max_response_bytes, kMaxResponseSizeInBytesForAia); | |
464 | |
465 return Fetch(request_params.Pass(), callback); | |
466 } | |
467 | |
468 bool CertNetFetcher::JobComparator::operator()(const Job* job1, | |
469 const Job* job2) const { | |
470 return job1->request_params() < job2->request_params(); | |
471 } | |
472 | |
473 CertNetFetcher::RequestId CertNetFetcher::Fetch( | |
474 scoped_ptr<RequestParams> request_params, | |
475 const FetchCallback& callback) { | |
476 DCHECK(thread_checker_.CalledOnValidThread()); | |
477 | |
478 // If there is an in-progress job that matches the request parameters use it. | |
479 // Otherwise start a new job. | |
480 Job* job = FindJob(*request_params); | |
481 | |
482 if (!job) { | |
483 job = new Job(request_params.Pass(), this); | |
484 jobs_.insert(job); | |
485 job->StartURLRequest(context_); | |
486 } | |
487 | |
488 return job->AddRequest(callback); | |
489 } | |
490 | |
491 struct CertNetFetcher::JobToRequestParamsComparator { | |
492 bool operator()(const Job* job, | |
493 const CertNetFetcher::RequestParams& value) const { | |
494 return job->request_params() < value; | |
495 } | |
496 }; | |
497 | |
498 CertNetFetcher::Job* CertNetFetcher::FindJob(const RequestParams& params) { | |
499 DCHECK(thread_checker_.CalledOnValidThread()); | |
500 | |
501 // The JobSet is kept in sorted order so items can be found using binary | |
502 // search. | |
503 JobSet::iterator it = std::lower_bound(jobs_.begin(), jobs_.end(), params, | |
504 JobToRequestParamsComparator()); | |
505 if (it != jobs_.end() && !(params < (*it)->request_params())) | |
Ryan Sleevi (2015/03/25 06:16:37): Is this second conditional meant to be an equality check?
eroman (2015/03/26 03:50:49): It is essentially an equality check. std::lower_bound…
506 return *it; | |
507 return NULL; | |
Ryan Sleevi (2015/03/25 06:16:37): nullptr
eroman (2015/03/26 03:50:49): Done.
508 } | |
509 | |
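On the reviewer exchange above about the second conditional in FindJob(): with std::lower_bound, "it != end && !(key < *it)" is the standard equivalence test for a strict weak ordering. A tiny self-contained sketch with plain ints in place of Job/RequestParams (not code from this CL):

#include <algorithm>
#include <cassert>
#include <vector>

int main() {
  std::vector<int> sorted = {1, 3, 5, 7};

  auto find_equal = [&sorted](int key) -> const int* {
    std::vector<int>::const_iterator it =
        std::lower_bound(sorted.begin(), sorted.end(), key);
    // lower_bound() returns the first element with !(*it < key). If, in
    // addition, !(key < *it), neither value orders before the other, so they
    // are equivalent under the ordering -- the same test FindJob() uses.
    if (it != sorted.end() && !(key < *it))
      return &*it;
    return nullptr;
  };

  assert(find_equal(5) != nullptr);  // present in the sorted set
  assert(find_equal(4) == nullptr);  // absent: lower_bound() lands on 5
  return 0;
}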
510 scoped_ptr<CertNetFetcher::Job> CertNetFetcher::RemoveJob(Job* job) { | |
511 DCHECK(thread_checker_.CalledOnValidThread()); | |
512 bool erased_job = jobs_.erase(job) == 1; | |
513 DCHECK(erased_job); | |
514 return scoped_ptr<Job>(job); | |
515 } | |
516 | |
517 void CertNetFetcher::SetCurrentlyCompletingJob(Job* job) { | |
518 currently_completing_job_ = job; | |
519 } | |
520 | |
521 } // namespace net | |