Index: net/cert/cert_net_fetcher.cc |
diff --git a/net/cert/cert_net_fetcher.cc b/net/cert/cert_net_fetcher.cc |
new file mode 100644 |
index 0000000000000000000000000000000000000000..5638a22abe9a20de0156411f26c68401c9e68134 |
--- /dev/null |
+++ b/net/cert/cert_net_fetcher.cc |
@@ -0,0 +1,521 @@ |
+// Copyright 2015 The Chromium Authors. All rights reserved. |
+// Use of this source code is governed by a BSD-style license that can be |
+// found in the LICENSE file. |
+ |
+#include "net/cert/cert_net_fetcher.h" |
+ |
+#include "base/logging.h" |
+#include "base/numerics/safe_math.h" |
+#include "base/stl_util.h" |
+#include "base/timer/timer.h" |
+#include "net/base/load_flags.h" |
+#include "net/url_request/redirect_info.h" |
+#include "net/url_request/url_request_context.h" |
+ |
+// TODO(eroman): Add support for POST parameters. |
+// TODO(eroman): Add controls for bypassing the cache. |
+// TODO(eroman): Add a maximum number of in-flight jobs/requests. |
+ |
+namespace net { |
+ |
+namespace { |
+ |
+// The error returned when the response body exceeded the size limit. |
+const Error kNetErrorResponseTooLarge = ERR_FILE_TOO_BIG; |
+ |
+// The error returned when the URL could not be fetched because it was not an |
+// allowed scheme (http). |
+const Error kNetErrorNotHttpUrl = ERR_DISALLOWED_URL_SCHEME; |
+ |
+// The error returned when the URL fetch did not complete in time. |
+const Error kNetErrorTimedOut = ERR_TIMED_OUT; |
+ |
+// The error returned when the response was HTTP but did not have a status |
+// of 200/OK. |
+// TODO(eroman): Use a more specific error code. |
+const Error kNetErrorNot200HttpResponse = ERR_FAILED; |
+ |
+// The size of the buffer used for reading the response body of the URLRequest. |
+const int kReadBufferSizeInBytes = 4096; |
+ |
+// The maximum size in bytes for the response body when fetching a CRL. |
+const int kMaxResponseSizeInBytesForCrl = 5 * 1024 * 1024; |
+ |
+// The maximum size in bytes for the response body when fetching an AIA URL |
+// (caIssuers/OCSP). |
+const int kMaxResponseSizeInBytesForAia = 64 * 1024; |
+ |
+// The default timeout in seconds for fetch requests. |
+const int kTimeoutSeconds = 15; |
+ |
+// Policy for which URLs are allowed to be fetched. This is called both for the |
+// initial URL and for each redirect. Returns OK on success or a net error |
+// code on failure. |
+Error CanFetchUrl(const GURL& url) { |
+ if (!url.SchemeIs("http")) |
+ return kNetErrorNotHttpUrl; |
+ return OK; |
+} |
+ |
+base::TimeDelta GetTimeout(int timeout_milliseconds) { |
+ if (timeout_milliseconds == CertNetFetcher::DEFAULT) |
+ return base::TimeDelta::FromSeconds(kTimeoutSeconds); |
+ return base::TimeDelta::FromMilliseconds(timeout_milliseconds); |
+} |
+ |
+size_t GetMaxResponseBytes(int max_response_bytes, |
+ size_t default_max_response_bytes) { |
+ if (max_response_bytes == CertNetFetcher::DEFAULT) |
+ return default_max_response_bytes; |
+ |
+ // Ensure that the specified limit is not negative, and cannot result in an |
+ // overflow while reading. |
+ base::CheckedNumeric<size_t> check(max_response_bytes); |
+ check += kReadBufferSizeInBytes; |
+ DCHECK(check.IsValid()); |
+ |
+ return max_response_bytes; |
+} |
+ |
+enum HttpMethod { |
+ HTTP_METHOD_GET, |
+ HTTP_METHOD_POST, |
+}; |
+ |
+} // namespace |
+ |
+// CertNetFetcher::Request tracks an outstanding call to Fetch(). |
+struct CertNetFetcher::Request { |
+ Request(FetchCallback callback, Job* job) : callback(callback), job(job) {} |
+ |
+ // The callback to invoke when the request has completed. |
+ FetchCallback callback; |
+ |
+ // A non-owned pointer to the job that is executing the request (and in effect |
+ // owns |this|). |
+ Job* job; |
+ |
+ private: |
+ DISALLOW_COPY_AND_ASSIGN(Request); |
+}; |
+ |
+struct CertNetFetcher::RequestParams { |
+ RequestParams(); |
+ |
+ bool operator<(const RequestParams& other) const; |
+ |
+ GURL url; |
+ HttpMethod http_method; |
+ size_t max_response_bytes; |
+ |
+  // A value <= 0 means "no timeout". |
+ base::TimeDelta timeout; |
+ |
+ // IMPORTANT: When adding fields to this structure, update operator<(). |
+ |
+ private: |
+ DISALLOW_COPY_AND_ASSIGN(RequestParams); |
+}; |
+ |
+CertNetFetcher::RequestParams::RequestParams() |
+ : http_method(HTTP_METHOD_GET), max_response_bytes(0) { |
+} |
+ |
+bool CertNetFetcher::RequestParams::operator<( |
+ const RequestParams& other) const { |
+ if (url != other.url) |
+ return url < other.url; |
+ if (http_method != other.http_method) |
+ return http_method < other.http_method; |
+ if (max_response_bytes != other.max_response_bytes) |
+ return max_response_bytes < other.max_response_bytes; |
+ if (timeout != other.timeout) |
+ return timeout < other.timeout; |
+ return false; |
Ryan Sleevi (2015/03/25 06:16:37): Normally when writing conditional comparators, I l…
eroman (2015/03/26 03:50:49): Done.
+} |
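Note on the truncated suggestion above: a common way to keep a conditional comparator like this in sync with its field list is to compare the fields as tuples via std::tie. The reviewer's comment is cut off, so this is only a guess at the idiom being referred to; the rewrite below is illustrative and not part of the patch.

    // Hypothetical std::tie-based equivalent of the operator<() above.
    // Requires <tuple>; the tuple's field order sets the comparison priority.
    bool CertNetFetcher::RequestParams::operator<(
        const RequestParams& other) const {
      return std::tie(url, http_method, max_response_bytes, timeout) <
             std::tie(other.url, other.http_method, other.max_response_bytes,
                      other.timeout);
    }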
+ |
+// CertNetFetcher::Job tracks an outstanding URLRequest as well as all of the |
+// pending requests for it. |
+class CertNetFetcher::Job : public URLRequest::Delegate { |
+ public: |
+ Job(scoped_ptr<RequestParams> request_params, CertNetFetcher* parent); |
+ ~Job() override; |
+ |
+ // Cancels the job and all requests attached to it. No callbacks will be |
+ // invoked following cancellation. |
+ void Cancel(); |
+ |
+ const RequestParams& request_params() const { return *request_params_; } |
+ |
+ // Attaches a request to the job. When the job completes it will invoke |
+ // |callback|. |
+ RequestId AddRequest(const FetchCallback& callback); |
+ |
+ // Removes |request| from the job and deletes it. |
+ void CancelRequest(RequestId request); |
+ |
+ // Creates and starts a URLRequest for the job. After the request has |
+ // completed, OnJobCompleted() will be invoked and all the registered requests |
+ // notified of completion. |
+ void StartURLRequest(URLRequestContext* context); |
+ |
+ private: |
+ // The pointers in RequestList are owned by the Job. |
+ using RequestList = std::vector<Request*>; |
+ |
+ // Implementation of URLRequest::Delegate |
+ void OnReceivedRedirect(URLRequest* request, |
+ const RedirectInfo& redirect_info, |
+ bool* defer_redirect) override; |
+ void OnResponseStarted(URLRequest* request) override; |
+ void OnReadCompleted(URLRequest* request, int bytes_read) override; |
+ |
+ // Clears the URLRequest and timer. Helper for doing work common to |
+ // cancellation and job completion. |
+ void Stop(); |
+ |
+ // Reads as much data as available from the |request|. |
+ void ReadBody(URLRequest* request); |
+ |
+ // Helper to copy the partial bytes read from the read IOBuffer to an |
+ // aggregated buffer. |
+ bool ConsumeBytesRead(URLRequest* request, int num_bytes); |
+ |
+ // Called once the job has exceeded its deadline. |
+ void OnTimeout(); |
+ |
+ // Called when the URLRequest has completed (either success or failure). |
+ void OnUrlRequestCompleted(URLRequest* request); |
+ |
+ // Called when the Job has completed. The job may finish in response to a |
+ // timeout, an invalid URL, or the URLRequest completing. By the time this |
+ // method is called, the response variables have been assigned |
+ // (result_net_error_code_ et al). |
+ void OnJobCompleted(); |
+ |
+ // The requests attached to this job. |
+ RequestList requests_; |
+ |
+ // The input parameters for starting a URLRequest. |
+ scoped_ptr<RequestParams> request_params_; |
+ |
+ // The URLRequest response information. |
+ std::vector<uint8_t> response_body_; |
+ Error result_net_error_; |
+ |
+ scoped_ptr<URLRequest> url_request_; |
+ scoped_refptr<IOBuffer> read_buffer_; |
+ |
+  // Used to time out the job when the URLRequest takes too long. This timer is |
+ // also used for notifying a failure to start the URLRequest. |
+ base::OneShotTimer<Job> timer_; |
+ |
+ // Non-owned pointer to the CertNetFetcher that created this job. |
+ CertNetFetcher* parent_; |
+ |
+ DISALLOW_COPY_AND_ASSIGN(Job); |
+}; |
+ |
+CertNetFetcher::Job::Job(scoped_ptr<RequestParams> request_params, |
+ CertNetFetcher* parent) |
+ : request_params_(request_params.Pass()), |
+ result_net_error_(ERR_IO_PENDING), |
+ parent_(parent) { |
+} |
+ |
+CertNetFetcher::Job::~Job() { |
+ Cancel(); |
+} |
+ |
+void CertNetFetcher::Job::Cancel() { |
+ parent_ = NULL; |
Ryan Sleevi (2015/03/25 06:16:37): nullptr
eroman (2015/03/26 03:50:49): Done.
+ STLDeleteElements(&requests_); |
+ Stop(); |
+} |
+ |
+CertNetFetcher::RequestId CertNetFetcher::Job::AddRequest( |
+ const FetchCallback& callback) { |
+ requests_.push_back(new Request(callback, this)); |
+ return requests_.back(); |
+} |
+ |
+void CertNetFetcher::Job::CancelRequest(RequestId request) { |
+ scoped_ptr<Job> delete_this; |
+ |
+ RequestList::iterator it = |
+ std::find(requests_.begin(), requests_.end(), request); |
+ DCHECK(it != requests_.end()); |
Ryan Sleevi (2015/03/25 06:16:37): Should this be a CHECK() instead? Just 248 makes…
eroman (2015/03/26 03:50:49): Done.
+ requests_.erase(it); |
+ delete request; |
+ |
+  // If there are no longer any requests attached to the job, then |
+ // cancel and delete it. |
+ if (requests_.empty() && parent_) |
+ delete_this = parent_->RemoveJob(this); |
+} |
+ |
+void CertNetFetcher::Job::StartURLRequest(URLRequestContext* context) { |
+ Error error = CanFetchUrl(request_params_->url); |
+ if (error != OK) { |
+ result_net_error_ = error; |
+ // The CertNetFetcher's API contract is that requests always complete |
+ // asynchronously. Use the timer class so the task is easily cancelled. |
+ timer_.Start(FROM_HERE, base::TimeDelta(), this, &Job::OnJobCompleted); |
+ return; |
+ } |
+ |
+ // Start the URLRequest. |
+ read_buffer_ = new IOBuffer(kReadBufferSizeInBytes); |
+ url_request_ = context->CreateRequest(request_params_->url, DEFAULT_PRIORITY, |
+ this, NULL); |
Ryan Sleevi (2015/03/25 06:16:37): nullptr
eroman (2015/03/26 03:50:49): Done.
+ if (request_params_->http_method == HTTP_METHOD_POST) |
+ url_request_->set_method("POST"); |
+ url_request_->SetLoadFlags(LOAD_DO_NOT_SAVE_COOKIES | |
+ LOAD_DO_NOT_SEND_COOKIES); |
+ url_request_->Start(); |
+ |
+ // Start a timer to limit how long the job runs for. |
+ if (request_params_->timeout > base::TimeDelta()) |
+ timer_.Start(FROM_HERE, request_params_->timeout, this, &Job::OnTimeout); |
+} |
+ |
+void CertNetFetcher::Job::OnReceivedRedirect(URLRequest* request, |
+ const RedirectInfo& redirect_info, |
+ bool* defer_redirect) { |
+ DCHECK_EQ(url_request_.get(), request); |
+ |
+ // Ensure that the new URL matches the policy. |
+ Error error = CanFetchUrl(redirect_info.new_url); |
+ if (error != OK) { |
+ request->CancelWithError(error); |
+ OnUrlRequestCompleted(request); |
+ return; |
+ } |
+} |
+ |
+void CertNetFetcher::Job::OnResponseStarted(URLRequest* request) { |
+ DCHECK_EQ(url_request_.get(), request); |
+ |
+ if (!request->status().is_success()) { |
+ OnUrlRequestCompleted(request); |
+ return; |
+ } |
+ |
+  // In practice all URLs fetched are HTTP, but check anyway as a defensive |
+ // measure in case the policy is ever changed. |
+ if (request->GetResponseCode() != 200) { |
+ request->CancelWithError(kNetErrorNot200HttpResponse); |
+ OnUrlRequestCompleted(request); |
+ return; |
+ } |
+ |
+ ReadBody(request); |
+} |
+ |
+void CertNetFetcher::Job::OnReadCompleted(URLRequest* request, int bytes_read) { |
+ DCHECK_EQ(url_request_.get(), request); |
+ |
+ // Keep reading the response body. |
+ if (ConsumeBytesRead(request, bytes_read)) |
+ ReadBody(request); |
+} |
+ |
+void CertNetFetcher::Job::Stop() { |
+ timer_.Stop(); |
+ url_request_.reset(); |
+} |
+ |
+void CertNetFetcher::Job::ReadBody(URLRequest* request) { |
+ // Read as many bytes as are available synchronously. |
+ int num_bytes; |
+ while ( |
+ request->Read(read_buffer_.get(), kReadBufferSizeInBytes, &num_bytes)) { |
+ if (!ConsumeBytesRead(request, num_bytes)) |
+ return; |
+ } |
+ |
+ // Check whether the read failed synchronously. |
+ if (!request->status().is_io_pending()) |
+ OnUrlRequestCompleted(request); |
+ return; |
+} |
+ |
+bool CertNetFetcher::Job::ConsumeBytesRead(URLRequest* request, int num_bytes) { |
+ if (num_bytes <= 0) { |
+ // Error while reading, or EOF. |
+ OnUrlRequestCompleted(request); |
+ return false; |
+ } |
+ |
+ // Enforce maximum size bound. |
+ if (num_bytes + response_body_.size() > request_params_->max_response_bytes) { |
+ request->CancelWithError(kNetErrorResponseTooLarge); |
+ OnUrlRequestCompleted(request); |
+ return false; |
+ } |
+ |
+ // Append the data to |response_body_|. |
+ response_body_.reserve(response_body_.size() + num_bytes); |
+ response_body_.insert(response_body_.end(), read_buffer_->data(), |
+ read_buffer_->data() + num_bytes); |
+ return true; |
+} |
+ |
+void CertNetFetcher::Job::OnTimeout() { |
+ result_net_error_ = kNetErrorTimedOut; |
+ url_request_->CancelWithError(result_net_error_); |
+ OnJobCompleted(); |
+} |
+ |
+void CertNetFetcher::Job::OnUrlRequestCompleted(URLRequest* request) { |
+ DCHECK_EQ(request, url_request_.get()); |
+ |
+ if (request->status().is_success()) |
+ result_net_error_ = OK; |
+ else |
+ result_net_error_ = static_cast<Error>(request->status().error()); |
+ |
+ OnJobCompleted(); |
+} |
+ |
+void CertNetFetcher::Job::OnJobCompleted() { |
+ // Stop the timer and clear the URLRequest. |
+ Stop(); |
+ |
+ // Invoking the callbacks is subtle as state may be mutated while iterating |
+ // through the callbacks: |
+ // |
+ // * The parent CertNetFetcher may be deleted |
+ // * Requests in this job may be cancelled |
+ |
+ scoped_ptr<Job> delete_this = parent_->RemoveJob(this); |
+ parent_->SetCurrentlyCompletingJob(this); |
+ |
+ while (!requests_.empty()) { |
+ scoped_ptr<Request> request(requests_.front()); |
+ requests_.erase(requests_.begin()); |
+ request->callback.Run(result_net_error_, response_body_); |
+ } |
+ |
+ if (parent_) |
+ parent_->SetCurrentlyCompletingJob(NULL); |
Ryan Sleevi (2015/03/25 06:16:37): nullptr
eroman (2015/03/26 03:50:49): Done.
+} |
+ |
+CertNetFetcher::CertNetFetcher(URLRequestContext* context) |
+ : currently_completing_job_(NULL), context_(context) { |
Ryan Sleevi (2015/03/25 06:16:37): nullptr
eroman (2015/03/26 03:50:49): Done.
+} |
+ |
+CertNetFetcher::~CertNetFetcher() { |
+ STLDeleteElements(&jobs_); |
+ if (currently_completing_job_) |
+ currently_completing_job_->Cancel(); |
+} |
+ |
+void CertNetFetcher::CancelRequest(RequestId request) { |
+ DCHECK(thread_checker_.CalledOnValidThread()); |
+ |
+ request->job->CancelRequest(request); |
+} |
+ |
+CertNetFetcher::RequestId CertNetFetcher::FetchCaIssuers( |
+ const GURL& url, |
+ int timeout_milliseconds, |
+ int max_response_bytes, |
+ const FetchCallback& callback) { |
+ scoped_ptr<RequestParams> request_params(new RequestParams); |
+ |
+ request_params->url = url; |
+ request_params->http_method = HTTP_METHOD_GET; |
+ request_params->timeout = GetTimeout(timeout_milliseconds); |
+ request_params->max_response_bytes = |
+ GetMaxResponseBytes(max_response_bytes, kMaxResponseSizeInBytesForAia); |
+ |
+ return Fetch(request_params.Pass(), callback); |
+} |
+ |
+CertNetFetcher::RequestId CertNetFetcher::FetchCrl( |
+ const GURL& url, |
+ int timeout_milliseconds, |
+ int max_response_bytes, |
+ const FetchCallback& callback) { |
+ scoped_ptr<RequestParams> request_params(new RequestParams); |
+ |
+ request_params->url = url; |
+ request_params->http_method = HTTP_METHOD_GET; |
+ request_params->timeout = GetTimeout(timeout_milliseconds); |
+ request_params->max_response_bytes = |
+ GetMaxResponseBytes(max_response_bytes, kMaxResponseSizeInBytesForCrl); |
+ |
+ return Fetch(request_params.Pass(), callback); |
+} |
+ |
+CertNetFetcher::RequestId CertNetFetcher::FetchOcsp( |
+ const GURL& url, |
+ int timeout_milliseconds, |
+ int max_response_bytes, |
+ const FetchCallback& callback) { |
+ scoped_ptr<RequestParams> request_params(new RequestParams); |
+ |
+ request_params->url = url; |
+ request_params->http_method = HTTP_METHOD_GET; |
+ request_params->timeout = GetTimeout(timeout_milliseconds); |
+ request_params->max_response_bytes = |
+ GetMaxResponseBytes(max_response_bytes, kMaxResponseSizeInBytesForAia); |
+ |
+ return Fetch(request_params.Pass(), callback); |
+} |
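For context, a sketch of how a caller might drive this API. The FetchCallback type is declared in cert_net_fetcher.h, which is not part of this file; the signature used below is an assumption inferred from how callback.Run() is invoked in OnJobCompleted(), and the URL is a placeholder. Illustrative only, not part of the patch.

    // Assumed: FetchCallback is
    // base::Callback<void(Error, const std::vector<uint8_t>&)>.
    void OnCaIssuersFetched(Error error, const std::vector<uint8_t>& body) {
      if (error != OK)
        return;  // e.g. give up on AIA chasing for this certificate.
      // Otherwise parse |body| as a DER-encoded certificate.
    }

    void ExampleFetch(URLRequestContext* context) {
      CertNetFetcher fetcher(context);
      CertNetFetcher::RequestId id = fetcher.FetchCaIssuers(
          GURL("http://aia.example.test/intermediate.cer"),
          CertNetFetcher::DEFAULT,  // Timeout.
          CertNetFetcher::DEFAULT,  // Maximum response size.
          base::Bind(&OnCaIssuersFetched));
      // A pending request can be cancelled through its id, after which its
      // callback will not be invoked:
      fetcher.CancelRequest(id);
    }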
+ |
+bool CertNetFetcher::JobComparator::operator()(const Job* job1, |
+ const Job* job2) const { |
+ return job1->request_params() < job2->request_params(); |
+} |
+ |
+CertNetFetcher::RequestId CertNetFetcher::Fetch( |
+ scoped_ptr<RequestParams> request_params, |
+ const FetchCallback& callback) { |
+ DCHECK(thread_checker_.CalledOnValidThread()); |
+ |
+  // If there is an in-progress job that matches the request parameters, use it. |
+ // Otherwise start a new job. |
+ Job* job = FindJob(*request_params); |
+ |
+ if (!job) { |
+ job = new Job(request_params.Pass(), this); |
+ jobs_.insert(job); |
+ job->StartURLRequest(context_); |
+ } |
+ |
+ return job->AddRequest(callback); |
+} |
+ |
+struct CertNetFetcher::JobToRequestParamsComparator { |
+ bool operator()(const Job* job, |
+ const CertNetFetcher::RequestParams& value) const { |
+ return job->request_params() < value; |
+ } |
+}; |
+ |
+CertNetFetcher::Job* CertNetFetcher::FindJob(const RequestParams& params) { |
+ DCHECK(thread_checker_.CalledOnValidThread()); |
+ |
+ // The JobSet is kept in sorted order so items can be found using binary |
+ // search. |
+ JobSet::iterator it = std::lower_bound(jobs_.begin(), jobs_.end(), params, |
+ JobToRequestParamsComparator()); |
+ if (it != jobs_.end() && !(params < (*it)->request_params())) |
Ryan Sleevi (2015/03/25 06:16:37): Is this second conditional meant to be an equality…
eroman (2015/03/26 03:50:49): It is essentially an equality check. std::lower_b…
+ return *it; |
+ return NULL; |
Ryan Sleevi (2015/03/25 06:16:37): nullptr
eroman (2015/03/26 03:50:49): Done.
+} |
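Note on the comment thread in FindJob() above: std::lower_bound() only returns the first element that is not less than |params|, so the extra !(params < (*it)->request_params()) test is what makes this an equivalence ("neither is less than the other") check. A minimal standalone sketch of the idiom, illustrative only and not part of the patch:

    #include <algorithm>
    #include <vector>

    // Returns a pointer to the element of a sorted vector that is equivalent
    // to |key| under operator< (neither compares less than the other), or
    // nullptr if there is none.
    template <typename T>
    const T* FindEquivalent(const std::vector<T>& sorted, const T& key) {
      auto it = std::lower_bound(sorted.begin(), sorted.end(), key);
      if (it != sorted.end() && !(key < *it))
        return &*it;
      return nullptr;
    }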
+ |
+scoped_ptr<CertNetFetcher::Job> CertNetFetcher::RemoveJob(Job* job) { |
+ DCHECK(thread_checker_.CalledOnValidThread()); |
+ bool erased_job = jobs_.erase(job) == 1; |
+ DCHECK(erased_job); |
+ return scoped_ptr<Job>(job); |
+} |
+ |
+void CertNetFetcher::SetCurrentlyCompletingJob(Job* job) { |
+ currently_completing_job_ = job; |
+} |
+ |
+} // namespace net |