Chromium Code Reviews| Index: net/cert/cert_net_fetcher_unittest.cc |
| diff --git a/net/cert/cert_net_fetcher_unittest.cc b/net/cert/cert_net_fetcher_unittest.cc |
| new file mode 100644 |
| index 0000000000000000000000000000000000000000..c65e5f980030dca7fa8cf9d0373843619bf235c0 |
| --- /dev/null |
| +++ b/net/cert/cert_net_fetcher_unittest.cc |
| @@ -0,0 +1,448 @@ |
| +// Copyright 2015 The Chromium Authors. All rights reserved. |
| +// Use of this source code is governed by a BSD-style license that can be |
| +// found in the LICENSE file. |
| + |
| +#include "net/cert/cert_net_fetcher.h" |
| + |
| +#include <string> |
| + |
| +#include "base/compiler_specific.h" |
| +#include "base/run_loop.h" |
| +#include "net/cert/mock_cert_verifier.h" |
| +#include "net/dns/mock_host_resolver.h" |
| +#include "net/http/http_server_properties_impl.h" |
| +#include "net/test/spawned_test_server/spawned_test_server.h" |
| +#include "net/url_request/url_request_job_factory_impl.h" |
| +#include "net/url_request/url_request_test_util.h" |
| +#include "testing/gtest/include/gtest/gtest.h" |
| +#include "testing/platform_test.h" |
| + |
| +// TODO(eroman): Test that cookies aren't sent. |
| +// TODO(eroman): Request de-duplication |
| +// TODO(eroman): Cancel duplicated requests within a callback |
| +// TODO(eroman): Start requests for the same job within a callback |
| +// TODO(eroman): Delete the CertNetFetcher within callback |
| + |
| +using base::ASCIIToUTF16; |
| + |
| +namespace net { |
| + |
| +namespace { |
| + |
| +const base::FilePath::CharType kDocRoot[] = |
| + FILE_PATH_LITERAL("net/data/cert_net_fetcher_unittest"); |
| + |
| +// A non-mock URLRequestContext which can access http:// urls. |
class RequestContext : public URLRequestContext {
 public:
  RequestContext() : storage_(this) {
    ProxyConfig no_proxy;
    // Mock DNS resolution and certificate verification so tests never touch
    // real network infrastructure.
    storage_.set_host_resolver(scoped_ptr<HostResolver>(new MockHostResolver));
    storage_.set_cert_verifier(new MockCertVerifier);
    storage_.set_transport_security_state(new TransportSecurityState);
    // Direct connections only (fixed "no proxy" configuration).
    storage_.set_proxy_service(ProxyService::CreateFixed(no_proxy));
    storage_.set_ssl_config_service(new SSLConfigServiceDefaults);
    storage_.set_http_server_properties(
        scoped_ptr<HttpServerProperties>(new HttpServerPropertiesImpl()));

    // Assemble an HTTP session from the objects configured above. The params
    // reference the storage-owned instances, so this must run after the
    // setters above.
    HttpNetworkSession::Params params;
    params.host_resolver = host_resolver();
    params.cert_verifier = cert_verifier();
    params.transport_security_state = transport_security_state();
    params.proxy_service = proxy_service();
    params.ssl_config_service = ssl_config_service();
    params.http_server_properties = http_server_properties();
    scoped_refptr<HttpNetworkSession> network_session(
        new HttpNetworkSession(params));
    // Wrap the session in an in-memory HTTP cache (exercised by the Cache
    // test below).
    storage_.set_http_transaction_factory(new HttpCache(
        network_session.get(), HttpCache::DefaultBackend::InMemory(0)));
    URLRequestJobFactoryImpl* job_factory = new URLRequestJobFactoryImpl();
    storage_.set_job_factory(job_factory);
  }

  // All URLRequests must have been destroyed before the context goes away.
  ~RequestContext() override { AssertNoURLRequests(); }

 private:
  URLRequestContextStorage storage_;
};
| + |
| +class FetchResult { |
| + public: |
| + FetchResult(int net_error, const std::vector<uint8_t>& response_body) |
| + : net_error_(net_error), response_body_(response_body) {} |
| + |
| + void VerifySuccess(const std::string& expected_body) { |
| + EXPECT_EQ(OK, net_error_); |
| + EXPECT_EQ(expected_body, |
| + std::string(response_body_.begin(), response_body_.end())); |
| + } |
| + |
| + void VerifyFailure(int expected_error) { |
| + EXPECT_EQ(expected_error, net_error_); |
| + EXPECT_EQ(0u, response_body_.size()); |
| + } |
| + |
| + private: |
| + const int net_error_; |
| + const std::vector<uint8_t> response_body_; |
| +}; |
| + |
| +// Helper to synchronously wait for the fetch completion. This is similar to |
| +// net's TestCompletionCallback, but built around FetchCallback. |
class TestFetchCallback {
 public:
  TestFetchCallback()
      : callback_(base::Bind(&TestFetchCallback::OnCallback,
                             base::Unretained(this))) {}

  // Callback to pass to CertNetFetcher. Unretained(this) is safe as long as
  // the TestFetchCallback outlives the request.
  const CertNetFetcher::FetchCallback& callback() const { return callback_; }

  // Spins the message loop until the fetch completes, then transfers
  // ownership of the result to the caller.
  scoped_ptr<FetchResult> WaitForResult() {
    DCHECK(quit_closure_.is_null());
    while (!HasResult()) {
      base::RunLoop run_loop;
      quit_closure_ = run_loop.QuitClosure();
      run_loop.Run();
      quit_closure_.Reset();
    }
    return result_.Pass();
  }

  // True once OnCallback() has run (and the result hasn't been collected).
  bool HasResult() const { return result_.get(); }

 private:
  // Invoked by the fetcher on completion: records the result and, if
  // WaitForResult() is currently spinning a RunLoop, unblocks it.
  void OnCallback(int net_error, const std::vector<uint8_t>& response_body) {
    DCHECK(!HasResult());
    result_.reset(new FetchResult(net_error, response_body));
    if (!quit_closure_.is_null())
      quit_closure_.Run();
  }

  CertNetFetcher::FetchCallback callback_;
  scoped_ptr<FetchResult> result_;  // Null until the fetch completes.
  base::Closure quit_closure_;  // Non-null only while WaitForResult() runs.
};
| + |
| +} // namespace |
| + |
// Fixture providing a spawned local HTTP server (serving files from
// kDocRoot) and a URLRequestContext for the fetcher under test.
class CertNetFetcherTest : public PlatformTest {
 public:
  CertNetFetcherTest()
      : test_server_(SpawnedTestServer::TYPE_HTTP,
                     net::SpawnedTestServer::kLocalhost,
                     base::FilePath(kDocRoot)) {}

 protected:
  SpawnedTestServer test_server_;  // Not started; each test calls Start().
  RequestContext context_;
};
| + |
| +// Helper to start an AIA fetch using default parameters. |
| +CertNetFetcher::RequestId StartRequest(const GURL& url, |
| + const TestFetchCallback& callback, |
| + CertNetFetcher* fetcher) { |
| + return fetcher->FetchCaIssuers(url, CertNetFetcher::DEFAULT, |
| + CertNetFetcher::DEFAULT, callback.callback()); |
| +} |
| + |
| +// Fetch a few unique URLs using GET in parallel. Each URL has a different body |
| +// and Content-Type. |
| +TEST_F(CertNetFetcherTest, ParallelFetchNoDupes) { |
| + ASSERT_TRUE(test_server_.Start()); |
| + |
| + CertNetFetcher fetcher(&context_); |
| + TestFetchCallback callback1; |
| + TestFetchCallback callback2; |
| + TestFetchCallback callback3; |
| + |
| + // Request a URL with Content-Type "application/pkix-cert" |
| + GURL url1 = test_server_.GetURL("files/cert.crt"); |
| + StartRequest(url1, callback1, &fetcher); |
| + |
| + // Request a URL with Content-Type "application/pkix-crl" |
| + GURL url2 = test_server_.GetURL("files/root.crl"); |
| + StartRequest(url2, callback2, &fetcher); |
| + |
| + // Request a URL with Content-Type "application/pkcs7-mime" |
| + GURL url3 = test_server_.GetURL("files/certs.p7c"); |
| + StartRequest(url3, callback3, &fetcher); |
| + |
| + // Wait for all of the requests to complete. |
| + scoped_ptr<FetchResult> result1 = callback1.WaitForResult(); |
| + scoped_ptr<FetchResult> result2 = callback2.WaitForResult(); |
| + scoped_ptr<FetchResult> result3 = callback3.WaitForResult(); |
| + |
| + // Verify the fetch results. |
| + result1->VerifySuccess("-cert.crt-\n"); |
| + result2->VerifySuccess("-root.crl-\n"); |
| + result3->VerifySuccess("-certs.p7c-\n"); |
| +} |
| + |
| +// Fetch a caIssuers URL which has an unexpected extension and Content-Type. |
| +// The extension is .txt and the Content-Type is text/plain. Despite being |
| +// unusual this succeeds as the extension and Content-Type are not required to |
| +// be meaningful. |
| +TEST_F(CertNetFetcherTest, ContentTypeDoesntMatter) { |
| + ASSERT_TRUE(test_server_.Start()); |
| + |
| + CertNetFetcher fetcher(&context_); |
| + |
| + TestFetchCallback callback; |
| + GURL url = test_server_.GetURL("files/foo.txt"); |
| + StartRequest(url, callback, &fetcher); |
| + scoped_ptr<FetchResult> result = callback.WaitForResult(); |
| + result->VerifySuccess("-foo.txt-\n"); |
| +} |
| + |
| +// Fetch URLs whose HTTP response code is not 200. These are considered |
| +// failures. |
| +TEST_F(CertNetFetcherTest, HttpStatusCode) { |
| + ASSERT_TRUE(test_server_.Start()); |
| + |
| + CertNetFetcher fetcher(&context_); |
| + |
| + // Response was HTTP status 404. |
| + { |
| + TestFetchCallback callback; |
| + GURL url = test_server_.GetURL("files/404.html"); |
| + StartRequest(url, callback, &fetcher); |
| + scoped_ptr<FetchResult> result = callback.WaitForResult(); |
| + result->VerifyFailure(ERR_FAILED); |
| + } |
| + |
| + // Response was HTTP status 500. |
| + { |
| + TestFetchCallback callback; |
| + GURL url = test_server_.GetURL("files/500.html"); |
| + StartRequest(url, callback, &fetcher); |
| + scoped_ptr<FetchResult> result = callback.WaitForResult(); |
| + result->VerifyFailure(ERR_FAILED); |
| + } |
| +} |
| + |
| +// Fetching a URL with a Content-Disposition header should have no effect. |
| +TEST_F(CertNetFetcherTest, ContentDisposition) { |
| + ASSERT_TRUE(test_server_.Start()); |
| + |
| + CertNetFetcher fetcher(&context_); |
| + |
| + TestFetchCallback callback; |
| + GURL url = test_server_.GetURL("files/downloadable.js"); |
| + StartRequest(url, callback, &fetcher); |
| + scoped_ptr<FetchResult> result = callback.WaitForResult(); |
| + result->VerifySuccess("-downloadable.js-\n"); |
| +} |
| + |
| +// Verifies that a cacheable request will be served from the HTTP cache the |
| +// second time it is requested. |
| +TEST_F(CertNetFetcherTest, Cache) { |
| + ASSERT_TRUE(test_server_.Start()); |
| + |
| + CertNetFetcher fetcher(&context_); |
| + |
| + // Fetch a URL whose HTTP headers make it cacheable for 1 hour. |
| + GURL url(test_server_.GetURL("files/cacheable_1hr.crt")); |
| + { |
| + TestFetchCallback callback; |
| + StartRequest(url, callback, &fetcher); |
| + scoped_ptr<FetchResult> result = callback.WaitForResult(); |
| + result->VerifySuccess("-cacheable_1hr.crt-\n"); |
| + } |
| + |
| + // Kill the HTTP server. |
| + ASSERT_TRUE(test_server_.Stop()); |
| + |
| + // Fetch again -- will fail unless served from cache. |
| + { |
| + TestFetchCallback callback; |
| + StartRequest(url, callback, &fetcher); |
| + scoped_ptr<FetchResult> result = callback.WaitForResult(); |
| + result->VerifySuccess("-cacheable_1hr.crt-\n"); |
| + } |
| +} |
| + |
| +// Verify that the maximum response body constraints are enforced by fetching a |
| +// resource that is larger than the limit. |
| +TEST_F(CertNetFetcherTest, TooLarge) { |
| + ASSERT_TRUE(test_server_.Start()); |
| + |
| + CertNetFetcher fetcher(&context_); |
| + |
| + // This file has a response body 12 bytes long. So setting the maximum to 11 |
| + // bytes will cause it to fail. |
| + GURL url(test_server_.GetURL("files/certs.p7c")); |
| + TestFetchCallback callback; |
| + fetcher.FetchCaIssuers(url, CertNetFetcher::DEFAULT, 11, callback.callback()); |
| + |
| + scoped_ptr<FetchResult> result = callback.WaitForResult(); |
| + result->VerifyFailure(ERR_FILE_TOO_BIG); |
| +} |
| + |
| +// Set the timeout to 10 milliseconds, and try fetching a URL that takes 5 |
| +// seconds to complete. It should fail due to a timeout. |
| +TEST_F(CertNetFetcherTest, Hang) { |
| + ASSERT_TRUE(test_server_.Start()); |
| + |
| + CertNetFetcher fetcher(&context_); |
| + |
| + GURL url(test_server_.GetURL("slow/certs.p7c?5.1")); |
| + TestFetchCallback callback; |
| + fetcher.FetchCaIssuers(url, 10, CertNetFetcher::DEFAULT, callback.callback()); |
| + scoped_ptr<FetchResult> result = callback.WaitForResult(); |
| + result->VerifyFailure(ERR_TIMED_OUT); |
| +} |
| + |
| +// Verify that if a response is gzip-encoded it gets inflated before being |
| +// returned to the caller. |
| +TEST_F(CertNetFetcherTest, Gzip) { |
| + ASSERT_TRUE(test_server_.Start()); |
| + |
| + CertNetFetcher fetcher(&context_); |
| + |
| + GURL url(test_server_.GetURL("files/gzipped_crl")); |
| + TestFetchCallback callback; |
| + StartRequest(url, callback, &fetcher); |
| + scoped_ptr<FetchResult> result = callback.WaitForResult(); |
| + result->VerifySuccess("-gzipped_crl-\n"); |
| +} |
| + |
| +// Try fetching an unsupported URL scheme (https). |
| +TEST_F(CertNetFetcherTest, HttpsNotAllowed) { |
| + ASSERT_TRUE(test_server_.Start()); |
| + |
| + CertNetFetcher fetcher(&context_); |
| + |
| + GURL url("https://foopy/foo.crt"); |
| + TestFetchCallback callback; |
| + StartRequest(url, callback, &fetcher); |
| + // Should NOT complete synchronously despite being a test that could be done |
| + // immediately. |
| + EXPECT_FALSE(callback.HasResult()); |
| + scoped_ptr<FetchResult> result = callback.WaitForResult(); |
| + result->VerifyFailure(ERR_DISALLOWED_URL_SCHEME); |
| +} |
| + |
| +// Try fetching a URL which redirects to https. |
| +TEST_F(CertNetFetcherTest, RedirectToHttpsNotAllowed) { |
| + ASSERT_TRUE(test_server_.Start()); |
| + |
| + CertNetFetcher fetcher(&context_); |
| + |
| + GURL url(test_server_.GetURL("files/redirect_https")); |
| + TestFetchCallback callback; |
| + StartRequest(url, callback, &fetcher); |
| + scoped_ptr<FetchResult> result = callback.WaitForResult(); |
| + result->VerifyFailure(ERR_DISALLOWED_URL_SCHEME); |
| +} |
| + |
| +// Try fetching an unsupported URL scheme (https) and then immediately |
| +// cancelling. This is a bit special because this codepath needs to post a task. |
| +TEST_F(CertNetFetcherTest, CancelHttpsNotAllowed) { |
| + ASSERT_TRUE(test_server_.Start()); |
| + |
| + CertNetFetcher fetcher(&context_); |
| + |
| + GURL url("https://foopy/foo.crt"); |
| + TestFetchCallback callback; |
| + CertNetFetcher::RequestId id = StartRequest(url, callback, &fetcher); |
| + |
| + // Should NOT complete synchronously despite being a test that could be done |
| + // immediately. |
| + EXPECT_FALSE(callback.HasResult()); |
| + |
| + fetcher.CancelRequest(id); |
| +} |
| + |
| +// Start a few requests, and cancel one of them before running the message loop |
| +// again. |
| +TEST_F(CertNetFetcherTest, CancelBeforeRunningMessageLoop) { |
| + ASSERT_TRUE(test_server_.Start()); |
| + |
| + CertNetFetcher fetcher(&context_); |
| + TestFetchCallback callback1; |
| + TestFetchCallback callback2; |
| + TestFetchCallback callback3; |
| + |
| + GURL url1 = test_server_.GetURL("files/cert.crt"); |
| + StartRequest(url1, callback1, &fetcher); |
| + |
| + GURL url2 = test_server_.GetURL("files/root.crl"); |
| + CertNetFetcher::RequestId id2 = StartRequest(url2, callback2, &fetcher); |
| + |
| + GURL url3 = test_server_.GetURL("files/certs.p7c"); |
| + StartRequest(url3, callback3, &fetcher); |
| + |
| + EXPECT_FALSE(callback1.HasResult()); |
| + EXPECT_FALSE(callback2.HasResult()); |
| + EXPECT_FALSE(callback3.HasResult()); |
| + |
| + // Cancel the second request. |
| + fetcher.CancelRequest(id2); |
| + |
| + // Wait for the non-cancelled requests to complete. |
| + scoped_ptr<FetchResult> result1 = callback1.WaitForResult(); |
| + scoped_ptr<FetchResult> result3 = callback3.WaitForResult(); |
| + |
| + // Verify the fetch results. |
| + result1->VerifySuccess("-cert.crt-\n"); |
| + result3->VerifySuccess("-certs.p7c-\n"); |
| + |
| + EXPECT_FALSE(callback2.HasResult()); |
| +} |
| + |
| +// Start several requests, and cancel one of them after the first has completed. |
| +// NOTE: The python test server is single threaded and serves serially. After a |
|
Ryan Sleevi
2015/02/23 20:25:22
incomplete sentence?
eroman
2015/02/23 23:36:58
Fixed.
|
| +// socket is opened to it any following request will hang until it is closed. |
| +// Cancelling the first request therefore can be problematic, since if |
| +// cancellation is done after the socket is opened but before reading/writing, |
| +// then the socket is re-cycled and things will be stalled until the cleanup |
| +// timer (10 seconds) closes it. |
| +TEST_F(CertNetFetcherTest, CancelAfterRunningMessageLoop) { |
| + ASSERT_TRUE(test_server_.Start()); |
| + |
| + CertNetFetcher fetcher(&context_); |
| + TestFetchCallback callback1; |
| + TestFetchCallback callback2; |
| + TestFetchCallback callback3; |
| + |
| + GURL url1 = test_server_.GetURL("files/cert.crt"); |
| + StartRequest(url1, callback1, &fetcher); |
| + |
| + GURL url2 = test_server_.GetURL("files/certs.p7c"); |
| + CertNetFetcher::RequestId id2 = StartRequest(url2, callback2, &fetcher); |
| + |
| + GURL url3("ftp://www.not.supported.com/foo"); |
| + StartRequest(url3, callback3, &fetcher); |
| + |
| + EXPECT_FALSE(callback1.HasResult()); |
| + EXPECT_FALSE(callback2.HasResult()); |
| + EXPECT_FALSE(callback3.HasResult()); |
| + |
| + // Wait for the fast request to complete. |
| + scoped_ptr<FetchResult> result3 = callback3.WaitForResult(); |
| + result3->VerifyFailure(ERR_DISALLOWED_URL_SCHEME); |
| + |
| + // Cancel the second outstanding request. |
| + fetcher.CancelRequest(id2); |
| + |
| + // Wait for the first request to complete. |
| + scoped_ptr<FetchResult> result2 = callback1.WaitForResult(); |
| + |
| + // Verify the fetch results. |
| + result2->VerifySuccess("-cert.crt-\n"); |
| +} |
| + |
| +// Delete a CertNetFetcher with outstanding requests on it. |
| +TEST_F(CertNetFetcherTest, DeleteCancels) { |
| + ASSERT_TRUE(test_server_.Start()); |
| + |
| + CertNetFetcher fetcher(&context_); |
| + |
| + GURL url(test_server_.GetURL("slow/certs.p7c?20.1")); |
| + TestFetchCallback callback; |
| + StartRequest(url, callback, &fetcher); |
| + |
| + // Note that the request is never completed, nor cancelled. |
| +} |
| + |
|
Ryan Sleevi
2015/02/23 20:25:22
Can you add a test for:
Two requests for same URL
eroman
2015/02/23 23:36:58
I had sent these tests as a follow-up change (http
|
| +} // namespace net |