Chromium Code Reviews

Unified Diff: net/url_request/sdch_dictionary_fetcher_unittest.cc

Issue 881413003: Make SDCH dictionaries persistent across browser restart. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: First round of persistence tests. Created 5 years, 10 months ago
Index: net/url_request/sdch_dictionary_fetcher_unittest.cc
diff --git a/net/url_request/sdch_dictionary_fetcher_unittest.cc b/net/url_request/sdch_dictionary_fetcher_unittest.cc
index 460fc5c44dd10aabfd4c46242faad68163021d81..a1a158d80af2a58c2647f8a1b799bc63a8d87d81 100644
--- a/net/url_request/sdch_dictionary_fetcher_unittest.cc
+++ b/net/url_request/sdch_dictionary_fetcher_unittest.cc
@@ -11,7 +11,9 @@
#include "base/run_loop.h"
#include "base/strings/stringprintf.h"
#include "base/thread_task_runner_handle.h"
+#include "net/base/load_flags.h"
#include "net/base/sdch_manager.h"
+#include "net/http/http_response_headers.h"
#include "net/url_request/url_request_data_job.h"
#include "net/url_request/url_request_filter.h"
#include "net/url_request/url_request_interceptor.h"
@@ -28,10 +30,17 @@ const char kTestDomain[] = "top.domain.test";
class URLRequestSpecifiedResponseJob : public URLRequestSimpleJob {
public:
- URLRequestSpecifiedResponseJob(URLRequest* request,
- NetworkDelegate* network_delegate,
- const base::Closure& destruction_callback)
+ // Called on destruction with load flags used for this request.
+ typedef base::Callback<void(int)> DestructionCallback;
+
+ URLRequestSpecifiedResponseJob(
+ URLRequest* request,
+ NetworkDelegate* network_delegate,
+ const HttpResponseInfo& response_info_to_return,
+ const DestructionCallback& destruction_callback)
: URLRequestSimpleJob(request, network_delegate),
+ response_info_to_return_(response_info_to_return),
+ last_load_flags_seen_(0),
destruction_callback_(destruction_callback) {
DCHECK(!destruction_callback.is_null());
}
@@ -43,8 +52,15 @@ class URLRequestSpecifiedResponseJob : public URLRequestSimpleJob {
url.spec().c_str());
}
+ // URLRequestJob
+ void GetResponseInfo(HttpResponseInfo* info) override {
+ *info = response_info_to_return_;
+ }
+
private:
- ~URLRequestSpecifiedResponseJob() override { destruction_callback_.Run(); }
+ ~URLRequestSpecifiedResponseJob() override {
+ destruction_callback_.Run(last_load_flags_seen_);
+ }
// URLRequestSimpleJob implementation:
int GetData(std::string* mime_type,
@@ -53,22 +69,35 @@ class URLRequestSpecifiedResponseJob : public URLRequestSimpleJob {
const CompletionCallback& callback) const override {
GURL url(request_->url());
*data = ExpectedResponseForURL(url);
+ last_load_flags_seen_ = request_->load_flags();
return OK;
}
- const base::Closure destruction_callback_;
+ const HttpResponseInfo response_info_to_return_;
+ mutable int last_load_flags_seen_;
+ const DestructionCallback destruction_callback_;
};
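// Illustrative sketch; not part of this patch. The DestructionCallback above
// now reports the load flags of the request the job served. A minimal sketch
// of binding it from a hypothetical observer (the class and names below are
// illustrative only, assuming base/bind.h is available):
class JobFlagRecorder {
 public:
  JobFlagRecorder() : last_load_flags_(0) {}

  URLRequestSpecifiedResponseJob::DestructionCallback MakeCallback() {
    // base::Unretained is safe only if the recorder outlives the job.
    return base::Bind(&JobFlagRecorder::OnJobDestroyed,
                      base::Unretained(this));
  }

  int last_load_flags() const { return last_load_flags_; }

 private:
  void OnJobDestroyed(int load_flags) { last_load_flags_ = load_flags; }

  int last_load_flags_;
};
// End sketch.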
class SpecifiedResponseJobInterceptor : public URLRequestInterceptor {
public:
// A callback to be called whenever a URLRequestSpecifiedResponseJob is
- // created or destroyed. The argument will be the change in number of
+ // created or destroyed. The first argument will be the change in number of
// jobs (i.e. +1 for created, -1 for destroyed).
- typedef base::Callback<void(int outstanding_job_delta)> LifecycleCallback;
-
- explicit SpecifiedResponseJobInterceptor(
- const LifecycleCallback& lifecycle_callback)
- : lifecycle_callback_(lifecycle_callback), factory_(this) {
+ // The second argument is meaningful only when the job is being
+ // destroyed, in which case it contains the load flags of the request the
+ // job was created for; it is zero when the job is being created.
+ typedef base::Callback<void(int outstanding_job_delta,
+ int destruction_load_flags)> LifecycleCallback;
+
+ // |*info| will be returned from all child URLRequestSpecifiedResponseJobs.
+ // Note that: a) this pointer is shared with the caller, and the caller must
+ // guarantee that |*info| outlives the SpecifiedResponseJobInterceptor, and
+ // b) |*info| is mutable, and changes to it will propagate to
+ // URLRequestSpecifiedResponseJobs created after any change.
+ SpecifiedResponseJobInterceptor(HttpResponseInfo* http_response_info,
+ const LifecycleCallback& lifecycle_callback)
+ : http_response_info_(http_response_info),
+ lifecycle_callback_(lifecycle_callback) {
DCHECK(!lifecycle_callback_.is_null());
}
~SpecifiedResponseJobInterceptor() override {}
@@ -76,19 +105,21 @@ class SpecifiedResponseJobInterceptor : public URLRequestInterceptor {
URLRequestJob* MaybeInterceptRequest(
URLRequest* request,
NetworkDelegate* network_delegate) const override {
- if (!lifecycle_callback_.is_null())
- lifecycle_callback_.Run(1);
+ lifecycle_callback_.Run(1, 0);
return new URLRequestSpecifiedResponseJob(
- request, network_delegate, base::Bind(lifecycle_callback_, -1));
+ request, network_delegate, *http_response_info_,
+ base::Bind(lifecycle_callback_, -1));
}
- // The caller must ensure that the callback is valid to call for the
- // lifetime of the SpecifiedResponseJobInterceptor (i.e. until
- // Unregister() is called).
- static void RegisterWithFilter(const LifecycleCallback& lifecycle_callback) {
+ // The caller must ensure that both |*http_response_info| and the
+ // callback remain valid for the lifetime of the
+ // SpecifiedResponseJobInterceptor (i.e. until Unregister() is called).
+ static void RegisterWithFilter(HttpResponseInfo* http_response_info,
+ const LifecycleCallback& lifecycle_callback) {
scoped_ptr<SpecifiedResponseJobInterceptor> interceptor(
- new SpecifiedResponseJobInterceptor(lifecycle_callback));
+ new SpecifiedResponseJobInterceptor(http_response_info,
+ lifecycle_callback));
net::URLRequestFilter::GetInstance()->AddHostnameInterceptor(
"http", kTestDomain, interceptor.Pass());
@@ -100,26 +131,32 @@ class SpecifiedResponseJobInterceptor : public URLRequestInterceptor {
}
private:
- void OnSpecfiedResponseJobDestruction() const {
- if (!lifecycle_callback_.is_null())
- lifecycle_callback_.Run(-1);
- }
-
+ HttpResponseInfo* http_response_info_;
LifecycleCallback lifecycle_callback_;
- mutable base::WeakPtrFactory<SpecifiedResponseJobInterceptor> factory_;
};
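// Illustrative sketch; not part of this patch. Typical wiring of the
// interceptor, mirroring what the test fixture below does. The free functions
// and |shared_info| are illustrative only; both |*shared_info| and the bound
// callback must remain valid until Unregister() is called.
void OnJobCountChanged(int outstanding_job_delta, int destruction_load_flags) {
  // Called with (+1, 0) when a job is created and (-1, load_flags) when a
  // job is destroyed.
}

void RegisterSpecifiedResponseJobs(HttpResponseInfo* shared_info) {
  shared_info->request_time = base::Time::Now();
  shared_info->response_time = base::Time::Now();
  SpecifiedResponseJobInterceptor::RegisterWithFilter(
      shared_info, base::Bind(&OnJobCountChanged));
  // Only the pointer is shared, so later changes to |*shared_info| (e.g.
  // swapping in new headers) affect jobs created after the change.
}
// End sketch.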
// Local test infrastructure
// * URLRequestSpecifiedResponseJob: A URLRequestJob that returns
// a different but derivable response for each URL (used for all
-// url requests in this file). The class provides interfaces to
-// signal whenever the total number of jobs transitions to zero.
-// * SdchDictionaryFetcherTest: Registers a callback with the above
-// class, and provides blocking interfaces for a transition to zero jobs.
-// Contains an SdchDictionaryFetcher, and tracks fetcher dictionary
-// addition callbacks.
-// Most tests schedule a dictionary fetch, wait for no jobs outstanding,
-// then test that the fetch results are as expected.
+// url requests in this file). This class is initialized with
+// the HttpResponseInfo to return (if any), as well as a callback
+// that is called when the class is destroyed. That callback
+// takes as argument the load flags used for the request the
+// job was created for.
+// * SpecifiedResponseJobInterceptor: This class is a
+// URLRequestInterceptor that generates the class above. It is constructed
+// with a pointer to the (mutable) response info that should be
+// returned from the URLRequestSpecifiedResponseJob children, as well as
+// a callback that is run when URLRequestSpecifiedResponseJobs are
+// created or destroyed.
+// * SdchDictionaryFetcherTest: This class registers the above interceptor,
+// tracks the number of jobs requested and the subset of those
+// that are still outstanding. It exports an interface to wait until there
+// are no jobs outstanding. It shares an HttpResponseInfo structure
+// with the SpecifiedResponseJobInterceptor to control the response
+// information returned by the jobs.
+// The standard pattern for tests is to schedule a dictionary fetch, wait
+// for no jobs outstanding, then test that the fetch results are as expected.
class SdchDictionaryFetcherTest : public ::testing::Test {
public:
@@ -135,13 +172,21 @@ class SdchDictionaryFetcherTest : public ::testing::Test {
SdchDictionaryFetcherTest()
: jobs_requested_(0),
jobs_outstanding_(0),
- context_(new TestURLRequestContext),
- fetcher_(new SdchDictionaryFetcher(
- context_.get(),
+ last_load_flags_seen_(0),
+ last_tag_(0),
+ default_callback_(
base::Bind(&SdchDictionaryFetcherTest::OnDictionaryFetched,
- base::Unretained(this)))),
+ // SdchDictionaryFetcherTest will outlast its
+ // member variables.
+ base::Unretained(this),
+ 0)),
+ context_(new TestURLRequestContext),
+ fetcher_(new SdchDictionaryFetcher(context_.get())),
factory_(this) {
+ response_info_to_return_.request_time = base::Time::Now();
+ response_info_to_return_.response_time = base::Time::Now();
SpecifiedResponseJobInterceptor::RegisterWithFilter(
+ &response_info_to_return_,
base::Bind(&SdchDictionaryFetcherTest::OnNumberJobsChanged,
factory_.GetWeakPtr()));
}
@@ -150,11 +195,13 @@ class SdchDictionaryFetcherTest : public ::testing::Test {
SpecifiedResponseJobInterceptor::Unregister();
}
- void OnDictionaryFetched(const std::string& dictionary_text,
+ void OnDictionaryFetched(int tag,
+ const std::string& dictionary_text,
const GURL& dictionary_url,
const BoundNetLog& net_log) {
dictionary_additions.push_back(
DictionaryAdditions(dictionary_text, dictionary_url));
+ last_tag_ = tag;
}
// Return (in |*out|) all dictionary additions since the last time
@@ -169,7 +216,7 @@ class SdchDictionaryFetcherTest : public ::testing::Test {
// May not be called outside the SetUp()/TearDown() interval.
int jobs_requested() const { return jobs_requested_; }
- GURL PathToGurl(const char* path) {
+ GURL PathToGurl(const char* path) const {
std::string gurl_string("http://");
gurl_string += kTestDomain;
gurl_string += "/";
@@ -187,10 +234,29 @@ class SdchDictionaryFetcherTest : public ::testing::Test {
run_loop_.reset();
}
+ HttpResponseInfo* response_info_to_return() {
+ return &response_info_to_return_;
+ }
+
+ // Simplified wrapper around fetcher()->Schedule().
+ void Schedule(const GURL& dictionary_url) {
+ fetcher()->Schedule(dictionary_url, default_callback_);
+ }
+
+ int last_load_flags_seen() const { return last_load_flags_seen_; }
+ int last_tag() const { return last_tag_; }
+
+ const SdchDictionaryFetcher::OnDictionaryFetchedCallback& default_callback() {
+ return default_callback_;
+ }
+
private:
- void OnNumberJobsChanged(int outstanding_jobs_delta) {
+ void OnNumberJobsChanged(int outstanding_jobs_delta, int load_flags) {
+ DCHECK_NE(0, outstanding_jobs_delta);
if (outstanding_jobs_delta > 0)
jobs_requested_ += outstanding_jobs_delta;
+ else
+ last_load_flags_seen_ = load_flags;
jobs_outstanding_ += outstanding_jobs_delta;
if (jobs_outstanding_ == 0 && run_loop_)
run_loop_->Quit();
@@ -198,17 +264,42 @@ class SdchDictionaryFetcherTest : public ::testing::Test {
int jobs_requested_;
int jobs_outstanding_;
+ int last_load_flags_seen_;
+ int last_tag_;
+ const SdchDictionaryFetcher::OnDictionaryFetchedCallback default_callback_;
scoped_ptr<base::RunLoop> run_loop_;
scoped_ptr<TestURLRequestContext> context_;
scoped_ptr<SdchDictionaryFetcher> fetcher_;
std::vector<DictionaryAdditions> dictionary_additions;
+ HttpResponseInfo response_info_to_return_;
base::WeakPtrFactory<SdchDictionaryFetcherTest> factory_;
};
// Schedule a fetch and make sure it happens.
TEST_F(SdchDictionaryFetcherTest, Basic) {
GURL dictionary_url(PathToGurl("dictionary"));
- fetcher()->Schedule(dictionary_url);
+ Schedule(dictionary_url);
+ WaitForNoJobs();
+
+ EXPECT_EQ(1, jobs_requested());
+ std::vector<DictionaryAdditions> additions;
+ GetDictionaryAdditions(&additions);
+ ASSERT_EQ(1u, additions.size());
+ EXPECT_EQ(
+ URLRequestSpecifiedResponseJob::ExpectedResponseForURL(dictionary_url),
+ additions[0].dictionary_text);
+ EXPECT_FALSE(last_load_flags_seen() & LOAD_ONLY_FROM_CACHE);
+}
+
+// Confirm that if Schedule is called with a specific callback, it makes it all
+// the way through.
+TEST_F(SdchDictionaryFetcherTest, SpecializedCallback) {
+ const int kExtraDataTag = 0xb0e0e0f0;
+ GURL dictionary_url(PathToGurl("dictionary"));
+ fetcher()->Schedule(
+ dictionary_url,
+ base::Bind(&SdchDictionaryFetcherTest::OnDictionaryFetched,
+ base::Unretained(this), kExtraDataTag));
WaitForNoJobs();
EXPECT_EQ(1, jobs_requested());
@@ -218,14 +309,16 @@ TEST_F(SdchDictionaryFetcherTest, Basic) {
EXPECT_EQ(
URLRequestSpecifiedResponseJob::ExpectedResponseForURL(dictionary_url),
additions[0].dictionary_text);
+ EXPECT_FALSE(last_load_flags_seen() & LOAD_ONLY_FROM_CACHE);
+ EXPECT_EQ(kExtraDataTag, last_tag());
}
// Multiple fetches of the same URL should result in only one request.
TEST_F(SdchDictionaryFetcherTest, Multiple) {
GURL dictionary_url(PathToGurl("dictionary"));
- fetcher()->Schedule(dictionary_url);
- fetcher()->Schedule(dictionary_url);
- fetcher()->Schedule(dictionary_url);
+ Schedule(dictionary_url);
+ Schedule(dictionary_url);
+ Schedule(dictionary_url);
WaitForNoJobs();
EXPECT_EQ(1, jobs_requested());
@@ -243,9 +336,9 @@ TEST_F(SdchDictionaryFetcherTest, Cancel) {
GURL dictionary_url_2(PathToGurl("dictionary_2"));
GURL dictionary_url_3(PathToGurl("dictionary_3"));
- fetcher()->Schedule(dictionary_url_1);
- fetcher()->Schedule(dictionary_url_2);
- fetcher()->Schedule(dictionary_url_3);
+ Schedule(dictionary_url_1);
+ Schedule(dictionary_url_2);
+ Schedule(dictionary_url_3);
fetcher()->Cancel();
WaitForNoJobs();
@@ -258,8 +351,8 @@ TEST_F(SdchDictionaryFetcherTest, Cancel) {
TEST_F(SdchDictionaryFetcherTest, LoopRace) {
GURL dictionary0_url(PathToGurl("dictionary0"));
GURL dictionary1_url(PathToGurl("dictionary1"));
- fetcher()->Schedule(dictionary0_url);
- fetcher()->Schedule(dictionary1_url);
+ Schedule(dictionary0_url);
+ Schedule(dictionary1_url);
WaitForNoJobs();
ASSERT_EQ(2, jobs_requested());
@@ -274,6 +367,54 @@ TEST_F(SdchDictionaryFetcherTest, LoopRace) {
additions[1].dictionary_text);
}
+TEST_F(SdchDictionaryFetcherTest, ScheduleReloadLoadFlags) {
+ GURL dictionary_url(PathToGurl("dictionary"));
+ fetcher()->ScheduleReload(dictionary_url, default_callback());
+
+ WaitForNoJobs();
+ EXPECT_EQ(1, jobs_requested());
+ std::vector<DictionaryAdditions> additions;
+ GetDictionaryAdditions(&additions);
+ ASSERT_EQ(1u, additions.size());
+ EXPECT_EQ(
+ URLRequestSpecifiedResponseJob::ExpectedResponseForURL(dictionary_url),
+ additions[0].dictionary_text);
+ EXPECT_TRUE(last_load_flags_seen() & LOAD_ONLY_FROM_CACHE);
+}
+
+TEST_F(SdchDictionaryFetcherTest, ScheduleReloadFresh) {
+ response_info_to_return()->headers = new HttpResponseHeaders("");
+ response_info_to_return()->headers->AddHeader("Cache-Control: max-age=1000");
+
+ GURL dictionary_url(PathToGurl("dictionary"));
+ fetcher()->ScheduleReload(dictionary_url, default_callback());
+
+ WaitForNoJobs();
+ EXPECT_EQ(1, jobs_requested());
+ std::vector<DictionaryAdditions> additions;
+ GetDictionaryAdditions(&additions);
+ ASSERT_EQ(1u, additions.size());
+ EXPECT_EQ(
+ URLRequestSpecifiedResponseJob::ExpectedResponseForURL(dictionary_url),
+ additions[0].dictionary_text);
+ EXPECT_TRUE(last_load_flags_seen() & LOAD_ONLY_FROM_CACHE);
+}
+
+TEST_F(SdchDictionaryFetcherTest, ScheduleReloadStale) {
+ response_info_to_return()->headers = new HttpResponseHeaders("");
+ response_info_to_return()->headers->AddHeader("Cache-Control: no-cache");
+
+ GURL dictionary_url(PathToGurl("dictionary"));
+ fetcher()->ScheduleReload(dictionary_url, default_callback());
+
+ WaitForNoJobs();
+ EXPECT_EQ(1, jobs_requested());
+ std::vector<DictionaryAdditions> additions;
+ GetDictionaryAdditions(&additions);
+ ASSERT_EQ(0u, additions.size());
+ EXPECT_TRUE(last_load_flags_seen() & LOAD_ONLY_FROM_CACHE);
+}
+
} // namespace
} // namespace net
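The three ScheduleReload tests above drive the cache-freshness logic through the shared, mutable HttpResponseInfo one configuration at a time. Below is a hedged sketch of a possible follow-on test, not part of this patch, that flips the cached headers from fresh to stale between two reloads; it assumes placement in the same anonymous namespace as the tests above and uses only the fixture helpers defined in this file. The test name and URLs are hypothetical.

TEST_F(SdchDictionaryFetcherTest, ScheduleReloadFreshThenStale) {
  // Fresh cached entry: the reload is expected to produce a dictionary
  // addition, as in ScheduleReloadFresh above.
  response_info_to_return()->headers = new HttpResponseHeaders("");
  response_info_to_return()->headers->AddHeader("Cache-Control: max-age=1000");
  GURL fresh_url(PathToGurl("dictionary_fresh"));
  fetcher()->ScheduleReload(fresh_url, default_callback());
  WaitForNoJobs();

  // Stale cached entry: jobs created after this change see the new headers,
  // so the reload should not produce an addition, as in ScheduleReloadStale.
  response_info_to_return()->headers = new HttpResponseHeaders("");
  response_info_to_return()->headers->AddHeader("Cache-Control: no-cache");
  GURL stale_url(PathToGurl("dictionary_stale"));
  fetcher()->ScheduleReload(stale_url, default_callback());
  WaitForNoJobs();

  EXPECT_EQ(2, jobs_requested());
  std::vector<DictionaryAdditions> additions;
  GetDictionaryAdditions(&additions);
  // Only the fresh response should have been reported.
  ASSERT_EQ(1u, additions.size());
  EXPECT_EQ(
      URLRequestSpecifiedResponseJob::ExpectedResponseForURL(fresh_url),
      additions[0].dictionary_text);
  EXPECT_TRUE(last_load_flags_seen() & LOAD_ONLY_FROM_CACHE);
}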