Chromium Code Reviews

Side by Side Diff: chrome/browser/net/passive_log_collector_unittest.cc

Issue 848006: Generalize the net module's LoadLog facility from a passive container, to an event stream (NetLog). (Closed) Base URL: svn://chrome-svn/chrome/trunk/src/
Patch Set: Split up RequestTracker into ConnectJobTracker+RequestTracker+RequestTrackerBase, address comments Created 10 years, 9 months ago
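Note on the new API exercised below: the old tests created TestRequest objects and registered them with tracker.Add()/tracker.Remove(); the rewritten tests instead feed NetLog entries to the tracker, which only observes the event stream. What follows is a minimal sketch of that flow, assembled from the MakeStartLogEntryWithURL()/MakeEndLogEntry() helpers and the RequestTracker calls in this patch (the function name SketchPassiveTracking and the example URL are illustrative, not part of the patch):

#include "chrome/browser/net/passive_log_collector.h"

void SketchPassiveTracking() {
  // The tracker holds no pointers to request objects; it is constructed
  // standalone (NULL matches how the tests below construct it) and is
  // driven purely by log entries.
  PassiveLogCollector::RequestTracker tracker(NULL);

  // A PHASE_BEGIN entry for source id 1 makes the request "live"; the URL
  // travels in the entry's string field.
  net::NetLog::Entry begin;
  begin.source.type = net::NetLog::SOURCE_NONE;
  begin.source.id = 1;
  begin.type = net::NetLog::Entry::TYPE_EVENT;
  begin.event = net::NetLog::Event(net::NetLog::TYPE_REQUEST_ALIVE,
                                   net::NetLog::PHASE_BEGIN);
  begin.string = "http://example/";
  tracker.OnAddEntry(begin);

  // The matching PHASE_END entry retires the request to the graveyard.
  net::NetLog::Entry end;
  end.source.type = net::NetLog::SOURCE_NONE;
  end.source.id = 1;
  end.type = net::NetLog::Entry::TYPE_EVENT;
  end.event = net::NetLog::Event(net::NetLog::TYPE_REQUEST_ALIVE,
                                 net::NetLog::PHASE_END);
  tracker.OnAddEntry(end);

  // At this point GetLiveRequests() is empty and
  // GetRecentlyDeceased()[0].url holds the URL (possibly truncated).
}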
1 // Copyright (c) 2006-2009 The Chromium Authors. All rights reserved. 1 // Copyright (c) 2010 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "net/url_request/request_tracker.h" 5 #include "chrome/browser/net/passive_log_collector.h"
6 6
7 #include "base/compiler_specific.h" 7 #include "base/compiler_specific.h"
8 #include "base/format_macros.h" 8 #include "base/format_macros.h"
9 #include "base/string_util.h" 9 #include "base/string_util.h"
10 #include "testing/gtest/include/gtest/gtest.h" 10 #include "testing/gtest/include/gtest/gtest.h"
11 11
12 namespace { 12 namespace {
13 13
14 typedef PassiveLogCollector::RequestTracker RequestTracker;
15 typedef PassiveLogCollector::RequestInfoList RequestInfoList;
16
17 const net::NetLog::SourceType kSourceType = net::NetLog::SOURCE_NONE;
18
19 net::NetLog::Entry MakeStartLogEntryWithURL(int source_id,
20 const std::string& url) {
21 net::NetLog::Entry entry;
22 entry.source.type = kSourceType;
23 entry.source.id = source_id;
24 entry.type = net::NetLog::Entry::TYPE_EVENT;
25 entry.event = net::NetLog::Event(net::NetLog::TYPE_REQUEST_ALIVE,
26 net::NetLog::PHASE_BEGIN);
27 entry.string = url;
28 return entry;
29 }
30
31 net::NetLog::Entry MakeStartLogEntry(int source_id) {
32 return MakeStartLogEntryWithURL(source_id,
33 StringPrintf("http://req%d", source_id));
34 }
35
36 net::NetLog::Entry MakeEndLogEntry(int source_id) {
37 net::NetLog::Entry entry;
38 entry.source.type = kSourceType;
39 entry.source.id = source_id;
40 entry.type = net::NetLog::Entry::TYPE_EVENT;
41 entry.event = net::NetLog::Event(net::NetLog::TYPE_REQUEST_ALIVE,
42 net::NetLog::PHASE_END);
43 return entry;
44 }
45
14 static const int kMaxNumLoadLogEntries = 1; 46 static const int kMaxNumLoadLogEntries = 1;
15 47
16 class TestRequest {
17 public:
18 explicit TestRequest(const GURL& url)
19 : url_(url),
20 load_log_(new net::LoadLog(kMaxNumLoadLogEntries)),
21 ALLOW_THIS_IN_INITIALIZER_LIST(request_tracker_node_(this)) {}
22 ~TestRequest() {}
23
24 // This method is used in RequestTrackerTest::Basic test.
25 const GURL& original_url() const { return url_; }
26
27 private:
28 // RequestTracker<T> will access GetRecentRequestInfo() and
29 // |request_tracker_node_|.
30 friend class RequestTracker<TestRequest>;
31
32 void GetInfoForTracker(
33 RequestTracker<TestRequest>::RecentRequestInfo* info) const {
34 info->original_url = url_;
35 info->load_log = load_log_;
36 }
37
38 const GURL url_;
39 scoped_refptr<net::LoadLog> load_log_;
40
41 RequestTracker<TestRequest>::Node request_tracker_node_;
42
43 DISALLOW_COPY_AND_ASSIGN(TestRequest);
44 };
45
46
47 TEST(RequestTrackerTest, BasicBounded) { 48 TEST(RequestTrackerTest, BasicBounded) {
48 RequestTracker<TestRequest> tracker; 49 RequestTracker tracker(NULL);
49 EXPECT_FALSE(tracker.IsUnbounded()); 50 EXPECT_FALSE(tracker.IsUnbounded());
50 EXPECT_EQ(0u, tracker.GetLiveRequests().size()); 51 EXPECT_EQ(0u, tracker.GetLiveRequests().size());
51 EXPECT_EQ(0u, tracker.GetRecentlyDeceased().size()); 52 EXPECT_EQ(0u, tracker.GetRecentlyDeceased().size());
52 53
53 TestRequest req1(GURL("http://req1")); 54 tracker.OnAddEntry(MakeStartLogEntry(1));
54 TestRequest req2(GURL("http://req2")); 55 tracker.OnAddEntry(MakeStartLogEntry(2));
55 TestRequest req3(GURL("http://req3")); 56 tracker.OnAddEntry(MakeStartLogEntry(3));
56 TestRequest req4(GURL("http://req4")); 57 tracker.OnAddEntry(MakeStartLogEntry(4));
57 TestRequest req5(GURL("http://req5")); 58 tracker.OnAddEntry(MakeStartLogEntry(5));
58 59
59 tracker.Add(&req1); 60 RequestInfoList live_reqs = tracker.GetLiveRequests();
60 tracker.Add(&req2);
61 tracker.Add(&req3);
62 tracker.Add(&req4);
63 tracker.Add(&req5);
64
65 std::vector<TestRequest*> live_reqs = tracker.GetLiveRequests();
66 61
67 ASSERT_EQ(5u, live_reqs.size()); 62 ASSERT_EQ(5u, live_reqs.size());
68 EXPECT_EQ(GURL("http://req1"), live_reqs[0]->original_url()); 63 EXPECT_EQ("http://req1", live_reqs[0].url);
69 EXPECT_EQ(GURL("http://req2"), live_reqs[1]->original_url()); 64 EXPECT_EQ("http://req2", live_reqs[1].url);
70 EXPECT_EQ(GURL("http://req3"), live_reqs[2]->original_url()); 65 EXPECT_EQ("http://req3", live_reqs[2].url);
71 EXPECT_EQ(GURL("http://req4"), live_reqs[3]->original_url()); 66 EXPECT_EQ("http://req4", live_reqs[3].url);
72 EXPECT_EQ(GURL("http://req5"), live_reqs[4]->original_url()); 67 EXPECT_EQ("http://req5", live_reqs[4].url);
73 68
74 tracker.Remove(&req1); 69 tracker.OnAddEntry(MakeEndLogEntry(1));
75 tracker.Remove(&req5); 70 tracker.OnAddEntry(MakeEndLogEntry(5));
76 tracker.Remove(&req3); 71 tracker.OnAddEntry(MakeEndLogEntry(3));
77 72
78 ASSERT_EQ(3u, tracker.GetRecentlyDeceased().size()); 73 ASSERT_EQ(3u, tracker.GetRecentlyDeceased().size());
79 74
80 live_reqs = tracker.GetLiveRequests(); 75 live_reqs = tracker.GetLiveRequests();
81 76
82 ASSERT_EQ(2u, live_reqs.size()); 77 ASSERT_EQ(2u, live_reqs.size());
83 EXPECT_EQ(GURL("http://req2"), live_reqs[0]->original_url()); 78 EXPECT_EQ("http://req2", live_reqs[0].url);
84 EXPECT_EQ(GURL("http://req4"), live_reqs[1]->original_url()); 79 EXPECT_EQ("http://req4", live_reqs[1].url);
85 } 80 }
86 81
87 TEST(RequestTrackerTest, GraveyardBounded) { 82 TEST(RequestTrackerTest, GraveyardBounded) {
88 RequestTracker<TestRequest> tracker; 83 RequestTracker tracker(NULL);
89 EXPECT_FALSE(tracker.IsUnbounded()); 84 EXPECT_FALSE(tracker.IsUnbounded());
90 EXPECT_EQ(0u, tracker.GetLiveRequests().size()); 85 EXPECT_EQ(0u, tracker.GetLiveRequests().size());
91 EXPECT_EQ(0u, tracker.GetRecentlyDeceased().size()); 86 EXPECT_EQ(0u, tracker.GetRecentlyDeceased().size());
92 87
93 // Add twice as many requests as will fit in the graveyard. 88 // Add twice as many requests as will fit in the graveyard.
94 for (size_t i = 0; 89 for (size_t i = 0; i < RequestTracker::kMaxGraveyardSize * 2; ++i) {
95 i < RequestTracker<TestRequest>::kMaxGraveyardSize * 2; 90 tracker.OnAddEntry(MakeStartLogEntry(i));
96 ++i) { 91 tracker.OnAddEntry(MakeEndLogEntry(i));
97 TestRequest req(GURL(StringPrintf("http://req%" PRIuS, i).c_str()));
98 tracker.Add(&req);
99 tracker.Remove(&req);
100 } 92 }
101 93
102 // Check that only the last |kMaxGraveyardSize| requests are in-memory. 94 // Check that only the last |kMaxGraveyardSize| requests are in-memory.
103 95
104 RequestTracker<TestRequest>::RecentRequestInfoList recent_reqs = 96 RequestInfoList recent_reqs = tracker.GetRecentlyDeceased();
105 tracker.GetRecentlyDeceased();
106 97
107 ASSERT_EQ(RequestTracker<TestRequest>::kMaxGraveyardSize, recent_reqs.size()); 98 ASSERT_EQ(RequestTracker::kMaxGraveyardSize, recent_reqs.size());
108 99
109 for (size_t i = 0; i < RequestTracker<TestRequest>::kMaxGraveyardSize; ++i) { 100 for (size_t i = 0; i < RequestTracker::kMaxGraveyardSize; ++i) {
110 size_t req_number = i + RequestTracker<TestRequest>::kMaxGraveyardSize; 101 size_t req_number = i + RequestTracker::kMaxGraveyardSize;
111 GURL url(StringPrintf("http://req%" PRIuS, req_number).c_str()); 102 std::string url = StringPrintf("http://req%" PRIuS, req_number);
112 EXPECT_EQ(url, recent_reqs[i].original_url); 103 EXPECT_EQ(url, recent_reqs[i].url);
113 } 104 }
114 } 105 }
115 106
116 TEST(RequestTrackerTest, GraveyardUnbounded) { 107 TEST(RequestTrackerTest, GraveyardUnbounded) {
117 RequestTracker<TestRequest> tracker; 108 RequestTracker tracker(NULL);
118 EXPECT_FALSE(tracker.IsUnbounded()); 109 EXPECT_FALSE(tracker.IsUnbounded());
119 EXPECT_EQ(0u, tracker.GetLiveRequests().size()); 110 EXPECT_EQ(0u, tracker.GetLiveRequests().size());
120 EXPECT_EQ(0u, tracker.GetRecentlyDeceased().size()); 111 EXPECT_EQ(0u, tracker.GetRecentlyDeceased().size());
121 112
122 tracker.SetUnbounded(true); 113 tracker.SetUnbounded(true);
123 114
124 EXPECT_TRUE(tracker.IsUnbounded()); 115 EXPECT_TRUE(tracker.IsUnbounded());
125 116
126 // Add twice as many requests as would fit in the bounded graveyard. 117 // Add twice as many requests as would fit in the bounded graveyard.
127 118
128 size_t kMaxSize = RequestTracker<TestRequest>::kMaxGraveyardSize * 2; 119 size_t kMaxSize = RequestTracker::kMaxGraveyardSize * 2;
129 for (size_t i = 0; i < kMaxSize; ++i) { 120 for (size_t i = 0; i < kMaxSize; ++i) {
130 TestRequest req(GURL(StringPrintf("http://req%" PRIuS, i).c_str())); 121 tracker.OnAddEntry(MakeStartLogEntry(i));
131 tracker.Add(&req); 122 tracker.OnAddEntry(MakeEndLogEntry(i));
132 tracker.Remove(&req);
133 } 123 }
134 124
135 // Check that all of them got saved. 125 // Check that all of them got saved.
136 126
137 RequestTracker<TestRequest>::RecentRequestInfoList recent_reqs = 127 RequestInfoList recent_reqs = tracker.GetRecentlyDeceased();
138 tracker.GetRecentlyDeceased();
139 128
140 ASSERT_EQ(kMaxSize, recent_reqs.size()); 129 ASSERT_EQ(kMaxSize, recent_reqs.size());
141 130
142 for (size_t i = 0; i < kMaxSize; ++i) { 131 for (size_t i = 0; i < kMaxSize; ++i) {
143 GURL url(StringPrintf("http://req%" PRIuS, i).c_str()); 132 std::string url = StringPrintf("http://req%" PRIuS, i);
144 EXPECT_EQ(url, recent_reqs[i].original_url); 133 EXPECT_EQ(url, recent_reqs[i].url);
145 } 134 }
146 } 135 }
147 136
148 // Check that very long URLs are truncated. 137 // Check that very long URLs are truncated.
149 TEST(RequestTrackerTest, GraveyardURLBounded) { 138 TEST(RequestTrackerTest, GraveyardURLBounded) {
150 RequestTracker<TestRequest> tracker; 139 RequestTracker tracker(NULL);
151 EXPECT_FALSE(tracker.IsUnbounded()); 140 EXPECT_FALSE(tracker.IsUnbounded());
152 141
153 std::string big_url_spec("http://"); 142 std::string big_url("http://");
154 big_url_spec.resize(2 * RequestTracker<TestRequest>::kMaxGraveyardURLSize, 143 big_url.resize(2 * RequestTracker::kMaxGraveyardURLSize, 'x');
155 'x');
156 GURL big_url(big_url_spec);
157 TestRequest req(big_url);
158 144
159 tracker.Add(&req); 145 tracker.OnAddEntry(MakeStartLogEntryWithURL(1, big_url));
160 tracker.Remove(&req); 146 tracker.OnAddEntry(MakeEndLogEntry(1));
161 147
162 ASSERT_EQ(1u, tracker.GetRecentlyDeceased().size()); 148 ASSERT_EQ(1u, tracker.GetRecentlyDeceased().size());
163 // The +1 is because GURL canonicalizes with a trailing '/' ... maybe 149 EXPECT_EQ(RequestTracker::kMaxGraveyardURLSize,
164 // we should just save the std::string rather than the GURL. 150 tracker.GetRecentlyDeceased()[0].url.size());
165 EXPECT_EQ(RequestTracker<TestRequest>::kMaxGraveyardURLSize + 1,
166 tracker.GetRecentlyDeceased()[0].original_url.spec().size());
167 } 151 }
168 152
169 // Test that it doesn't fail if the URL was invalid. http://crbug.com/21423. 153 // Check that we exclude "chrome://" URLs from being saved into the recent
170 TEST(URLRequestTrackerTest, TrackingInvalidURL) { 154 // requests list (graveyard).
171 RequestTracker<TestRequest> tracker; 155 TEST(RequestTrackerTest, GraveyardIsFiltered) {
156 RequestTracker tracker(NULL);
172 EXPECT_FALSE(tracker.IsUnbounded()); 157 EXPECT_FALSE(tracker.IsUnbounded());
173 158
174 EXPECT_EQ(0u, tracker.GetLiveRequests().size());
175 EXPECT_EQ(0u, tracker.GetRecentlyDeceased().size());
176
177 GURL invalid_url("xabc");
178 EXPECT_FALSE(invalid_url.is_valid());
179 TestRequest req(invalid_url);
180
181 tracker.Add(&req);
182 tracker.Remove(&req);
183
184 // Check that the invalid URL made it into graveyard.
185 ASSERT_EQ(1u, tracker.GetRecentlyDeceased().size());
186 EXPECT_FALSE(tracker.GetRecentlyDeceased()[0].original_url.is_valid());
187 }
188
189 bool ShouldRequestBeAddedToGraveyard(const GURL& url) {
190 return !url.SchemeIs("chrome") && !url.SchemeIs("data");
191 }
192
193 // Check that we can exclude "chrome://" URLs and "data:" URLs from being
194 // saved into the recent requests list (graveyard), by using a filter.
195 TEST(RequestTrackerTest, GraveyardCanBeFiltered) {
196 RequestTracker<TestRequest> tracker;
197 EXPECT_FALSE(tracker.IsUnbounded());
198
199 tracker.SetGraveyardFilter(ShouldRequestBeAddedToGraveyard);
200
201 // This will be excluded. 159 // This will be excluded.
202 GURL url1("chrome://dontcare/"); 160 std::string url1 = "chrome://dontcare/";
203 TestRequest req1(url1); 161 tracker.OnAddEntry(MakeStartLogEntryWithURL(1, url1));
204 tracker.Add(&req1); 162 tracker.OnAddEntry(MakeEndLogEntry(1));
205 tracker.Remove(&req1);
206 163
207 // This will be added to graveyard. 164 // This will be added to graveyard.
208 GURL url2("chrome2://dontcare/"); 165 std::string url2 = "chrome2://dontcare/";
209 TestRequest req2(url2); 166 tracker.OnAddEntry(MakeStartLogEntryWithURL(2, url2));
210 tracker.Add(&req2); 167 tracker.OnAddEntry(MakeEndLogEntry(2));
211 tracker.Remove(&req2);
212 168
213 // This will be added to graveyard. 169 // This will be added to graveyard.
214 GURL url3("http://foo/"); 170 std::string url3 = "http://foo/";
215 TestRequest req3(url3); 171 tracker.OnAddEntry(MakeStartLogEntryWithURL(3, url3));
216 tracker.Add(&req3); 172 tracker.OnAddEntry(MakeEndLogEntry(3));
217 tracker.Remove(&req3);
218
219 // This will be excluded.
220 GURL url4("data:sup");
221 TestRequest req4(url4);
222 tracker.Add(&req4);
223 tracker.Remove(&req4);
224 173
225 ASSERT_EQ(2u, tracker.GetRecentlyDeceased().size()); 174 ASSERT_EQ(2u, tracker.GetRecentlyDeceased().size());
226 EXPECT_EQ(url2, tracker.GetRecentlyDeceased()[0].original_url); 175 EXPECT_EQ(url2, tracker.GetRecentlyDeceased()[0].url);
227 EXPECT_EQ(url3, tracker.GetRecentlyDeceased()[1].original_url); 176 EXPECT_EQ(url3, tracker.GetRecentlyDeceased()[1].url);
228 } 177 }
229 178
230 // Convert an unbounded tracker back to being bounded. 179 // Convert an unbounded tracker back to being bounded.
231 TEST(RequestTrackerTest, ConvertUnboundedToBounded) { 180 TEST(RequestTrackerTest, ConvertUnboundedToBounded) {
232 RequestTracker<TestRequest> tracker; 181 RequestTracker tracker(NULL);
233 EXPECT_FALSE(tracker.IsUnbounded()); 182 EXPECT_FALSE(tracker.IsUnbounded());
234 EXPECT_EQ(0u, tracker.GetLiveRequests().size()); 183 EXPECT_EQ(0u, tracker.GetLiveRequests().size());
235 EXPECT_EQ(0u, tracker.GetRecentlyDeceased().size()); 184 EXPECT_EQ(0u, tracker.GetRecentlyDeceased().size());
236 185
237 tracker.SetUnbounded(true); 186 tracker.SetUnbounded(true);
238 EXPECT_TRUE(tracker.IsUnbounded()); 187 EXPECT_TRUE(tracker.IsUnbounded());
239 188
240 // Add twice as many requests as would fit in the bounded graveyard. 189 // Add twice as many requests as would fit in the bounded graveyard.
241 190
242 size_t kMaxSize = RequestTracker<TestRequest>::kMaxGraveyardSize * 2; 191 size_t kMaxSize = RequestTracker::kMaxGraveyardSize * 2;
243 for (size_t i = 0; i < kMaxSize; ++i) { 192 for (size_t i = 0; i < kMaxSize; ++i) {
244 TestRequest req(GURL(StringPrintf("http://req%" PRIuS, i).c_str())); 193 tracker.OnAddEntry(MakeStartLogEntry(i));
245 tracker.Add(&req); 194 tracker.OnAddEntry(MakeEndLogEntry(i));
246 tracker.Remove(&req);
247 } 195 }
248 196
249 // Check that all of them got saved. 197 // Check that all of them got saved.
250 ASSERT_EQ(kMaxSize, tracker.GetRecentlyDeceased().size()); 198 ASSERT_EQ(kMaxSize, tracker.GetRecentlyDeceased().size());
251 199
252 // Now make the tracker bounded, and add more entries to its graveyard. 200 // Now make the tracker bounded, and add more entries to its graveyard.
253 tracker.SetUnbounded(false); 201 tracker.SetUnbounded(false);
254 202
255 kMaxSize = RequestTracker<TestRequest>::kMaxGraveyardSize; 203 kMaxSize = RequestTracker::kMaxGraveyardSize;
256 for (size_t i = 0; i < kMaxSize; ++i) { 204 for (size_t i = kMaxSize; i < 2 * kMaxSize; ++i) {
257 TestRequest req(GURL(StringPrintf("http://req%" PRIuS, i).c_str())); 205 tracker.OnAddEntry(MakeStartLogEntry(i));
258 tracker.Add(&req); 206 tracker.OnAddEntry(MakeEndLogEntry(i));
259 tracker.Remove(&req);
260 } 207 }
261 208
262 // We should only have kMaxGraveyardSize entries now. 209 // We should only have kMaxGraveyardSize entries now.
263 ASSERT_EQ(kMaxSize, tracker.GetRecentlyDeceased().size()); 210 ASSERT_EQ(kMaxSize, tracker.GetRecentlyDeceased().size());
264 } 211 }
265 212
266 } // namespace 213 } // namespace