// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "net/spdy/hpack_huffman_aggregator.h"

#include "base/metrics/histogram.h"
#include "base/metrics/histogram_samples.h"
#include "base/metrics/statistics_recorder.h"
#include "net/base/load_flags.h"
#include "net/http/http_request_headers.h"
#include "net/http/http_request_info.h"
#include "net/http/http_response_headers.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"

namespace net {

using ::testing::Each;
using ::testing::ElementsAre;
using ::testing::Eq;
using ::testing::Pair;

namespace {
const char kHistogramName[] = "Net.SpdyHpackEncodedCharacterFrequency";
}  // namespace

namespace test {

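// Test peer exposing HpackHuffmanAggregator's private state (character
// counts, per-origin encoder map) and private helpers to the tests below.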
class HpackHuffmanAggregatorPeer {
 public:
  explicit HpackHuffmanAggregatorPeer(HpackHuffmanAggregator* agg)
      : agg_(agg) {}

  std::vector<size_t>* counts() {
    return &agg_->counts_;
  }
  HpackHuffmanAggregator::OriginEncoders* encoders() {
    return &agg_->encoders_;
  }
  size_t total_counts() {
    return agg_->total_counts_;
  }
  void set_total_counts(size_t total_counts) {
    agg_->total_counts_ = total_counts;
  }
  void set_max_encoders(size_t max_encoders) {
    agg_->max_encoders_ = max_encoders;
  }
  static bool IsCrossOrigin(const HttpRequestInfo& request) {
    return HpackHuffmanAggregator::IsCrossOrigin(request);
  }
  static void CreateSpdyHeadersFromHttpResponse(
      const HttpResponseHeaders& headers,
      SpdyHeaderBlock* headers_out) {
    HpackHuffmanAggregator::CreateSpdyHeadersFromHttpResponse(
        headers, headers_out);
  }
  HpackEncoder* ObtainEncoder(const SpdySessionKey& key) {
    return agg_->ObtainEncoder(key);
  }
  void PublishCounts() {
    agg_->PublishCounts();
  }

 private:
  HpackHuffmanAggregator* agg_;
};

}  // namespace test

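// Fixture owning an aggregator and a peer bound to it.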
class HpackHuffmanAggregatorTest : public ::testing::Test {
 protected:
  HpackHuffmanAggregatorTest()
      : peer_(&agg_) {}

  HpackHuffmanAggregator agg_;
  test::HpackHuffmanAggregatorPeer peer_;
};

TEST_F(HpackHuffmanAggregatorTest, CrossOriginDetermination) {
  HttpRequestInfo request;
  request.url = GURL("https://www.foo.com/a/page");

  // Main frame load without a referer.
  request.load_flags = LOAD_MAIN_FRAME;
  EXPECT_FALSE(peer_.IsCrossOrigin(request));

  // Non-main-frame load without a referer. Treated as cross-origin.
  request.load_flags = 0;
  EXPECT_TRUE(peer_.IsCrossOrigin(request));

  // Main frame load with a different referer origin.
  request.load_flags = LOAD_MAIN_FRAME;
  request.extra_headers.SetHeader(HttpRequestHeaders::kReferer,
                                  "https://www.bar.com/other/page");
  EXPECT_FALSE(peer_.IsCrossOrigin(request));

  // Non-main-frame load with a different referer origin.
  request.load_flags = 0;
  EXPECT_TRUE(peer_.IsCrossOrigin(request));

  // Non-main-frame load with the same referer origin.
  request.extra_headers.SetHeader(HttpRequestHeaders::kReferer,
                                  "https://www.foo.com/other/page");
  EXPECT_FALSE(peer_.IsCrossOrigin(request));

  // Non-main-frame load with the same referer host but a different scheme.
  request.extra_headers.SetHeader(HttpRequestHeaders::kReferer,
                                  "http://www.foo.com/other/page");
  EXPECT_TRUE(peer_.IsCrossOrigin(request));
}

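// ObtainEncoder() keeps at most max_encoders_ per-origin encoders alive,
// evicting the least-recently-used entry once that bound is exceeded.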
TEST_F(HpackHuffmanAggregatorTest, EncoderLRUQueue) {
  peer_.set_max_encoders(2);

  SpdySessionKey key1(HostPortPair("one.com", 443), ProxyServer::Direct(),
                      PRIVACY_MODE_ENABLED);
  SpdySessionKey key2(HostPortPair("two.com", 443), ProxyServer::Direct(),
                      PRIVACY_MODE_ENABLED);
  SpdySessionKey key3(HostPortPair("three.com", 443), ProxyServer::Direct(),
                      PRIVACY_MODE_ENABLED);

  // Creates one.com.
  HpackEncoder* one = peer_.ObtainEncoder(key1);
  EXPECT_EQ(1u, peer_.encoders()->size());

  // Creates two.com. No evictions.
  HpackEncoder* two = peer_.ObtainEncoder(key2);
  EXPECT_EQ(2u, peer_.encoders()->size());
  EXPECT_NE(one, two);

  // Touch one.com.
  EXPECT_EQ(one, peer_.ObtainEncoder(key1));

  // Creates three.com. Evicts two.com, as it's least-recently used.
  HpackEncoder* three = peer_.ObtainEncoder(key3);
  EXPECT_EQ(one, peer_.ObtainEncoder(key1));
  EXPECT_NE(one, three);
  EXPECT_EQ(2u, peer_.encoders()->size());
}

TEST_F(HpackHuffmanAggregatorTest, PublishCounts) {
  (*peer_.counts())[0] = 1;
  (*peer_.counts())[255] = 10;
  (*peer_.counts())[128] = 101;
  peer_.set_total_counts(112);

  peer_.PublishCounts();

  // Internal counts were reset after being published.
  EXPECT_THAT(*peer_.counts(), Each(Eq(0u)));
  EXPECT_EQ(0u, peer_.total_counts());

  // Verify histogram counts match the expectation.
  scoped_ptr<base::HistogramSamples> samples =
      base::StatisticsRecorder::FindHistogram(kHistogramName)
          ->SnapshotSamples();

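  // PublishCounts() records counts_[i] as histogram sample (i + 1), so the
  // count for character value i lands in bucket (i + 1) and bucket 0 stays
  // empty (presumably to keep zero out of the histogram's underflow bucket).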
  EXPECT_EQ(0, samples->GetCount(0));
  EXPECT_EQ(1, samples->GetCount(1));
  EXPECT_EQ(101, samples->GetCount(129));
  EXPECT_EQ(10, samples->GetCount(256));
  EXPECT_EQ(112, samples->TotalCount());

  // Publish a second round of counts.
  (*peer_.counts())[1] = 32;
  (*peer_.counts())[128] = 5;
  peer_.set_total_counts(37);

  peer_.PublishCounts();

  // Verify they've been aggregated into the previous counts.
  samples = base::StatisticsRecorder::FindHistogram(kHistogramName)
      ->SnapshotSamples();

  EXPECT_EQ(0, samples->GetCount(0));
  EXPECT_EQ(1, samples->GetCount(1));
  EXPECT_EQ(32, samples->GetCount(2));
  EXPECT_EQ(106, samples->GetCount(129));
  EXPECT_EQ(10, samples->GetCount(256));
  EXPECT_EQ(149, samples->TotalCount());
}

TEST_F(HpackHuffmanAggregatorTest, CreateSpdyResponseHeaders) {
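  // HttpResponseHeaders expects raw headers in which each header line is
  // terminated by '\0' and the whole block ends with an extra '\0'; the
  // "arraysize - 1" below drops the string literal's implicit trailing NUL.
  // Mixed case and stray whitespace exercise header normalization.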
  const char kRawHeaders[] =
      "HTTP/1.1 202 Accepted \0"
      "Content-TYPE : text/html; charset=utf-8 \0"
      "Set-Cookie: foo=bar \0"
      "Set-Cookie: baz=bing \0"
      "Cache-Control: pragma=no-cache \0"
      "Cache-CONTROL: expires=12345 \0\0";

  scoped_refptr<HttpResponseHeaders> parsed_headers(new HttpResponseHeaders(
      std::string(kRawHeaders, arraysize(kRawHeaders) - 1)));

  SpdyHeaderBlock headers;
  peer_.CreateSpdyHeadersFromHttpResponse(*parsed_headers, &headers);
  EXPECT_THAT(headers, ElementsAre(
      Pair(":status", "202"),
      Pair("cache-control", std::string("pragma=no-cache\0expires=12345", 29)),
      Pair("content-type", "text/html; charset=utf-8"),
      Pair("set-cookie", std::string("foo=bar\0baz=bing", 16))));
}

}  // namespace net