// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// The basic usage of the Filter interface is described in the comment at
// the beginning of filter.h. If Filter::Factory is passed a vector of
// size greater than 1, that interface is implemented by a series of filters
// connected in a chain. In such a case, the first filter
// in the chain proxies calls to ReadData() so that its return values
// apply to the entire chain.
//
// In a filter chain, the data flows from the first filter (held by the
// caller) down the chain. When ReadData() is called on any filter
// except for the last filter, it proxies the call down the chain,
// filling in the input buffers of subsequent filters if needed (==
// that filter's last_status() value is FILTER_NEED_MORE_DATA) and
// available (== the current filter has data it can output). The last
// Filter will then output data if possible, and return
// FILTER_NEED_MORE_DATA if not. Because the indirection pushes
// data along the filter chain at each level if it's available and the
// next filter needs it, a return value of FILTER_NEED_MORE_DATA from the
// final filter will apply to the entire chain.
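//
// A minimal sketch of how a caller drives a chain (the buffer names below are
// placeholders; the real caller adds its own buffer management and error
// handling): copy raw response bytes into the head filter's stream_buffer(),
// hand them over with FlushStreamBuffer(), then drain ReadData() until it
// stops returning FILTER_OK:
//
//   memcpy(filter->stream_buffer()->data(), raw_data, raw_len);
//   filter->FlushStreamBuffer(raw_len);
//   Filter::FilterStatus status;
//   do {
//     int out_len = out_capacity;
//     status = filter->ReadData(out_buf, &out_len);
//     // Consume |out_len| bytes of |out_buf| here.
//   } while (status == Filter::FILTER_OK);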

#include "net/filter/filter.h"

#include "base/files/file_path.h"
#include "base/strings/string_util.h"
#include "base/values.h"
#include "net/base/io_buffer.h"
#include "net/base/sdch_net_log_params.h"
#include "net/filter/brotli_filter.h"
#include "net/filter/gzip_filter.h"
#include "net/filter/sdch_filter.h"
#include "net/log/net_log_event_type.h"
#include "net/log/net_log_with_source.h"
#include "net/url_request/url_request_context.h"
#include "url/gurl.h"

namespace net {

namespace {

// Filter types (using canonical lower case only):
const char kBrotli[] = "br";
const char kDeflate[] = "deflate";
const char kGZip[] = "gzip";
const char kXGZip[] = "x-gzip";
const char kSdch[] = "sdch";
// compress and x-compress are currently not supported. If we decide to support
// them, we'll need the same mime type compatibility hack we have for gzip. For
// more information, see Firefox's nsHttpChannel::ProcessNormal.

// Mime types:
const char kTextHtml[] = "text/html";

// Buffer size allocated when de-compressing data.
const int kFilterBufSize = 32 * 1024;

void LogSdchProblem(const FilterContext& filter_context,
                    SdchProblemCode problem) {
  SdchManager::SdchErrorRecovery(problem);
  filter_context.GetNetLog().AddEvent(
      NetLogEventType::SDCH_DECODING_ERROR,
      base::Bind(&NetLogSdchResourceProblemCallback, problem));
}

std::string FilterTypeAsString(Filter::FilterType type_id) {
  switch (type_id) {
    case Filter::FILTER_TYPE_BROTLI:
      return "FILTER_TYPE_BROTLI";
    case Filter::FILTER_TYPE_DEFLATE:
      return "FILTER_TYPE_DEFLATE";
    case Filter::FILTER_TYPE_GZIP:
      return "FILTER_TYPE_GZIP";
    case Filter::FILTER_TYPE_GZIP_HELPING_SDCH:
      return "FILTER_TYPE_GZIP_HELPING_SDCH";
    case Filter::FILTER_TYPE_SDCH:
      return "FILTER_TYPE_SDCH";
    case Filter::FILTER_TYPE_SDCH_POSSIBLE:
      return "FILTER_TYPE_SDCH_POSSIBLE";
    case Filter::FILTER_TYPE_UNSUPPORTED:
      return "FILTER_TYPE_UNSUPPORTED";
    case Filter::FILTER_TYPE_MAX:
      return "FILTER_TYPE_MAX";
  }
  return "";
}

}  // namespace

FilterContext::~FilterContext() {
}

Filter::~Filter() {}

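// Factory builds the chain by repeatedly prepending, so the last entry of
// |filter_types| ends up as the head of the returned list, i.e. the filter
// the caller reads from.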
// static
std::unique_ptr<Filter> Filter::Factory(
    const std::vector<FilterType>& filter_types,
    const FilterContext& filter_context) {
  if (filter_types.empty())
    return nullptr;

  std::unique_ptr<Filter> filter_list = nullptr;  // Linked list of filters.
  for (size_t i = 0; i < filter_types.size(); i++) {
    filter_list = PrependNewFilter(filter_types[i], filter_context,
                                   kFilterBufSize, std::move(filter_list));
    if (!filter_list)
      return nullptr;
  }
  return filter_list;
}

// static
std::unique_ptr<Filter> Filter::GZipFactory() {
  return InitGZipFilter(FILTER_TYPE_GZIP, kFilterBufSize);
}

// static
std::unique_ptr<Filter> Filter::FactoryForTests(
    const std::vector<FilterType>& filter_types,
    const FilterContext& filter_context,
    int buffer_size) {
  if (filter_types.empty())
    return nullptr;

  std::unique_ptr<Filter> filter_list;  // Linked list of filters.
  for (size_t i = 0; i < filter_types.size(); i++) {
    filter_list = PrependNewFilter(filter_types[i], filter_context, buffer_size,
                                   std::move(filter_list));
    if (!filter_list)
      return nullptr;
  }
  return filter_list;
}

Filter::FilterStatus Filter::ReadData(char* dest_buffer, int* dest_len) {
  const int dest_buffer_capacity = *dest_len;
  if (last_status_ == FILTER_ERROR)
    return last_status_;
  if (!next_filter_.get())
    return last_status_ = ReadFilteredData(dest_buffer, dest_len);

  // This filter needs more data, but it's not clear that the rest of
  // the chain does; delegate the actual status return to the next filter.
  if (last_status_ == FILTER_NEED_MORE_DATA && !stream_data_len()) {
    last_status_ = next_filter_->ReadData(dest_buffer, dest_len);
    return last_status_;
  }

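  // Pump our output into the next filter's input buffer and re-read from it,
  // repeating while we still have data to give but the chain has produced no
  // output for the caller yet.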
  do {
    if (next_filter_->last_status() == FILTER_NEED_MORE_DATA) {
      PushDataIntoNextFilter();
      if (FILTER_ERROR == last_status_) {
        *dest_len = 0;
        return FILTER_ERROR;
      }
    }
    *dest_len = dest_buffer_capacity;  // Reset the input/output parameter.

    next_filter_->ReadData(dest_buffer, dest_len);
    if (FILTER_NEED_MORE_DATA == last_status_) {
      last_status_ = next_filter_->last_status();
      return last_status_;
    }

    // In the case where this filter has data internally, and indicates as
    // much with a last_status_ of FILTER_OK, but at the same time the next
    // filter in the chain reported FILTER_NEED_MORE_DATA, we have to be
    // careful not to confuse the caller. The API confusion would appear if we
    // returned FILTER_OK (suggesting we have more data in aggregate) and yet
    // didn't populate our output buffer. When that is the case, we need to
    // alternately call this filter and next_filter_ until we get out of this
    // state (by pumping data into the next filter until it either outputs
    // data, or runs out of data and reports FILTER_NEED_MORE_DATA).
  } while (FILTER_OK == last_status_ &&
           FILTER_NEED_MORE_DATA == next_filter_->last_status() &&
           0 == *dest_len);

  if (next_filter_->last_status() == FILTER_ERROR) {
    last_status_ = FILTER_ERROR;
    *dest_len = 0;
    return FILTER_ERROR;
  }
  last_status_ = FILTER_OK;
  return FILTER_OK;
}

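// Accepts |stream_data_len| bytes that the caller has just written into
// stream_buffer(); fails if the length is out of range or if the previously
// flushed data has not yet been fully consumed.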
bool Filter::FlushStreamBuffer(int stream_data_len) {
  DCHECK_LE(stream_data_len, stream_buffer_size_);
  if (stream_data_len <= 0 || stream_data_len > stream_buffer_size_)
    return false;

  DCHECK(stream_buffer());
  // Bail out if there is more data in the stream buffer to be filtered.
  if (!stream_buffer() || stream_data_len_)
    return false;

  next_stream_data_ = stream_buffer()->data();
  stream_data_len_ = stream_data_len;
  last_status_ = FILTER_OK;
  return true;
}

// static
Filter::FilterType Filter::ConvertEncodingToType(
    const std::string& filter_type) {
  FilterType type_id;
  if (base::LowerCaseEqualsASCII(filter_type, kBrotli)) {
    type_id = FILTER_TYPE_BROTLI;
  } else if (base::LowerCaseEqualsASCII(filter_type, kDeflate)) {
    type_id = FILTER_TYPE_DEFLATE;
  } else if (base::LowerCaseEqualsASCII(filter_type, kGZip) ||
             base::LowerCaseEqualsASCII(filter_type, kXGZip)) {
    type_id = FILTER_TYPE_GZIP;
  } else if (base::LowerCaseEqualsASCII(filter_type, kSdch)) {
    type_id = FILTER_TYPE_SDCH;
  } else {
    // Note we also consider "identity" and "uncompressed" UNSUPPORTED, as the
    // filter should be disabled in such cases.
    type_id = FILTER_TYPE_UNSUPPORTED;
  }
  return type_id;
}

// static
void Filter::FixupEncodingTypes(
    const FilterContext& filter_context,
    std::vector<FilterType>* encoding_types) {
  std::string mime_type;
  bool success = filter_context.GetMimeType(&mime_type);
  DCHECK(success || mime_type.empty());

  // If the request was for SDCH content, then we might need additional fixups.
  if (!filter_context.SdchDictionariesAdvertised()) {
    // It was not an SDCH request, so we'll just record stats.
    if (1 < encoding_types->size()) {
      // Multiple filters were intended to only be used for SDCH (thus far!)
      LogSdchProblem(filter_context, SDCH_MULTIENCODING_FOR_NON_SDCH_REQUEST);
    }
    if ((1 == encoding_types->size()) &&
        (FILTER_TYPE_SDCH == encoding_types->front())) {
      LogSdchProblem(filter_context,
                     SDCH_SDCH_CONTENT_ENCODE_FOR_NON_SDCH_REQUEST);
    }
    return;
  }

  // The request was tagged as an SDCH request, which means the server supplied
  // a dictionary, and we advertised it in the request. Some proxies will do
  // very strange things to the request, or the response, so we have to handle
  // them gracefully.

  // If content encoding included SDCH, then everything is "relatively" fine.
  if (!encoding_types->empty() &&
      (FILTER_TYPE_SDCH == encoding_types->front())) {
    // Some proxies (found currently in Argentina) strip the Content-Encoding
    // text from "sdch,gzip" to a mere "sdch" without modifying the compressed
    // payload. To handle this gracefully, we simulate the "probably" deleted
    // ",gzip" by appending a tentative gzip decode, which will default to a
    // no-op pass-through filter if it doesn't get gzip headers where expected.
    if (1 == encoding_types->size()) {
      encoding_types->push_back(FILTER_TYPE_GZIP_HELPING_SDCH);
      LogSdchProblem(filter_context, SDCH_OPTIONAL_GUNZIP_ENCODING_ADDED);
    }
    return;
  }

  // There are now several cases to handle for an SDCH request. Foremost, if
  // the outbound request was stripped so as not to advertise support for
  // encodings, we might get back content with no encoding, or (for example)
  // just gzip. We have to be sure that any changes we make allow for such
  // minimal coding to work. That issue is why we use TENTATIVE filters if we
  // add any, as those filters sniff the content, and act as pass-through
  // filters if headers are not found.

  // If the outbound GET is not modified, then the server will generally try
  // to send us SDCH encoded content. As that content returns, there are
  // several corruptions of the "Content-Encoding" header that proxies may
  // perform (and that have been detected in the wild). We already dealt with
  // an honest content encoding of "sdch,gzip" being corrupted into "sdch"
  // with no change to the actual content. Another common corruption is to
  // either discard the accurate content encoding, or to replace it with gzip
  // only (again, with no change in actual content). The last observed
  // corruption is to actually change the content, such as by re-gzipping it,
  // and that may happen along with corruption of the stated content encoding
  // (wow!).

  // The one unresolved failure mode comes when we advertise a dictionary, and
  // the server tries to *send* a gzipped file (not gzip encode content), and
  // then we could do a gzip decode :-(. Since SDCH is only (currently)
  // supported server side on paths that only send HTML content, this mode has
  // never surfaced in the wild (and is unlikely to).
  // We will gather a lot of stats as we perform the fixups.
  if (base::StartsWith(mime_type, kTextHtml,
                       base::CompareCase::INSENSITIVE_ASCII)) {
    // Suspicious case: Advertised dictionary, but server didn't use sdch, and
    // we're HTML tagged.
    if (encoding_types->empty()) {
      LogSdchProblem(filter_context, SDCH_ADDED_CONTENT_ENCODING);
    } else if (1 == encoding_types->size()) {
      LogSdchProblem(filter_context, SDCH_FIXED_CONTENT_ENCODING);
    } else {
      LogSdchProblem(filter_context, SDCH_FIXED_CONTENT_ENCODINGS);
    }
  } else {
    // Remarkable case!?! We advertised an SDCH dictionary, but the
    // content-encoding was not marked for SDCH processing: why did the server
    // suggest an SDCH dictionary in the first place? Also, the content isn't
    // tagged as HTML, despite the fact that SDCH encoding is most likely for
    // HTML: did some anti-virus system strip this tag (sometimes they strip
    // accept-encoding headers on the request)? Does the mime type not start
    // with "text/html" for some other reason? We'll report this as a fixup to
    // a binary file, but it probably really is text/html (somehow).
    if (encoding_types->empty()) {
      LogSdchProblem(filter_context, SDCH_BINARY_ADDED_CONTENT_ENCODING);
    } else if (1 == encoding_types->size()) {
      LogSdchProblem(filter_context, SDCH_BINARY_FIXED_CONTENT_ENCODING);
    } else {
      LogSdchProblem(filter_context, SDCH_BINARY_FIXED_CONTENT_ENCODINGS);
    }
  }

  // Leave the existing encoding type to be processed first, and add our
  // tentative decodings to be done afterwards. Vodafone UK reportedly will
  // perform a second layer of gzip encoding atop the server's sdch,gzip
  // encoding, and then claim that the content encoding is a mere gzip. As a
  // result we'll need (in that case) to do the gunzip, plus our tentative
  // gunzip and tentative SDCH decoding.
  // This approach nicely handles the empty() list as well, and should work
  // with other (as yet undiscovered) proxies that choose to re-compress with
  // some other encoding (such as bzip2, etc.).
  encoding_types->insert(encoding_types->begin(),
                         FILTER_TYPE_GZIP_HELPING_SDCH);
  encoding_types->insert(encoding_types->begin(), FILTER_TYPE_SDCH_POSSIBLE);
  return;
}

std::string Filter::OrderedFilterList() const {
  if (next_filter_) {
    return FilterTypeAsString(type_id_) + "," +
           next_filter_->OrderedFilterList();
  } else {
    return FilterTypeAsString(type_id_);
  }
}

Filter::Filter(FilterType type_id)
    : stream_buffer_(nullptr),
      stream_buffer_size_(0),
      next_stream_data_(nullptr),
      stream_data_len_(0),
      last_status_(FILTER_NEED_MORE_DATA),
      type_id_(type_id) {}

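// Copies as much of the buffered input as fits into |dest_buffer|, advancing
// the internal cursor; returns FILTER_NEED_MORE_DATA once the buffered data
// has been fully consumed.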
Filter::FilterStatus Filter::CopyOut(char* dest_buffer, int* dest_len) {
  int out_len;
  int input_len = *dest_len;
  *dest_len = 0;

  if (0 == stream_data_len_)
    return Filter::FILTER_NEED_MORE_DATA;

  out_len = std::min(input_len, stream_data_len_);
  memcpy(dest_buffer, next_stream_data_, out_len);
  *dest_len += out_len;
  stream_data_len_ -= out_len;
  if (0 == stream_data_len_) {
    next_stream_data_ = nullptr;
    return Filter::FILTER_NEED_MORE_DATA;
  } else {
    next_stream_data_ += out_len;
    return Filter::FILTER_OK;
  }
}

// static
std::unique_ptr<Filter> Filter::InitBrotliFilter(FilterType type_id,
                                                 int buffer_size) {
  std::unique_ptr<Filter> brotli_filter(CreateBrotliFilter(type_id));
  if (!brotli_filter.get())
    return nullptr;

  brotli_filter->InitBuffer(buffer_size);
  return brotli_filter;
}

// static
std::unique_ptr<Filter> Filter::InitGZipFilter(FilterType type_id,
                                               int buffer_size) {
  std::unique_ptr<GZipFilter> gz_filter(new GZipFilter(type_id));
  gz_filter->InitBuffer(buffer_size);
  return gz_filter->InitDecoding(type_id) ? std::move(gz_filter) : nullptr;
}

// static
std::unique_ptr<Filter> Filter::InitSdchFilter(
    FilterType type_id,
    const FilterContext& filter_context,
    int buffer_size) {
  std::unique_ptr<SdchFilter> sdch_filter(
      new SdchFilter(type_id, filter_context));
  sdch_filter->InitBuffer(buffer_size);
  return sdch_filter->InitDecoding(type_id) ? std::move(sdch_filter) : nullptr;
}

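// Creates a single filter of |type_id| and splices the existing chain in
// behind it as next_filter_, so the newly created filter becomes the new head.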
// static
std::unique_ptr<Filter> Filter::PrependNewFilter(
    FilterType type_id,
    const FilterContext& filter_context,
    int buffer_size,
    std::unique_ptr<Filter> filter_list) {
  std::unique_ptr<Filter> first_filter;  // Soon to be start of chain.
  switch (type_id) {
    case FILTER_TYPE_BROTLI:
      first_filter = InitBrotliFilter(type_id, buffer_size);
      break;
    case FILTER_TYPE_GZIP_HELPING_SDCH:
    case FILTER_TYPE_DEFLATE:
    case FILTER_TYPE_GZIP:
      first_filter = InitGZipFilter(type_id, buffer_size);
      break;
    case FILTER_TYPE_SDCH:
    case FILTER_TYPE_SDCH_POSSIBLE:
      if (filter_context.GetURLRequestContext()->sdch_manager()) {
        first_filter = InitSdchFilter(type_id, filter_context, buffer_size);
      }
      break;
    default:
      break;
  }

  if (!first_filter.get())
    return nullptr;

  first_filter->next_filter_ = std::move(filter_list);
  return first_filter;
}

void Filter::InitBuffer(int buffer_size) {
  DCHECK(!stream_buffer());
  DCHECK_GT(buffer_size, 0);
  stream_buffer_ = new IOBuffer(buffer_size);
  stream_buffer_size_ = buffer_size;
}

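// Reads this filter's output directly into the next filter's input buffer,
// then flushes that buffer so the next filter can consume it.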
void Filter::PushDataIntoNextFilter() {
  IOBuffer* next_buffer = next_filter_->stream_buffer();
  int next_size = next_filter_->stream_buffer_size();
  last_status_ = ReadFilteredData(next_buffer->data(), &next_size);
  if (FILTER_ERROR != last_status_)
    next_filter_->FlushStreamBuffer(next_size);
}

}  // namespace net