| Index: net/url_request/url_request_http_job.cc
|
| diff --git a/net/url_request/url_request_http_job.cc b/net/url_request/url_request_http_job.cc
|
| index d2a4797bff6a8310d6aede6477d1cf2198a2a28b..11c09420fc70358b8a61dcc7e112ad0d8449c049 100644
|
| --- a/net/url_request/url_request_http_job.cc
|
| +++ b/net/url_request/url_request_http_job.cc
|
| @@ -32,6 +32,11 @@
|
| #include "net/base/url_util.h"
|
| #include "net/cert/cert_status_flags.h"
|
| #include "net/cookies/cookie_store.h"
|
| +#include "net/filter/brotli_source_stream.h"
|
| +#include "net/filter/filter_source_stream.h"
|
| +#include "net/filter/gzip_source_stream.h"
|
| +#include "net/filter/sdch_source_stream.h"
|
| +#include "net/filter/source_stream.h"
|
| #include "net/http/http_content_disposition.h"
|
| #include "net/http/http_network_session.h"
|
| #include "net/http/http_request_headers.h"
|
| @@ -62,6 +67,12 @@ static const char kAvailDictionaryHeader[] = "Avail-Dictionary";
|
|
|
| namespace {
|
|
|
| +const char kDeflate[] = "deflate";
|
| +const char kGZip[] = "gzip";
|
| +const char kSdch[] = "sdch";
|
| +const char kXGZip[] = "x-gzip";
|
| +const char kBrotli[] = "br";
|
| +
|
| // True if the request method is "safe" (per section 4.2.1 of RFC 7231).
|
| bool IsMethodSafe(const std::string& method) {
|
| return method == "GET" || method == "HEAD" || method == "OPTIONS" ||
|
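For reference, these are the Content-Encoding tokens the job recognizes further
down in SetUpSourceStream(), matched case-insensitively. A standalone sketch of
that mapping, assuming it were factored into a helper (TypeForEncoding and the
TYPE_NONE "no decoding" value are illustrative; the patch itself does the same
comparisons inline):

    SourceStream::SourceType TypeForEncoding(const std::string& token) {
      // Same case-insensitive comparisons as the loop in
      // URLRequestHttpJob::SetUpSourceStream().
      if (base::LowerCaseEqualsASCII(token, kBrotli))
        return SourceStream::TYPE_BROTLI;
      if (base::LowerCaseEqualsASCII(token, kDeflate))
        return SourceStream::TYPE_DEFLATE;
      if (base::LowerCaseEqualsASCII(token, kGZip) ||
          base::LowerCaseEqualsASCII(token, kXGZip))
        return SourceStream::TYPE_GZIP;
      if (base::LowerCaseEqualsASCII(token, kSdch))
        return SourceStream::TYPE_SDCH;
      return SourceStream::TYPE_NONE;  // Assumed enumerator for "no decoding".
    }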
| @@ -178,85 +189,67 @@ net::URLRequestRedirectJob* MaybeInternallyRedirect(
|
|
|
| namespace net {
|
|
|
| -class URLRequestHttpJob::HttpFilterContext : public FilterContext {
|
| +class URLRequestHttpJob::SdchContext : public SdchPolicyDelegate::Context {
|
| public:
|
| - explicit HttpFilterContext(URLRequestHttpJob* job);
|
| - ~HttpFilterContext() override;
|
| -
|
| - // FilterContext implementation.
|
| + explicit SdchContext(URLRequestHttpJob* job);
|
| + ~SdchContext() override;
|
| + // SdchPolicyDelegate::Context implementation.
|
| bool GetMimeType(std::string* mime_type) const override;
|
| - bool GetURL(GURL* gurl) const override;
|
| - base::Time GetRequestTime() const override;
|
| + bool GetURL(GURL* url) const override;
|
| + // base::Time GetRequestTime() const override;
|
| bool IsCachedContent() const override;
|
| + SdchManager* GetSdchManager() const override;
|
| SdchManager::DictionarySet* SdchDictionariesAdvertised() const override;
|
| - int64_t GetByteReadCount() const override;
|
| + // int64_t GetByteReadCount() const override;
|
| int GetResponseCode() const override;
|
| - const URLRequestContext* GetURLRequestContext() const override;
|
| - void RecordPacketStats(StatisticSelector statistic) const override;
|
| const BoundNetLog& GetNetLog() const override;
|
|
|
| private:
|
| + // URLRequestHttpJob owns SdchPolicyDelegate, which owns this context, so it
|
| + // is okay to hold a raw pointer to |job_|.
|
| URLRequestHttpJob* job_;
|
|
|
| - // URLRequestHttpJob may be detached from URLRequest, but we still need to
|
| + // URLRequestHttpJob may be detached from URLRequest, but it still needs to
|
| // return something.
|
| BoundNetLog dummy_log_;
|
|
|
| - DISALLOW_COPY_AND_ASSIGN(HttpFilterContext);
|
| + DISALLOW_COPY_AND_ASSIGN(SdchContext);
|
| };
|
|
|
| -URLRequestHttpJob::HttpFilterContext::HttpFilterContext(URLRequestHttpJob* job)
|
| - : job_(job) {
|
| - DCHECK(job_);
|
| -}
|
| +URLRequestHttpJob::SdchContext::SdchContext(URLRequestHttpJob* job)
|
| + : job_(job) {}
|
|
|
| -URLRequestHttpJob::HttpFilterContext::~HttpFilterContext() {
|
| -}
|
| +URLRequestHttpJob::SdchContext::~SdchContext() {}
|
|
|
| -bool URLRequestHttpJob::HttpFilterContext::GetMimeType(
|
| - std::string* mime_type) const {
|
| +bool URLRequestHttpJob::SdchContext::GetMimeType(std::string* mime_type) const {
|
| return job_->GetMimeType(mime_type);
|
| }
|
|
|
| -bool URLRequestHttpJob::HttpFilterContext::GetURL(GURL* gurl) const {
|
| +bool URLRequestHttpJob::SdchContext::GetURL(GURL* gurl) const {
|
| if (!job_->request())
|
| return false;
|
| *gurl = job_->request()->url();
|
| return true;
|
| }
|
|
|
| -base::Time URLRequestHttpJob::HttpFilterContext::GetRequestTime() const {
|
| - return job_->request() ? job_->request()->request_time() : base::Time();
|
| +bool URLRequestHttpJob::SdchContext::IsCachedContent() const {
|
| + return job_->is_cached_content_;
|
| }
|
|
|
| -bool URLRequestHttpJob::HttpFilterContext::IsCachedContent() const {
|
| - return job_->is_cached_content_;
|
| +SdchManager* URLRequestHttpJob::SdchContext::GetSdchManager() const {
|
| + return job_->request() ? job_->request()->context()->sdch_manager() : nullptr;
|
| }
|
|
|
| SdchManager::DictionarySet*
|
| -URLRequestHttpJob::HttpFilterContext::SdchDictionariesAdvertised() const {
|
| +URLRequestHttpJob::SdchContext::SdchDictionariesAdvertised() const {
|
| return job_->dictionaries_advertised_.get();
|
| }
|
|
|
| -int64_t URLRequestHttpJob::HttpFilterContext::GetByteReadCount() const {
|
| - return job_->prefilter_bytes_read();
|
| -}
|
| -
|
| -int URLRequestHttpJob::HttpFilterContext::GetResponseCode() const {
|
| +int URLRequestHttpJob::SdchContext::GetResponseCode() const {
|
| return job_->GetResponseCode();
|
| }
|
|
|
| -const URLRequestContext*
|
| -URLRequestHttpJob::HttpFilterContext::GetURLRequestContext() const {
|
| - return job_->request() ? job_->request()->context() : NULL;
|
| -}
|
| -
|
| -void URLRequestHttpJob::HttpFilterContext::RecordPacketStats(
|
| - StatisticSelector statistic) const {
|
| - job_->RecordPacketStats(statistic);
|
| -}
|
| -
|
| -const BoundNetLog& URLRequestHttpJob::HttpFilterContext::GetNetLog() const {
|
| +const BoundNetLog& URLRequestHttpJob::SdchContext::GetNetLog() const {
|
| return job_->request() ? job_->request()->net_log() : dummy_log_;
|
| }
|
|
|
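The SdchContext above replaces the old catch-all HttpFilterContext: SDCH policy
code now reaches back into the job only through this narrow set of accessors,
which keeps SdchPolicyDelegate free of URLRequestHttpJob internals and lets it
be exercised against a fake context in tests. A rough illustration of code
written purely against that interface; ShouldAttemptSdchDecode is a made-up
helper for this sketch, not something the patch or SdchPolicyDelegate defines:

    // Illustrative only: a policy-style check that needs nothing beyond the
    // Context accessors implemented above.
    bool ShouldAttemptSdchDecode(const SdchPolicyDelegate::Context& context) {
      // No SdchManager means SDCH is not enabled for this URLRequestContext.
      if (!context.GetSdchManager())
        return false;
      // Only attempt a dictionary decode if dictionaries were actually
      // advertised on the request.
      return context.SdchDictionariesAdvertised() != nullptr;
    }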
| @@ -309,7 +302,6 @@ URLRequestHttpJob::URLRequestHttpJob(
|
| bytes_observed_in_packets_(0),
|
| request_time_snapshot_(),
|
| final_packet_time_(),
|
| - filter_context_(new HttpFilterContext(this)),
|
| on_headers_received_callback_(
|
| base::Bind(&URLRequestHttpJob::OnHeadersReceivedCallback,
|
| base::Unretained(this))),
|
| @@ -331,14 +323,10 @@ URLRequestHttpJob::~URLRequestHttpJob() {
|
| DCHECK(!sdch_test_control_ || !sdch_test_activated_);
|
| if (!is_cached_content_) {
|
| if (sdch_test_control_)
|
| - RecordPacketStats(FilterContext::SDCH_EXPERIMENT_HOLDBACK);
|
| + RecordPacketStats(SdchPolicyDelegate::Context::SDCH_EXPERIMENT_HOLDBACK);
|
| if (sdch_test_activated_)
|
| - RecordPacketStats(FilterContext::SDCH_EXPERIMENT_DECODE);
|
| + RecordPacketStats(SdchPolicyDelegate::Context::SDCH_EXPERIMENT_DECODE);
|
| }
|
| - // Make sure SDCH filters are told to emit histogram data while
|
| - // filter_context_ is still alive.
|
| - DestroyFilters();
|
| -
|
| DoneWithRequest(ABORTED);
|
| }
|
|
|
| @@ -1158,27 +1146,79 @@ void URLRequestHttpJob::PopulateNetErrorDetails(
|
| return transaction_->PopulateNetErrorDetails(details);
|
| }
|
|
|
| -std::unique_ptr<Filter> URLRequestHttpJob::SetupFilter() const {
|
| +std::unique_ptr<SourceStream> URLRequestHttpJob::SetUpSourceStream() {
|
| DCHECK(transaction_.get());
|
| if (!response_info_)
|
| return nullptr;
|
|
|
| - std::vector<Filter::FilterType> encoding_types;
|
| - std::string encoding_type;
|
| HttpResponseHeaders* headers = GetResponseHeaders();
|
| + std::string type;
|
| + std::vector<SourceStream::SourceType> types;
|
| size_t iter = 0;
|
| - while (headers->EnumerateHeader(&iter, "Content-Encoding", &encoding_type)) {
|
| - encoding_types.push_back(Filter::ConvertEncodingToType(encoding_type));
|
| + while (headers->EnumerateHeader(&iter, "Content-Encoding", &type)) {
|
| + if (base::LowerCaseEqualsASCII(type, kBrotli)) {
|
| + types.push_back(SourceStream::TYPE_BROTLI);
|
| + } else if (base::LowerCaseEqualsASCII(type, kDeflate)) {
|
| + types.push_back(SourceStream::TYPE_DEFLATE);
|
| + } else if (base::LowerCaseEqualsASCII(type, kGZip) ||
|
| + base::LowerCaseEqualsASCII(type, kXGZip)) {
|
| + types.push_back(SourceStream::TYPE_GZIP);
|
| + } else if (base::LowerCaseEqualsASCII(type, kSdch)) {
|
| + types.push_back(SourceStream::TYPE_SDCH);
|
| + }
|
| }
|
|
|
| - // Even if encoding types are empty, there is a chance that we need to add
|
| - // some decoding, as some proxies strip encoding completely. In such cases,
|
| - // we may need to add (for example) SDCH filtering (when the context suggests
|
| - // it is appropriate).
|
| - Filter::FixupEncodingTypes(*filter_context_, &encoding_types);
|
| + // SDCH-specific hack: if SDCH is the only declared encoding, tentatively add
|
| + // a gzip filter in fallback mode between it and the raw response.
|
| + // Some proxies (found currently in Argentina) strip the Content-Encoding
|
| + // text from "sdch,gzip" to a mere "sdch" without modifying the compressed
|
| + // payload. To handle this gracefully, we simulate the "probably" deleted
|
| + // ",gzip" by appending a tentative gzip decode, which will default to a
|
| + // no-op pass through filter if it doesn't get gzip headers where expected.
|
| + if (types.size() == 1 && types.at(0) == SourceStream::TYPE_SDCH) {
|
| + types.push_back(SourceStream::TYPE_GZIP_FALLBACK);
|
| + // TODO(xunjieli): Add UMA to see how common this is.
|
| + }
|
|
|
| - return !encoding_types.empty()
|
| - ? Filter::Factory(encoding_types, *filter_context_) : NULL;
|
| + std::unique_ptr<SourceStream> upstream = URLRequestJob::SetUpSourceStream();
|
| + for (std::vector<SourceStream::SourceType>::reverse_iterator r_iter =
|
| + types.rbegin();
|
| + r_iter != types.rend(); ++r_iter) {
|
| + std::unique_ptr<FilterSourceStream> downstream;
|
| + SourceStream::SourceType type = *r_iter;
|
| + switch (type) {
|
| + case SourceStream::TYPE_BROTLI:
|
| + downstream = CreateBrotliSourceStream(std::move(upstream));
|
| + break;
|
| + case SourceStream::TYPE_SDCH: {
|
| + std::unique_ptr<SdchContext> context(new SdchContext(this));
|
| + sdch_delegate_.reset(new SdchPolicyDelegate(std::move(context)));
|
| + downstream.reset(
|
| + new SdchSourceStream(std::move(upstream), sdch_delegate_.get()));
|
| + break;
|
| + }
|
| + case SourceStream::TYPE_GZIP:
|
| + downstream = GzipSourceStream::Create(
|
| + std::move(upstream), GzipSourceStream::GZIP_SOURCE_STREAM_GZIP);
|
| + break;
|
| + case SourceStream::TYPE_DEFLATE:
|
| + downstream = GzipSourceStream::Create(
|
| + std::move(upstream), GzipSourceStream::GZIP_SOURCE_STREAM_DEFLATE);
|
| + break;
|
| + case SourceStream::TYPE_GZIP_FALLBACK:
|
| + downstream = GzipSourceStream::Create(
|
| + std::move(upstream),
|
| + GzipSourceStream::GZIP_SOURCE_STREAM_GZIP_WITH_FALLBACK);
|
| + break;
|
| + default:
|
| + NOTREACHED();
|
| + return nullptr;
|
| + }
|
| + if (downstream == nullptr)
|
| + return nullptr;
|
| + upstream = std::move(downstream);
|
| + }
|
| + return upstream;
|
| }
|
|
|
| bool URLRequestHttpJob::CopyFragmentOnRedirect(const GURL& location) const {
|
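A note on ordering in the loop above (annotation, not part of the patch):
Content-Encoding lists encodings in the order the server applied them, so they
must be undone in reverse, and the reverse_iterator walk arranges exactly that
by making the last-listed encoding the stream closest to the raw transaction
bytes. For a response with "Content-Encoding: sdch, gzip", the loop is
equivalent to the following explicit construction (using only the factory calls
shown in the hunk):

    // types == {TYPE_SDCH, TYPE_GZIP}; the reverse walk wraps the chain from
    // the inside out, so the job ends up reading Sdch(Gzip(raw)).
    std::unique_ptr<SourceStream> raw = URLRequestJob::SetUpSourceStream();
    std::unique_ptr<SourceStream> gunzipped = GzipSourceStream::Create(
        std::move(raw), GzipSourceStream::GZIP_SOURCE_STREAM_GZIP);
    // sdch_delegate_ is set up exactly as in the TYPE_SDCH case above.
    std::unique_ptr<SourceStream> decoded(
        new SdchSourceStream(std::move(gunzipped), sdch_delegate_.get()));
    return decoded;

Reads from the returned stream therefore gunzip the payload before it reaches
the SDCH decoder, undoing the encodings in reverse order of application. The
stripped-header fallback relies on the same ordering, which is why the
tentative gzip decoder is appended after the lone TYPE_SDCH entry rather than
inserted ahead of it.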
| @@ -1477,35 +1517,34 @@ void URLRequestHttpJob::UpdatePacketReadTimes() {
|
| }
|
|
|
| void URLRequestHttpJob::RecordPacketStats(
|
| - FilterContext::StatisticSelector statistic) const {
|
| + SdchPolicyDelegate::Context::StatisticSelector statistic) const {
|
| if (!packet_timing_enabled_ || (final_packet_time_ == base::Time()))
|
| return;
|
|
|
| base::TimeDelta duration = final_packet_time_ - request_time_snapshot_;
|
| switch (statistic) {
|
| - case FilterContext::SDCH_DECODE: {
|
| + case SdchPolicyDelegate::Context::SDCH_DECODE: {
|
| UMA_HISTOGRAM_CUSTOM_COUNTS("Sdch3.Network_Decode_Bytes_Processed_b",
|
| - static_cast<int>(bytes_observed_in_packets_), 500, 100000, 100);
|
| + static_cast<int>(bytes_observed_in_packets_),
|
| + 500, 100000, 100);
|
| return;
|
| }
|
| - case FilterContext::SDCH_PASSTHROUGH: {
|
| + case SdchPolicyDelegate::Context::SDCH_PASSTHROUGH: {
|
| // Despite advertising a dictionary, we handled non-sdch compressed
|
| // content.
|
| return;
|
| }
|
|
|
| - case FilterContext::SDCH_EXPERIMENT_DECODE: {
|
| - UMA_HISTOGRAM_CUSTOM_TIMES("Sdch3.Experiment3_Decode",
|
| - duration,
|
| - base::TimeDelta::FromMilliseconds(20),
|
| - base::TimeDelta::FromMinutes(10), 100);
|
| + case SdchPolicyDelegate::Context::SDCH_EXPERIMENT_DECODE: {
|
| + UMA_HISTOGRAM_CUSTOM_TIMES("Sdch3.Experiment3_Decode", duration,
|
| + base::TimeDelta::FromMilliseconds(20),
|
| + base::TimeDelta::FromMinutes(10), 100);
|
| return;
|
| }
|
| - case FilterContext::SDCH_EXPERIMENT_HOLDBACK: {
|
| - UMA_HISTOGRAM_CUSTOM_TIMES("Sdch3.Experiment3_Holdback",
|
| - duration,
|
| - base::TimeDelta::FromMilliseconds(20),
|
| - base::TimeDelta::FromMinutes(10), 100);
|
| + case SdchPolicyDelegate::Context::SDCH_EXPERIMENT_HOLDBACK: {
|
| + UMA_HISTOGRAM_CUSTOM_TIMES("Sdch3.Experiment3_Holdback", duration,
|
| + base::TimeDelta::FromMilliseconds(20),
|
| + base::TimeDelta::FromMinutes(10), 100);
|
| return;
|
| }
|
| default:
|
|
|