Chromium Code Reviews

Index: net/url_request/url_request_http_job.cc
diff --git a/net/url_request/url_request_http_job.cc b/net/url_request/url_request_http_job.cc
index 127f30ce17e815dce412d457e0f264d6b7b9905a..3783e9badc3a02dddb7988089bf8c0f9d5c6e42a 100644
--- a/net/url_request/url_request_http_job.cc
+++ b/net/url_request/url_request_http_job.cc
@@ -32,6 +32,11 @@
#include "net/base/url_util.h"
#include "net/cert/cert_status_flags.h"
#include "net/cookies/cookie_store.h"
+#include "net/filter/brotli_stream_source.h"
+#include "net/filter/filter_stream_source.h"
+#include "net/filter/gzip_stream_source.h"
+#include "net/filter/sdch_stream_source.h"
+#include "net/filter/stream_source.h"
#include "net/http/http_content_disposition.h"
#include "net/http/http_network_session.h"
#include "net/http/http_request_headers.h"
@@ -63,6 +68,12 @@ static const char kAvailDictionaryHeader[] = "Avail-Dictionary";
namespace {
+const char kDeflate[] = "deflate";
+const char kGZip[] = "gzip";
+const char kSdch[] = "sdch";
+const char kXGZip[] = "x-gzip";
+const char kBrotli[] = "br";
+
// True if the request method is "safe" (per section 4.2.1 of RFC 7231).
bool IsMethodSafe(const std::string& method) {
return method == "GET" || method == "HEAD" || method == "OPTIONS" ||
@@ -179,85 +190,67 @@ net::URLRequestRedirectJob* MaybeInternallyRedirect(
namespace net {
-class URLRequestHttpJob::HttpFilterContext : public FilterContext {
+class URLRequestHttpJob::SdchContext : public SdchPolicyDelegate::Context {
public:
- explicit HttpFilterContext(URLRequestHttpJob* job);
- ~HttpFilterContext() override;
-
- // FilterContext implementation.
+ explicit SdchContext(URLRequestHttpJob* job);
+ ~SdchContext() override;
+ // SdchPolicyDelegate::Context implementation
bool GetMimeType(std::string* mime_type) const override;
- bool GetURL(GURL* gurl) const override;
- base::Time GetRequestTime() const override;
+ bool GetURL(GURL* url) const override;
+ // base::Time GetRequestTime() const override;
bool IsCachedContent() const override;
+ SdchManager* GetSdchManager() const override;
SdchManager::DictionarySet* SdchDictionariesAdvertised() const override;
- int64_t GetByteReadCount() const override;
+ // int64_t GetByteReadCount() const override;
int GetResponseCode() const override;
- const URLRequestContext* GetURLRequestContext() const override;
- void RecordPacketStats(StatisticSelector statistic) const override;
const BoundNetLog& GetNetLog() const override;
private:
+ // URLRequestHttpJob owns the SdchPolicyDelegate, which owns this context,
+ // so it is safe to keep a raw pointer to |job_|.
URLRequestHttpJob* job_;
- // URLRequestHttpJob may be detached from URLRequest, but we still need to
+ // URLRequestHttpJob may be detached from URLRequest, but still need to
// return something.
BoundNetLog dummy_log_;
- DISALLOW_COPY_AND_ASSIGN(HttpFilterContext);
+ DISALLOW_COPY_AND_ASSIGN(SdchContext);
};
-URLRequestHttpJob::HttpFilterContext::HttpFilterContext(URLRequestHttpJob* job)
- : job_(job) {
- DCHECK(job_);
-}
+URLRequestHttpJob::SdchContext::SdchContext(URLRequestHttpJob* job)
+ : job_(job) {}
-URLRequestHttpJob::HttpFilterContext::~HttpFilterContext() {
-}
+URLRequestHttpJob::SdchContext::~SdchContext() {}
-bool URLRequestHttpJob::HttpFilterContext::GetMimeType(
- std::string* mime_type) const {
+bool URLRequestHttpJob::SdchContext::GetMimeType(std::string* mime_type) const {
return job_->GetMimeType(mime_type);
}
-bool URLRequestHttpJob::HttpFilterContext::GetURL(GURL* gurl) const {
+bool URLRequestHttpJob::SdchContext::GetURL(GURL* gurl) const {
if (!job_->request())
return false;
*gurl = job_->request()->url();
return true;
}
-base::Time URLRequestHttpJob::HttpFilterContext::GetRequestTime() const {
- return job_->request() ? job_->request()->request_time() : base::Time();
+bool URLRequestHttpJob::SdchContext::IsCachedContent() const {
+ return job_->is_cached_content_;
}
-bool URLRequestHttpJob::HttpFilterContext::IsCachedContent() const {
- return job_->is_cached_content_;
+SdchManager* URLRequestHttpJob::SdchContext::GetSdchManager() const {
+ return job_->request() ? job_->request()->context()->sdch_manager() : nullptr;
}
SdchManager::DictionarySet*
-URLRequestHttpJob::HttpFilterContext::SdchDictionariesAdvertised() const {
+URLRequestHttpJob::SdchContext::SdchDictionariesAdvertised() const {
return job_->dictionaries_advertised_.get();
}
-int64_t URLRequestHttpJob::HttpFilterContext::GetByteReadCount() const {
- return job_->prefilter_bytes_read();
-}
-
-int URLRequestHttpJob::HttpFilterContext::GetResponseCode() const {
+int URLRequestHttpJob::SdchContext::GetResponseCode() const {
return job_->GetResponseCode();
}
-const URLRequestContext*
-URLRequestHttpJob::HttpFilterContext::GetURLRequestContext() const {
- return job_->request() ? job_->request()->context() : NULL;
-}
-
-void URLRequestHttpJob::HttpFilterContext::RecordPacketStats(
- StatisticSelector statistic) const {
- job_->RecordPacketStats(statistic);
-}
-
-const BoundNetLog& URLRequestHttpJob::HttpFilterContext::GetNetLog() const {
+const BoundNetLog& URLRequestHttpJob::SdchContext::GetNetLog() const {
return job_->request() ? job_->request()->net_log() : dummy_log_;
}
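The new comment in SdchContext describes the ownership chain that makes the raw |job_| pointer safe: the job owns the delegate, the delegate owns the context, so the context can never outlive the job. A compact standalone sketch of that chain, using hypothetical Job/Delegate/Context names rather than the real net/ classes:

#include <memory>
#include <string>
#include <utility>

class Job;  // Hypothetical stand-in for URLRequestHttpJob.

// Hypothetical stand-in for SdchPolicyDelegate::Context.
class Context {
 public:
  explicit Context(Job* job) : job_(job) {}
  bool GetMimeType(std::string* mime_type) const;  // Forwards to |job_|.

 private:
  Job* job_;  // Not owned; the owning Job outlives this context.
};

// Hypothetical stand-in for SdchPolicyDelegate.
class Delegate {
 public:
  explicit Delegate(std::unique_ptr<Context> context)
      : context_(std::move(context)) {}

 private:
  std::unique_ptr<Context> context_;
};

class Job {
 public:
  void SetUp() {
    // Job owns Delegate, Delegate owns Context, Context points back at Job.
    delegate_.reset(new Delegate(std::unique_ptr<Context>(new Context(this))));
  }
  bool GetMimeType(std::string* mime_type) const {
    *mime_type = "text/html";
    return true;
  }

 private:
  std::unique_ptr<Delegate> delegate_;
};

bool Context::GetMimeType(std::string* mime_type) const {
  return job_->GetMimeType(mime_type);
}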
@@ -310,7 +303,6 @@ URLRequestHttpJob::URLRequestHttpJob(
bytes_observed_in_packets_(0),
request_time_snapshot_(),
final_packet_time_(),
- filter_context_(new HttpFilterContext(this)),
on_headers_received_callback_(
base::Bind(&URLRequestHttpJob::OnHeadersReceivedCallback,
base::Unretained(this))),
@@ -333,14 +325,10 @@ URLRequestHttpJob::~URLRequestHttpJob() {
DCHECK(!sdch_test_control_ || !sdch_test_activated_);
if (!is_cached_content_) {
if (sdch_test_control_)
- RecordPacketStats(FilterContext::SDCH_EXPERIMENT_HOLDBACK);
+ RecordPacketStats(SdchPolicyDelegate::Context::SDCH_EXPERIMENT_HOLDBACK);
if (sdch_test_activated_)
- RecordPacketStats(FilterContext::SDCH_EXPERIMENT_DECODE);
+ RecordPacketStats(SdchPolicyDelegate::Context::SDCH_EXPERIMENT_DECODE);
}
- // Make sure SDCH filters are told to emit histogram data while
- // filter_context_ is still alive.
- DestroyFilters();
-
DoneWithRequest(ABORTED);
}
@@ -1203,27 +1191,79 @@ void URLRequestHttpJob::PopulateNetErrorDetails(
return transaction_->PopulateNetErrorDetails(details);
}
-std::unique_ptr<Filter> URLRequestHttpJob::SetupFilter() const {
+std::unique_ptr<StreamSource> URLRequestHttpJob::SetupSource() {
DCHECK(transaction_.get());
if (!response_info_)
return nullptr;
- std::vector<Filter::FilterType> encoding_types;
- std::string encoding_type;
HttpResponseHeaders* headers = GetResponseHeaders();
+ std::string type;
+ std::vector<StreamSource::SourceType> types;
size_t iter = 0;
- while (headers->EnumerateHeader(&iter, "Content-Encoding", &encoding_type)) {
- encoding_types.push_back(Filter::ConvertEncodingToType(encoding_type));
+ while (headers->EnumerateHeader(&iter, "Content-Encoding", &type)) {
+ if (base::LowerCaseEqualsASCII(type, kBrotli)) {
+ types.push_back(StreamSource::TYPE_BROTLI);
+ } else if (base::LowerCaseEqualsASCII(type, kDeflate)) {
+ types.push_back(StreamSource::TYPE_DEFLATE);
+ } else if (base::LowerCaseEqualsASCII(type, kGZip) ||
+ base::LowerCaseEqualsASCII(type, kXGZip)) {
+ types.push_back(StreamSource::TYPE_GZIP);
+ } else if (base::LowerCaseEqualsASCII(type, kSdch)) {
+ types.push_back(StreamSource::TYPE_SDCH);
+ }
}
- // Even if encoding types are empty, there is a chance that we need to add
- // some decoding, as some proxies strip encoding completely. In such cases,
- // we may need to add (for example) SDCH filtering (when the context suggests
- // it is appropriate).
- Filter::FixupEncodingTypes(*filter_context_, &encoding_types);
+ // SDCH-specific hack: if the first filter is SDCH, add a gzip filter in front
+ // of it in fallback mode.
+ // Some proxies (found currently in Argentina) strip the Content-Encoding
+ // text from "sdch,gzip" to a mere "sdch" without modifying the compressed
+ // payload. To handle this gracefully, we simulate the "probably" deleted
+ // ",gzip" by appending a tentative gzip decode, which will default to a
+ // no-op pass through filter if it doesn't get gzip headers where expected.
+ if (types.size() == 1 && types.at(0) == StreamSource::TYPE_SDCH) {
+ types.insert(types.begin(), StreamSource::TYPE_GZIP_FALLBACK);
+ // TODO(xunjieli): Add UMA to see how common this is.
+ }
- return !encoding_types.empty()
- ? Filter::Factory(encoding_types, *filter_context_) : NULL;
+ std::unique_ptr<StreamSource> previous = URLRequestJob::SetupSource();
+ for (std::vector<StreamSource::SourceType>::reverse_iterator r_iter =
+ types.rbegin();
+ r_iter != types.rend(); ++r_iter) {
+ std::unique_ptr<FilterStreamSource> next = nullptr;
mmenke (2016/07/28 18:40:13): nit: nullptr not needed.

xunjieli (2016/08/01 16:46:23): Done.
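The nit refers to the redundant initializer: a default-constructed std::unique_ptr already holds nullptr, so declaring the variable as std::unique_ptr<FilterStreamSource> next; says the same thing. A tiny self-contained check with a plain std::unique_ptr, independent of the net/ classes:

#include <cassert>
#include <memory>

int main() {
  std::unique_ptr<int> next;    // Default-constructed: already null.
  assert(next == nullptr);
  assert(!next);

  next.reset(new int(42));      // Becomes non-null only once reset/assigned.
  assert(next != nullptr);
  return 0;
}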
+ StreamSource::SourceType type = *r_iter;
+ switch (type) {
+ case StreamSource::TYPE_BROTLI:
+ next = CreateBrotliStreamSource(std::move(previous));
+ break;
+ case StreamSource::TYPE_SDCH: {
+ std::unique_ptr<SdchContext> context(new SdchContext(this));
+ sdch_delegate_.reset(new SdchPolicyDelegate(std::move(context)));
+ next.reset(
+ new SdchStreamSource(std::move(previous), sdch_delegate_.get()));
+ break;
+ }
+ case StreamSource::TYPE_GZIP:
+ next = GzipStreamSource::Create(
+ std::move(previous), GzipStreamSource::GZIP_STREAM_SOURCE_GZIP);
+ break;
+ case StreamSource::TYPE_DEFLATE:
+ next = GzipStreamSource::Create(
+ std::move(previous), GzipStreamSource::GZIP_STREAM_SOURCE_DEFLATE);
+ break;
+ case StreamSource::TYPE_GZIP_FALLBACK:
+ next = GzipStreamSource::Create(
+ std::move(previous),
+ GzipStreamSource::GZIP_STREAM_SOURCE_GZIP_WITH_FALLBACK);
+ break;
+ default:
+ NOTREACHED();
+ return nullptr;
+ }
+ if (next == nullptr)
+ return nullptr;
+ previous = std::move(next);
+ }
+ return previous;
}
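SetupSource() walks the Content-Encoding list in reverse and wraps the transaction's raw source one decoder at a time, so the outermost source (the one the job reads from) undoes the most recently applied encoding first; the TYPE_GZIP_FALLBACK entry covers the proxy case described in the comment above, where "sdch,gzip" is rewritten to a bare "sdch". A standalone sketch of that wrapping order, using hypothetical RawSource/GzipSource/SdchSource classes rather than the real StreamSource API:

#include <memory>
#include <utility>
#include <vector>

// Hypothetical stand-ins for the StreamSource hierarchy.
struct Source {
  virtual ~Source() {}
};
struct RawSource : Source {};  // Bytes straight from the HttpTransaction.
struct GzipSource : Source {
  explicit GzipSource(std::unique_ptr<Source> upstream)
      : upstream_(std::move(upstream)) {}
  std::unique_ptr<Source> upstream_;
};
struct SdchSource : Source {
  explicit SdchSource(std::unique_ptr<Source> upstream)
      : upstream_(std::move(upstream)) {}
  std::unique_ptr<Source> upstream_;
};

enum class Type { GZIP, SDCH };

// "Content-Encoding: sdch, gzip" yields {SDCH, GZIP}. Walking the list in
// reverse wraps the raw source in a gzip decoder first and then in an sdch
// decoder, so the returned chain is SdchSource -> GzipSource -> RawSource:
// reads undo the most recently applied encoding first.
std::unique_ptr<Source> BuildChain(const std::vector<Type>& types) {
  std::unique_ptr<Source> previous(new RawSource);
  for (auto it = types.rbegin(); it != types.rend(); ++it) {
    std::unique_ptr<Source> next;
    switch (*it) {
      case Type::GZIP:
        next.reset(new GzipSource(std::move(previous)));
        break;
      case Type::SDCH:
        next.reset(new SdchSource(std::move(previous)));
        break;
    }
    previous = std::move(next);
  }
  return previous;  // Outermost source: the one the job reads from.
}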
bool URLRequestHttpJob::CopyFragmentOnRedirect(const GURL& location) const {
@@ -1408,9 +1448,9 @@ int URLRequestHttpJob::ReadRawData(IOBuffer* buf, int buf_size) {
DCHECK_NE(buf_size, 0);
DCHECK(!read_in_progress_);
- int rv = transaction_->Read(
- buf, buf_size,
- base::Bind(&URLRequestHttpJob::OnReadCompleted, base::Unretained(this)));
+ int rv = transaction_->Read(buf, buf_size,
+ base::Bind(&URLRequestHttpJob::OnReadCompleted,
+ weak_factory_.GetWeakPtr()));
mmenke (2016/07/28 18:40:13): Is there a reason for switching to a weak_factory_

xunjieli (2016/08/01 16:46:23): Done. No reason. I think I changed that during deb
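The question is about the lifetime contract of the read callback: base::Unretained(this) asserts that the job outlives every invocation, while binding weak_factory_.GetWeakPtr() makes the callback a silent no-op once the job has been destroyed. A minimal sketch of the two bindings, assuming Chromium's //base (the Job class and OnReadCompleted() below are illustrative, not the real URLRequestHttpJob):

#include "base/bind.h"
#include "base/callback.h"
#include "base/memory/weak_ptr.h"

class Job {
 public:
  Job() : weak_factory_(this) {}

  base::Closure BindUnretained() {
    // Caller promises |this| outlives every run of the returned callback.
    return base::Bind(&Job::OnReadCompleted, base::Unretained(this));
  }

  base::Closure BindWeak() {
    // The callback does nothing if |this| has already been destroyed.
    return base::Bind(&Job::OnReadCompleted, weak_factory_.GetWeakPtr());
  }

 private:
  void OnReadCompleted() {}

  // Chromium convention: the factory is declared last so it is destroyed
  // first, invalidating its weak pointers before other members go away.
  base::WeakPtrFactory<Job> weak_factory_;
};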
if (ShouldFixMismatchedContentLength(rv))
rv = OK;
@@ -1522,35 +1562,34 @@ void URLRequestHttpJob::UpdatePacketReadTimes() {
}
void URLRequestHttpJob::RecordPacketStats(
- FilterContext::StatisticSelector statistic) const {
+ SdchPolicyDelegate::Context::StatisticSelector statistic) const {
if (!packet_timing_enabled_ || (final_packet_time_ == base::Time()))
return;
base::TimeDelta duration = final_packet_time_ - request_time_snapshot_;
switch (statistic) {
- case FilterContext::SDCH_DECODE: {
+ case SdchPolicyDelegate::Context::StatisticSelector::SDCH_DECODE: {
UMA_HISTOGRAM_CUSTOM_COUNTS("Sdch3.Network_Decode_Bytes_Processed_b",
- static_cast<int>(bytes_observed_in_packets_), 500, 100000, 100);
+ static_cast<int>(bytes_observed_in_packets_),
+ 500, 100000, 100);
return;
}
- case FilterContext::SDCH_PASSTHROUGH: {
+ case SdchPolicyDelegate::Context::StatisticSelector::SDCH_PASSTHROUGH: {
// Despite advertising a dictionary, we handled non-sdch compressed
// content.
return;
}
- case FilterContext::SDCH_EXPERIMENT_DECODE: {
- UMA_HISTOGRAM_CUSTOM_TIMES("Sdch3.Experiment3_Decode",
- duration,
- base::TimeDelta::FromMilliseconds(20),
- base::TimeDelta::FromMinutes(10), 100);
+ case SdchPolicyDelegate::Context::SDCH_EXPERIMENT_DECODE: {
+ UMA_HISTOGRAM_CUSTOM_TIMES("Sdch3.Experiment3_Decode", duration,
+ base::TimeDelta::FromMilliseconds(20),
+ base::TimeDelta::FromMinutes(10), 100);
return;
}
- case FilterContext::SDCH_EXPERIMENT_HOLDBACK: {
- UMA_HISTOGRAM_CUSTOM_TIMES("Sdch3.Experiment3_Holdback",
- duration,
- base::TimeDelta::FromMilliseconds(20),
- base::TimeDelta::FromMinutes(10), 100);
+ case SdchPolicyDelegate::Context::SDCH_EXPERIMENT_HOLDBACK: {
+ UMA_HISTOGRAM_CUSTOM_TIMES("Sdch3.Experiment3_Holdback", duration,
+ base::TimeDelta::FromMilliseconds(20),
+ base::TimeDelta::FromMinutes(10), 100);
return;
}
default: