Index: net/url_request/url_request_http_job.cc
diff --git a/net/url_request/url_request_http_job.cc b/net/url_request/url_request_http_job.cc
index a7388d1fde70b469ad1e0e06b4a8056b857ee828..e7168559b2014c7ad1d97f14c80f3a1361bac111 100644
--- a/net/url_request/url_request_http_job.cc
+++ b/net/url_request/url_request_http_job.cc
@@ -66,6 +66,7 @@ class URLRequestHttpJob::HttpFilterContext : public FilterContext {
   virtual bool IsSdchResponse() const OVERRIDE;
   virtual int64 GetByteReadCount() const OVERRIDE;
   virtual int GetResponseCode() const OVERRIDE;
+  virtual const URLRequestContext* GetURLRequestContext() const OVERRIDE;
   virtual void RecordPacketStats(StatisticSelector statistic) const OVERRIDE;

   // Method to allow us to reset filter context for a response that should have
@@ -134,6 +135,11 @@ int URLRequestHttpJob::HttpFilterContext::GetResponseCode() const {
   return job_->GetResponseCode();
 }

+const URLRequestContext*
+URLRequestHttpJob::HttpFilterContext::GetURLRequestContext() const {
+  return job_->request()->context();
+}
+
 void URLRequestHttpJob::HttpFilterContext::RecordPacketStats(
     StatisticSelector statistic) const {
   job_->RecordPacketStats(statistic);
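Side note on the accessor added above: GetURLRequestContext() gives filter code a path to the per-URLRequestContext SdchManager, which is what lets the rest of this patch drop SdchManager::Global(). A minimal sketch of how a filter might use it (the filter_context() accessor on the filter side is an assumption here, not part of this diff):

    // Sketch only: look up the per-context SdchManager through the new
    // FilterContext accessor instead of the process-global singleton.
    const URLRequestContext* context =
        filter_context()->GetURLRequestContext();
    SdchManager* sdch_manager = context ? context->sdch_manager() : NULL;
    if (sdch_manager) {
      // Per-context dictionary work goes here; a NULL manager simply
      // means this context does not support SDCH.
    }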
@@ -220,20 +226,6 @@ URLRequestHttpJob::~URLRequestHttpJob() {
   // filter_context_ is still alive.
   DestroyFilters();

-  if (sdch_dictionary_url_.is_valid()) {
-    // Prior to reaching the destructor, request_ has been set to a NULL
-    // pointer, so request_->url() is no longer valid in the destructor, and we
-    // use an alternate copy |request_info_.url|.
-    SdchManager* manager = SdchManager::Global();
-    // To be extra safe, since this is a "different time" from when we decided
-    // to get the dictionary, we'll validate that an SdchManager is available.
-    // At shutdown time, care is taken to be sure that we don't delete this
-    // globally useful instance "too soon," so this check is just defensive
-    // coding to assure that IF the system is shutting down, we don't have any
-    // problem if the manager was deleted ahead of time.
-    if (manager)  // Defensive programming.
-      manager->FetchDictionary(request_info_.url, sdch_dictionary_url_);
-  }
   DoneWithRequest(ABORTED);
 }

@@ -313,8 +305,8 @@ void URLRequestHttpJob::NotifyHeadersComplete() {
   ProcessStrictTransportSecurityHeader();
   ProcessPublicKeyPinsHeader();

-  if (SdchManager::Global() &&
-      SdchManager::Global()->IsInSupportedDomain(request_->url())) {
+  SdchManager* manager(request()->context()->sdch_manager());

jar (doing other things)
2014/06/09 23:03:01
nit: in this context, just as in line 463, a better…

Randy Smith (Not in Mondays)
2014/06/10 20:38:41
Done.

+  if (manager && manager->IsInSupportedDomain(request_->url())) {
     const std::string name = "Get-Dictionary";
     std::string url_text;
     void* iter = NULL;
@@ -329,7 +321,12 @@ void URLRequestHttpJob::NotifyHeadersComplete() {
     // alternate copy.
     DCHECK_EQ(request_->url(), request_info_.url);
     // Resolve suggested URL relative to request url.
-    sdch_dictionary_url_ = request_info_.url.Resolve(url_text);
+    GURL sdch_dictionary_url = request_info_.url.Resolve(url_text);
+    if (sdch_dictionary_url.is_valid()) {
+      SdchManager* manager = request()->context()->sdch_manager();

jar (doing other things)
2014/06/09 23:03:01
Didn't you just get this on line 308... and validate…

Randy Smith (Not in Mondays)
2014/06/10 20:38:41
Whoops; copy/paste error. Done.

+      if (manager)
+        manager->FetchDictionary(request_info_.url, sdch_dictionary_url);
+    }
   }
 }
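Per the exchange above, the second lookup of the SdchManager inside the Get-Dictionary branch was a copy/paste slip: |manager| is already fetched (and checked) at the top of NotifyHeadersComplete(). A sketch of how the branch presumably reads after the "Done." fix, inferred from the review comments rather than copied from the landed patch:

    // Sketch only: reuse the |manager| obtained earlier; the enclosing
    // "if (manager && ...)" already guarantees it is non-NULL, so no
    // second lookup or NULL check is needed.
    GURL sdch_dictionary_url = request_info_.url.Resolve(url_text);
    if (sdch_dictionary_url.is_valid())
      manager->FetchDictionary(request_info_.url, sdch_dictionary_url);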
@@ -463,6 +460,8 @@ void URLRequestHttpJob::StartTransactionInternal() {
 }

 void URLRequestHttpJob::AddExtraHeaders() {
+  SdchManager* sdch_manager = request()->context()->sdch_manager();
+
   // Supply Accept-Encoding field only if it is not already provided.
   // It should be provided IF the content is known to have restrictions on
   // potential encoding, such as streaming multi-media.
@@ -472,19 +471,19 @@ void URLRequestHttpJob::AddExtraHeaders() {
   // simple_data_source.
   if (!request_info_.extra_headers.HasHeader(
           HttpRequestHeaders::kAcceptEncoding)) {
-    bool advertise_sdch = SdchManager::Global() &&
-        SdchManager::Global()->IsInSupportedDomain(request_->url());
+    bool advertise_sdch = sdch_manager &&
+        sdch_manager->IsInSupportedDomain(request_->url());
     std::string avail_dictionaries;
     if (advertise_sdch) {
-      SdchManager::Global()->GetAvailDictionaryList(request_->url(),
-                                                    &avail_dictionaries);
+      sdch_manager->GetAvailDictionaryList(request_->url(),
+                                           &avail_dictionaries);

       // The AllowLatencyExperiment() is only true if we've successfully done a
       // full SDCH compression recently in this browser session for this host.
       // Note that for this path, there might be no applicable dictionaries,
       // and hence we can't participate in the experiment.
       if (!avail_dictionaries.empty() &&
-          SdchManager::Global()->AllowLatencyExperiment(request_->url())) {
+          sdch_manager->AllowLatencyExperiment(request_->url())) {
         // We are participating in the test (or control), and hence we'll
         // eventually record statistics via either SDCH_EXPERIMENT_DECODE or
         // SDCH_EXPERIMENT_HOLDBACK, and we'll need some packet timing data.
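Worth noting for embedders: nothing in AddExtraHeaders() requires the context to own an SdchManager. When request()->context()->sdch_manager() returns NULL, advertise_sdch stays false and SDCH is simply left out of Accept-Encoding. A rough wiring sketch for a test or embedder context (set_sdch_manager() is assumed here to follow URLRequestContext's usual setter/getter pattern; it is not shown in this diff, and ownership handling is elided):

    // Sketch under the stated assumption: give the context an
    // SdchManager so URLRequestHttpJob sees a non-NULL manager and can
    // advertise SDCH for supported domains.
    SdchManager sdch_manager;
    context->set_sdch_manager(&sdch_manager);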