Chromium Code Reviews

Index: net/base/sdch_dictionary_fetcher.h
diff --git a/net/base/sdch_dictionary_fetcher.h b/net/base/sdch_dictionary_fetcher.h
index 82c0b10c6e48a19733021be751986ac5990a60b3..16fc091e08ef9502245aafe75aba4245f7daa428 100644
--- a/net/base/sdch_dictionary_fetcher.h
+++ b/net/base/sdch_dictionary_fetcher.h
@@ -18,15 +18,18 @@
 #include "base/threading/non_thread_safe.h"
 #include "net/base/sdch_manager.h"
 #include "net/url_request/url_fetcher_delegate.h"
+#include "net/url_request/url_request.h"
+#include "net/url_request/url_request_context.h"
|
Ryan Sleevi, 2014/08/25 20:00:01:
You don't need this because of the forward declaration…

Randy Smith (Not in Mondays), 2014/08/26 15:34:17:
Don't need it anymore, actually; I'm using URLRequ…
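As context for the include discussion above, here is a minimal sketch, using a hypothetical FetcherExample class (not part of this patch), of why a forward declaration alone lets a header hold a pointer to URLRequest without pulling url_request.h into the include graph:

  // fetcher_example.h (illustrative only)
  #include <memory>

  namespace net {

  class URLRequest;  // Forward declaration: no #include of url_request.h needed.

  class FetcherExample {
   public:
    FetcherExample();
    ~FetcherExample();  // Defined in the .cc, where URLRequest is a complete type.

   private:
    // Holding only a pointer means the compiler never needs URLRequest's size
    // or members here. (The patch uses scoped_ptr<URLRequest>; std::unique_ptr
    // keeps this sketch standalone.)
    std::unique_ptr<URLRequest> current_request_;
  };

  }  // namespace net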
 namespace net {
-class URLFetcher;
+class URLRequest;
 class URLRequestContextGetter;
+class URLRequestThrottlerEntryInterface;
 class NET_EXPORT SdchDictionaryFetcher
-    : public URLFetcherDelegate,
-      public SdchFetcher,
+    : public SdchFetcher,
+      public URLRequest::Delegate,
       public base::NonThreadSafe {
  public:
   // The consumer must guarantee that |*manager| outlives
@@ -41,36 +44,54 @@ class NET_EXPORT SdchDictionaryFetcher
   virtual void Cancel() OVERRIDE;
  private:
-  // Delay in ms between Schedule and actual download.
-  // This leaves the URL in a queue, which is de-duped, so that there is less
-  // chance we'll try to load the same URL multiple times when a pile of
-  // page subresources (or tabs opened in parallel) all suggest the dictionary.
-  static const int kMsDelayFromRequestTillDownload = 100;
+  // URLRequest::Delegate overrides
+  virtual void OnResponseStarted(URLRequest* request) OVERRIDE;
+  virtual void OnReadCompleted(URLRequest* request, int bytes_read) OVERRIDE;
-  // Ensure the download after the above delay.
-  void ScheduleDelayedRun();
+  // Dispatch the next waiting request, if any.
+  void DispatchRun();
   // Make sure we're processing (or waiting for) the the arrival of the next URL
   // in the |fetch_queue_|.
   void StartFetching();
-  // Implementation of URLFetcherDelegate. Called after transmission
-  // completes (either successfully or with failure).
-  virtual void OnURLFetchComplete(const URLFetcher* source) OVERRIDE;
+  // Actually dispatch the request for the next dictionary.
Ryan Sleevi, 2014/08/25 20:00:01:
Can you expand this comment, particularly in light…

Randy Smith (Not in Mondays), 2014/09/02 19:35:03:
Done; let me know what you think.
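To make the new read path concrete, here is a small self-contained model of the callback-driven read loop that OnResponseStarted()/OnReadCompleted() typically drive. FakeRequest and DictionaryFetchModel are illustrative stand-ins, not Chromium classes; the real fetcher reads into an IOBuffer via URLRequest, and its reads may complete asynchronously, whereas this toy completes synchronously:

  #include <algorithm>
  #include <cstddef>
  #include <iostream>
  #include <string>
  #include <vector>

  // Stand-in for a network request that hands back data in chunks.
  class FakeRequest {
   public:
    explicit FakeRequest(std::string payload) : payload_(std::move(payload)) {}

    // Copies up to |max_bytes| into |buffer|; returns the number of bytes
    // copied, or 0 once the payload is exhausted (an end-of-stream signal).
    int Read(char* buffer, int max_bytes) {
      int n = static_cast<int>(
          std::min<size_t>(max_bytes, payload_.size() - offset_));
      std::copy(payload_.begin() + offset_, payload_.begin() + offset_ + n, buffer);
      offset_ += n;
      return n;
    }

   private:
    std::string payload_;
    size_t offset_ = 0;
  };

  // Models the fetcher's delegate side: OnResponseStarted kicks off the read
  // loop, OnReadCompleted appends each chunk to the accumulating dictionary.
  class DictionaryFetchModel {
   public:
    void OnResponseStarted(FakeRequest* request) { ReadMore(request); }

    void OnReadCompleted(FakeRequest* request, int bytes_read) {
      if (bytes_read <= 0)
        return;  // Done (or error); a real fetcher would hand off |dictionary_|.
      dictionary_.append(buffer_.data(), bytes_read);
      ReadMore(request);
    }

    const std::string& dictionary() const { return dictionary_; }

   private:
    void ReadMore(FakeRequest* request) {
      // In this toy the read completes immediately, so the completion callback
      // is invoked right away rather than posted asynchronously.
      int bytes_read = request->Read(buffer_.data(), static_cast<int>(buffer_.size()));
      OnReadCompleted(request, bytes_read);
    }

    std::vector<char> buffer_ = std::vector<char>(16);  // Small, to force looping.
    std::string dictionary_;
  };

  int main() {
    FakeRequest request("example dictionary payload, delivered in 16-byte chunks");
    DictionaryFetchModel fetcher;
    fetcher.OnResponseStarted(&request);
    std::cout << fetcher.dictionary() << "\n";  // Prints the reassembled payload.
  }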
+  void StartURLRequest();
+
+  // Used for interactions with URLRequestThrottlingManager.
+  base::TimeTicks GetBackoffReleaseTime();
   SdchManager* const manager_;
   // A queue of URLs that are being used to download dictionaries.
   std::queue<GURL> fetch_queue_;
-  // The currently outstanding URL fetch of a dicitonary.
-  // If this is null, then there is no outstanding request.
-  scoped_ptr<URLFetcher> current_fetch_;
-  // Always spread out the dictionary fetches, so that they don't steal
-  // bandwidth from the actual page load. Create delayed tasks to spread out
-  // the download.
-  base::WeakPtrFactory<SdchDictionaryFetcher> weak_factory_;
-  bool task_is_pending_;
+  // The request and buffer used for getting the current dictionary
+  // Both are null when a fetch is not in progress.
+  scoped_ptr<URLRequest> current_request_;
+  scoped_refptr<IOBuffer> buffer_;
+
+  // The currently accumulating dictionary.
+  std::string dictionary_;
+
+  // Used to determine how long to wait before making a request or doing a
+  // retry.
+  //
+  // Both of them can only be accessed on the IO thread.
Ryan Sleevi, 2014/08/25 20:00:01:
Well, this class is base::NonThreadSafe, so I would…

Randy Smith (Not in Mondays), 2014/08/26 15:34:17:
I believe that this class is used exclusively on the…
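On the NonThreadSafe point above: here is a standalone analogue of the pattern base::NonThreadSafe provides, where an object binds to the first thread that touches it and asserts that later calls stay on that thread. ThreadAffinityChecker and FetcherLikeObject are hypothetical names for illustration, and the assert stands in for Chromium's DCHECK(CalledOnValidThread()):

  #include <cassert>
  #include <thread>

  // Rough stand-in for base::NonThreadSafe: binds to the first calling thread.
  class ThreadAffinityChecker {
   public:
    bool CalledOnValidThread() const {
      if (!bound_) {
        thread_id_ = std::this_thread::get_id();
        bound_ = true;
      }
      return thread_id_ == std::this_thread::get_id();
    }

   private:
    mutable bool bound_ = false;
    mutable std::thread::id thread_id_;
  };

  class FetcherLikeObject : private ThreadAffinityChecker {
   public:
    void Schedule() {
      // Equivalent in spirit to DCHECK(CalledOnValidThread()) in Chromium code.
      assert(CalledOnValidThread());
      // ... queue the URL, start a fetch, etc.
    }
  };

  int main() {
    FetcherLikeObject fetcher;
    fetcher.Schedule();  // OK: first use binds the object to this thread.
    // Calling fetcher.Schedule() from another std::thread would trip the assert.
  }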
+  //
+  // We need not only the throttler entry for |original_URL|, but also
+  // the one for |url|. For example, consider the case that URL A
+  // redirects to URL B, for which the server returns a 500
+  // response. In this case, the exponential back-off release time of
+  // URL A won't increase. If we retry without considering the
+  // back-off constraint of URL B, we may send out too many requests
+  // for URL A in a short period of time.
Ryan Sleevi, 2014/08/25 20:00:01:
Please reword this comment without the use of "we"…

Randy Smith (Not in Mondays), 2014/08/26 15:34:17:
Done both here and in url_fetcher_core.h.
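To make the redirect/back-off reasoning in that comment concrete, here is a self-contained sketch of honoring whichever of the two back-off release times is later; it uses std::chrono and a toy ThrottlerEntryModel rather than base::TimeTicks and the real URLRequestThrottlerEntryInterface:

  #include <algorithm>
  #include <chrono>
  #include <iostream>

  using Clock = std::chrono::steady_clock;

  // Toy stand-in for a throttler entry: knows when its URL may next be hit.
  struct ThrottlerEntryModel {
    Clock::time_point release_time;
  };

  // Mirrors the idea behind GetBackoffReleaseTime(): a retry must respect the
  // back-off of both the original URL (A) and the redirect target (B), so the
  // effective release time is the later of the two.
  Clock::time_point CombinedReleaseTime(const ThrottlerEntryModel& original_url_entry,
                                        const ThrottlerEntryModel& url_entry) {
    return std::max(original_url_entry.release_time, url_entry.release_time);
  }

  int main() {
    Clock::time_point now = Clock::now();
    // URL A responded normally (no back-off), but its redirect target B
    // returned 500s and is backed off for another 30 seconds.
    ThrottlerEntryModel entry_a{now};
    ThrottlerEntryModel entry_b{now + std::chrono::seconds(30)};

    auto wait = CombinedReleaseTime(entry_a, entry_b) - now;
    std::cout << "Next attempt allowed in "
              << std::chrono::duration_cast<std::chrono::seconds>(wait).count()
              << "s\n";  // Prints 30, driven by B's back-off.
  }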
+  //
+  // Both of these will be NULL if
+  // URLRequestContext::throttler_manager() is NULL.
+  scoped_refptr<URLRequestThrottlerEntryInterface>
+      original_url_throttler_entry_;
+  scoped_refptr<URLRequestThrottlerEntryInterface> url_throttler_entry_;
   // Althought the SDCH spec does not preclude a server from using a single URL
   // to load several distinct dictionaries (by telling a client to load a
@@ -91,6 +112,8 @@ class NET_EXPORT SdchDictionaryFetcher
   // fetching.
   scoped_refptr<URLRequestContextGetter> context_;
+  base::WeakPtrFactory<SdchDictionaryFetcher> weak_factory_;
+
   DISALLOW_COPY_AND_ASSIGN(SdchDictionaryFetcher);
 };
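Relating to the queue de-duplication described in the removed kMsDelayFromRequestTillDownload comment and the retained fetch_queue_: a standalone sketch, with hypothetical names, of enqueueing a dictionary URL only the first time it is suggested, so a pile of subresources (or parallel tabs) does not trigger repeated fetches:

  #include <iostream>
  #include <queue>
  #include <set>
  #include <string>

  // Toy scheduler: Schedule() enqueues a URL only the first time it is seen,
  // DispatchNext() pops one URL per call, mimicking one fetch at a time.
  class DictionaryFetchQueue {
   public:
    void Schedule(const std::string& url) {
      if (!attempted_.insert(url).second)
        return;  // Already queued or fetched once; de-dupe the request.
      queue_.push(url);
    }

    bool DispatchNext(std::string* url_out) {
      if (queue_.empty())
        return false;
      *url_out = queue_.front();
      queue_.pop();
      return true;
    }

   private:
    std::queue<std::string> queue_;
    std::set<std::string> attempted_;
  };

  int main() {
    DictionaryFetchQueue fetcher;
    // Several subresources (or parallel tabs) all suggest the same dictionary.
    fetcher.Schedule("https://example.com/dict.sdch");
    fetcher.Schedule("https://example.com/dict.sdch");
    fetcher.Schedule("https://example.com/other.sdch");

    std::string url;
    while (fetcher.DispatchNext(&url))
      std::cout << "fetching " << url << "\n";  // dict.sdch is printed only once.
  }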