Index: net/base/sdch_dictionary_fetcher.h |
diff --git a/net/base/sdch_dictionary_fetcher.h b/net/base/sdch_dictionary_fetcher.h |
index 82c0b10c6e48a19733021be751986ac5990a60b3..16fc091e08ef9502245aafe75aba4245f7daa428 100644 |
--- a/net/base/sdch_dictionary_fetcher.h |
+++ b/net/base/sdch_dictionary_fetcher.h |
@@ -18,15 +18,18 @@ |
#include "base/threading/non_thread_safe.h" |
#include "net/base/sdch_manager.h" |
#include "net/url_request/url_fetcher_delegate.h" |
+#include "net/url_request/url_request.h" |
+#include "net/url_request/url_request_context.h" |
Ryan Sleevi
2014/08/20 18:27:55
Totes a layering violation here, and pre-existing
Randy Smith (Not in Mondays)
2014/08/20 19:21:50
Makes sense. I'm not clear why I can't leave sdch
|
namespace net { |
-class URLFetcher; |
+class URLRequest; |
class URLRequestContextGetter; |
+class URLRequestThrottlerEntryInterface; |
class NET_EXPORT SdchDictionaryFetcher |
- : public URLFetcherDelegate, |
- public SdchFetcher, |
+ : public SdchFetcher, |
+ public URLRequest::Delegate, |
public base::NonThreadSafe { |
public: |
// The consumer must guarantee that |*manager| outlives |
@@ -41,36 +44,54 @@ class NET_EXPORT SdchDictionaryFetcher |
virtual void Cancel() OVERRIDE; |
private: |
- // Delay in ms between Schedule and actual download. |
- // This leaves the URL in a queue, which is de-duped, so that there is less |
- // chance we'll try to load the same URL multiple times when a pile of |
- // page subresources (or tabs opened in parallel) all suggest the dictionary. |
- static const int kMsDelayFromRequestTillDownload = 100; |
+ // URLRequest::Delegate overrides |
+ virtual void OnResponseStarted(URLRequest* request) OVERRIDE; |
+ virtual void OnReadCompleted(URLRequest* request, int bytes_read) OVERRIDE; |
- // Ensure the download after the above delay. |
- void ScheduleDelayedRun(); |
+ // Dispatch the next waiting request, if any. |
+ void DispatchRun(); |
  // Make sure we're processing (or waiting for) the arrival of the next URL |
// in the |fetch_queue_|. |
void StartFetching(); |
- // Implementation of URLFetcherDelegate. Called after transmission |
- // completes (either successfully or with failure). |
- virtual void OnURLFetchComplete(const URLFetcher* source) OVERRIDE; |
+ // Actually dispatch the request for the next dictionary. |
+ void StartURLRequest(); |
+ |
+ // Used for interactions with URLRequestThrottlingManager. |
+ base::TimeTicks GetBackoffReleaseTime(); |
SdchManager* const manager_; |
// A queue of URLs that are being used to download dictionaries. |
std::queue<GURL> fetch_queue_; |
- // The currently outstanding URL fetch of a dicitonary. |
- // If this is null, then there is no outstanding request. |
- scoped_ptr<URLFetcher> current_fetch_; |
- // Always spread out the dictionary fetches, so that they don't steal |
- // bandwidth from the actual page load. Create delayed tasks to spread out |
- // the download. |
- base::WeakPtrFactory<SdchDictionaryFetcher> weak_factory_; |
- bool task_is_pending_; |
+ // The request and buffer used for getting the current dictionary |
+ // Both are null when a fetch is not in progress. |
+ scoped_ptr<URLRequest> current_request_; |
+ scoped_refptr<IOBuffer> buffer_; |
+ |
+ // The currently accumulating dictionary. |
+ std::string dictionary_; |
+ |
+ // Used to determine how long to wait before making a request or doing a |
+ // retry. |
+ // |
+ // Both of them can only be accessed on the IO thread. |
+ // |
+ // We need not only the throttler entry for |original_URL|, but also |
+ // the one for |url|. For example, consider the case that URL A |
+ // redirects to URL B, for which the server returns a 500 |
+ // response. In this case, the exponential back-off release time of |
+ // URL A won't increase. If we retry without considering the |
+ // back-off constraint of URL B, we may send out too many requests |
+ // for URL A in a short period of time. |
+ // |
+ // Both of these will be NULL if |
+ // URLRequestContext::throttler_manager() is NULL. |
+ scoped_refptr<URLRequestThrottlerEntryInterface> |
+ original_url_throttler_entry_; |
+ scoped_refptr<URLRequestThrottlerEntryInterface> url_throttler_entry_; |
// Althought the SDCH spec does not preclude a server from using a single URL |
// to load several distinct dictionaries (by telling a client to load a |
@@ -91,6 +112,8 @@ class NET_EXPORT SdchDictionaryFetcher |
// fetching. |
scoped_refptr<URLRequestContextGetter> context_; |
+ base::WeakPtrFactory<SdchDictionaryFetcher> weak_factory_; |
+ |
DISALLOW_COPY_AND_ASSIGN(SdchDictionaryFetcher); |
}; |