OLD | NEW |
---|---|
1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 // Support modularity by calling to load a new SDCH filter dictionary. | 5 // Support modularity by calling to load a new SDCH filter dictionary. |
6 // Note that this sort of calling can't be done in the /net directory, as it has | 6 // Note that this sort of calling can't be done in the /net directory, as it has |
7 // no concept of the HTTP cache (which is only visible at the browser level). | 7 // no concept of the HTTP cache (which is only visible at the browser level). |
8 | 8 |
9 #ifndef NET_BASE_SDCH_DICTIONARY_FETCHER_H_ | 9 #ifndef NET_BASE_SDCH_DICTIONARY_FETCHER_H_ |
10 #define NET_BASE_SDCH_DICTIONARY_FETCHER_H_ | 10 #define NET_BASE_SDCH_DICTIONARY_FETCHER_H_ |
11 | 11 |
12 #include <queue> | 12 #include <queue> |
13 #include <set> | 13 #include <set> |
14 #include <string> | 14 #include <string> |
15 | 15 |
16 #include "base/memory/scoped_ptr.h" | 16 #include "base/memory/scoped_ptr.h" |
17 #include "base/memory/weak_ptr.h" | 17 #include "base/memory/weak_ptr.h" |
18 #include "base/threading/non_thread_safe.h" | 18 #include "base/threading/non_thread_safe.h" |
19 #include "net/base/sdch_manager.h" | 19 #include "net/base/sdch_manager.h" |
20 #include "net/url_request/url_fetcher_delegate.h" | 20 #include "net/url_request/url_fetcher_delegate.h" |
21 #include "net/url_request/url_request.h" | |
22 #include "net/url_request/url_request_context.h" | |
Ryan Sleevi
2014/08/20 18:27:55
Totes a layering violation here, and pre-existing
Randy Smith (Not in Mondays)
2014/08/20 19:21:50
Makes sense. I'm not clear why I can't leave sdch
| |
21 | 23 |
22 namespace net { | 24 namespace net { |
23 | 25 |
24 class URLFetcher; | 26 class URLRequest; |
25 class URLRequestContextGetter; | 27 class URLRequestContextGetter; |
28 class URLRequestThrottlerEntryInterface; | |
26 | 29 |
27 class NET_EXPORT SdchDictionaryFetcher | 30 class NET_EXPORT SdchDictionaryFetcher |
28 : public URLFetcherDelegate, | 31 : public SdchFetcher, |
29 public SdchFetcher, | 32 public URLRequest::Delegate, |
30 public base::NonThreadSafe { | 33 public base::NonThreadSafe { |
31 public: | 34 public: |
32 // The consumer must guarantee that |*manager| outlives | 35 // The consumer must guarantee that |*manager| outlives |
33 // this object. The current implementation guarantees this by | 36 // this object. The current implementation guarantees this by |
34 // the SdchManager owning this object. | 37 // the SdchManager owning this object. |
35 SdchDictionaryFetcher(SdchManager* manager, | 38 SdchDictionaryFetcher(SdchManager* manager, |
36 scoped_refptr<URLRequestContextGetter> context); | 39 scoped_refptr<URLRequestContextGetter> context); |
37 virtual ~SdchDictionaryFetcher(); | 40 virtual ~SdchDictionaryFetcher(); |
38 | 41 |
39 // Implementation of SdchFetcher class. | 42 // Implementation of SdchFetcher class. |
40 virtual void Schedule(const GURL& dictionary_url) OVERRIDE; | 43 virtual void Schedule(const GURL& dictionary_url) OVERRIDE; |
41 virtual void Cancel() OVERRIDE; | 44 virtual void Cancel() OVERRIDE; |
42 | 45 |
43 private: | 46 private: |
44 // Delay in ms between Schedule and actual download. | 47 // URLRequest::Delegate overrides |
45 // This leaves the URL in a queue, which is de-duped, so that there is less | 48 virtual void OnResponseStarted(URLRequest* request) OVERRIDE; |
46 // chance we'll try to load the same URL multiple times when a pile of | 49 virtual void OnReadCompleted(URLRequest* request, int bytes_read) OVERRIDE; |
47 // page subresources (or tabs opened in parallel) all suggest the dictionary. | |
48 static const int kMsDelayFromRequestTillDownload = 100; | |
49 | 50 |
50 // Ensure the download after the above delay. | 51 // Dispatch the next waiting request, if any. |
51 void ScheduleDelayedRun(); | 52 void DispatchRun(); |
52 | 53 |
53 // Make sure we're processing (or waiting for) the arrival of the next URL | 54 // Make sure we're processing (or waiting for) the arrival of the next URL |
54 // in the |fetch_queue_|. | 55 // in the |fetch_queue_|. |
55 void StartFetching(); | 56 void StartFetching(); |
56 | 57 |
57 // Implementation of URLFetcherDelegate. Called after transmission | 58 // Actually dispatch the request for the next dictionary. |
58 // completes (either successfully or with failure). | 59 void StartURLRequest(); |
59 virtual void OnURLFetchComplete(const URLFetcher* source) OVERRIDE; | 60 |
61 // Used for interactions with URLRequestThrottlingManager. | |
62 base::TimeTicks GetBackoffReleaseTime(); | |
60 | 63 |
61 SdchManager* const manager_; | 64 SdchManager* const manager_; |
62 | 65 |
63 // A queue of URLs that are being used to download dictionaries. | 66 // A queue of URLs that are being used to download dictionaries. |
64 std::queue<GURL> fetch_queue_; | 67 std::queue<GURL> fetch_queue_; |
65 // The currently outstanding URL fetch of a dictionary. | |
66 // If this is null, then there is no outstanding request. | |
67 scoped_ptr<URLFetcher> current_fetch_; | |
68 | 68 |
69 // Always spread out the dictionary fetches, so that they don't steal | 69 // The request and buffer used for getting the current dictionary |
70 // bandwidth from the actual page load. Create delayed tasks to spread out | 70 // Both are null when a fetch is not in progress. |
71 // the download. | 71 scoped_ptr<URLRequest> current_request_; |
72 base::WeakPtrFactory<SdchDictionaryFetcher> weak_factory_; | 72 scoped_refptr<IOBuffer> buffer_; |
73 bool task_is_pending_; | 73 |
74 // The currently accumulating dictionary. | |
75 std::string dictionary_; | |
76 | |
77 // Used to determine how long to wait before making a request or doing a | |
78 // retry. | |
79 // | |
80 // Both of them can only be accessed on the IO thread. | |
81 // | |
82 // We need not only the throttler entry for |original_URL|, but also | |
83 // the one for |url|. For example, consider the case that URL A | |
84 // redirects to URL B, for which the server returns a 500 | |
85 // response. In this case, the exponential back-off release time of | |
86 // URL A won't increase. If we retry without considering the | |
87 // back-off constraint of URL B, we may send out too many requests | |
88 // for URL A in a short period of time. | |
89 // | |
90 // Both of these will be NULL if | |
91 // URLRequestContext::throttler_manager() is NULL. | |
92 scoped_refptr<URLRequestThrottlerEntryInterface> | |
93 original_url_throttler_entry_; | |
94 scoped_refptr<URLRequestThrottlerEntryInterface> url_throttler_entry_; | |
74 | 95 |
75 // Although the SDCH spec does not preclude a server from using a single URL | 96 // Although the SDCH spec does not preclude a server from using a single URL |
76 // to load several distinct dictionaries (by telling a client to load a | 97 // to load several distinct dictionaries (by telling a client to load a |
77 // dictionary from an URL several times), current implementations seem to have | 98 // dictionary from an URL several times), current implementations seem to have |
78 // that 1-1 relationship (i.e., each URL points at a single dictionary, and | 99 // that 1-1 relationship (i.e., each URL points at a single dictionary, and |
79 // the dictionary content does not change over time, and hence is not worth | 100 // the dictionary content does not change over time, and hence is not worth |
80 // trying to load more than once). In addition, some dictionaries prove | 101 // trying to load more than once). In addition, some dictionaries prove |
81 // unloadable only after downloading them (because they are too large? ...or | 102 // unloadable only after downloading them (because they are too large? ...or |
82 // malformed?). As a protective element, Chromium will *only* load a | 103 // malformed?). As a protective element, Chromium will *only* load a |
83 // dictionary at most once from a given URL (so that it doesn't waste | 104 // dictionary at most once from a given URL (so that it doesn't waste |
84 // bandwidth trying repeatedly). | 105 // bandwidth trying repeatedly). |
85 // The following set lists all the dictionary URLs that we've tried to load, | 106 // The following set lists all the dictionary URLs that we've tried to load, |
86 // so that we won't try to load from an URL more than once. | 107 // so that we won't try to load from an URL more than once. |
87 // TODO(jar): Try to augment the SDCH proposal to include this restriction. | 108 // TODO(jar): Try to augment the SDCH proposal to include this restriction. |
88 std::set<GURL> attempted_load_; | 109 std::set<GURL> attempted_load_; |
89 | 110 |
90 // Store the system_url_request_context_getter to use it when we start | 111 // Store the system_url_request_context_getter to use it when we start |
91 // fetching. | 112 // fetching. |
92 scoped_refptr<URLRequestContextGetter> context_; | 113 scoped_refptr<URLRequestContextGetter> context_; |
93 | 114 |
115 base::WeakPtrFactory<SdchDictionaryFetcher> weak_factory_; | |
116 | |
94 DISALLOW_COPY_AND_ASSIGN(SdchDictionaryFetcher); | 117 DISALLOW_COPY_AND_ASSIGN(SdchDictionaryFetcher); |
95 }; | 118 }; |
96 | 119 |
97 } // namespace net | 120 } // namespace net |
98 | 121 |
99 #endif // NET_BASE_SDCH_DICTIONARY_FETCHER_H_ | 122 #endif // NET_BASE_SDCH_DICTIONARY_FETCHER_H_ |
OLD | NEW |