OLD | NEW |
(Empty) | |
| 1 // Copyright (c) 2008 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. |
| 4 |
| 5 #include "net/proxy/proxy_script_fetcher.h" |
| 6 |
| 7 #include "base/file_path.h" |
| 8 #include "base/compiler_specific.h" |
| 9 #include "base/path_service.h" |
| 10 #include "net/base/net_util.h" |
| 11 #include "net/url_request/url_request_unittest.h" |
| 12 #include "testing/gtest/include/gtest/gtest.h" |
| 13 |
| 14 // TODO(eroman): |
| 15 // - Test canceling an outstanding request. |
| 16 // - Test deleting ProxyScriptFetcher while a request is in progress. |
| 17 |
// Document root served by the TestServer instances in the http:// tests below.
const wchar_t kDocRoot[] = L"net/data/proxy_script_fetcher_unittest";
| 19 |
// Result of a synchronous PAC-script fetch: a net error code plus whatever
// bytes were downloaded.
struct FetchResult {
  int code;           // A net error code (net::OK on success).
  std::string bytes;  // The fetched response body (the PAC script text).
};
| 24 |
// A non-mock URL request context which can access http:// and file:// urls.
class RequestContext : public URLRequestContext {
 public:
  // Builds a context that connects directly (fixed no-proxy configuration).
  RequestContext() {
    net::ProxyInfo no_proxy;
    proxy_service_ = net::ProxyService::Create(&no_proxy);
    http_transaction_factory_ = net::HttpNetworkLayer::CreateFactory(
        proxy_service_);
  }
  // This context owns what it created: delete the transaction factory first,
  // since it was constructed on top of |proxy_service_|.
  ~RequestContext() {
    delete http_transaction_factory_;
    delete proxy_service_;
  }
};
| 39 |
| 40 // Helper for doing synch fetches. This object lives in SynchFetcher's |
| 41 // |io_thread_| and communicates with SynchFetcher though (|result|, |event|). |
| 42 class SynchFetcherThreadHelper { |
| 43 public: |
| 44 SynchFetcherThreadHelper(base::WaitableEvent* event, FetchResult* result) |
| 45 : event_(event), |
| 46 fetch_result_(result), |
| 47 url_request_context_(NULL), |
| 48 fetcher_(NULL), |
| 49 ALLOW_THIS_IN_INITIALIZER_LIST( |
| 50 callback_(this, &SynchFetcherThreadHelper::OnFetchCompletion)) { |
| 51 url_request_context_ = new RequestContext; |
| 52 fetcher_.reset(net::ProxyScriptFetcher::Create(url_request_context_.get())); |
| 53 } |
| 54 |
| 55 // Starts fetching the script at |url|. Upon completion |event_| will be |
| 56 // signalled, and the bytes read will have been written to |fetch_result_|. |
| 57 void Start(const GURL& url) { |
| 58 fetcher_->Fetch(url, &fetch_result_->bytes, &callback_); |
| 59 } |
| 60 |
| 61 void OnFetchCompletion(int result) { |
| 62 fetch_result_->code = result; |
| 63 event_->Signal(); |
| 64 } |
| 65 |
| 66 private: |
| 67 base::WaitableEvent* event_; |
| 68 FetchResult* fetch_result_; |
| 69 |
| 70 scoped_refptr<URLRequestContext> url_request_context_; |
| 71 |
| 72 scoped_ptr<net::ProxyScriptFetcher> fetcher_; |
| 73 net::CompletionCallbackImpl<SynchFetcherThreadHelper> callback_; |
| 74 }; |
| 75 |
// Helper that wraps ProxyScriptFetcher::Fetch() with a synchronous interface.
// It executes Fetch() on a helper thread (IO_Thread).
class SynchFetcher {
 public:
  // Spins up |io_thread_| and synchronously (blocking on |event_|) creates
  // the fetcher state that lives on that thread.
  SynchFetcher()
      : event_(false, false),
        io_thread_("IO_Thread"),
        thread_helper_(NULL) {
    // Start an IO thread.
    base::Thread::Options options;
    options.message_loop_type = MessageLoop::TYPE_IO;
    io_thread_.StartWithOptions(options);

    // Initialize the state in |io_thread_|.
    io_thread_.message_loop()->PostTask(FROM_HERE, NewRunnableMethod(
        this, &SynchFetcher::Init));
    Wait();
  }

  ~SynchFetcher() {
    // Tear down the state in |io_thread_|; blocks until Cleanup() has run.
    io_thread_.message_loop()->PostTask(FROM_HERE, NewRunnableMethod(
        this, &SynchFetcher::Cleanup));
    Wait();
  }

  // Synchronously fetch the url. Blocks the calling thread until the fetch
  // has completed (or failed) on |io_thread_|.
  FetchResult Fetch(const GURL& url) {
    io_thread_.message_loop()->PostTask(FROM_HERE, NewRunnableMethod(
        this, &SynchFetcher::AsynchFetch, url));
    Wait();
    return fetch_result_;
  }

 private:
  // [Runs on |io_thread_|] Allocates the URLRequestContext and the
  // ProxyScriptFetcher, which live inside |thread_helper_|.
  void Init() {
    thread_helper_ = new SynchFetcherThreadHelper(&event_, &fetch_result_);
    event_.Signal();
  }

  // [Runs on |io_thread_|] Starts the fetch. |event_| is signalled later by
  // |thread_helper_|'s completion callback, not by this method.
  void AsynchFetch(const GURL& url) {
    thread_helper_->Start(url);
  }

  // [Runs on |io_thread_|] Signals |event_| on cleanup completion.
  void Cleanup() {
    delete thread_helper_;
    thread_helper_ = NULL;
    MessageLoop::current()->RunAllPending();
    event_.Signal();
  }

  // Blocks until the IO thread signals |event_|, then resets the event so it
  // can be reused for the next round-trip.
  void Wait() {
    event_.Wait();
    event_.Reset();
  }

  base::WaitableEvent event_;
  base::Thread io_thread_;
  FetchResult fetch_result_;  // Written on |io_thread_|, read after Wait().
  // Holds all the state that lives on the IO thread, for easy cleanup.
  SynchFetcherThreadHelper* thread_helper_;
};
| 142 |
| 143 // Template specialization so SynchFetcher does not have to be refcounted. |
| 144 template<> |
| 145 void RunnableMethodTraits<SynchFetcher>::RetainCallee(SynchFetcher* remover) {} |
| 146 template<> |
| 147 void RunnableMethodTraits<SynchFetcher>::ReleaseCallee(SynchFetcher* remover) {} |
| 148 |
| 149 // Required to be in net namespace by FRIEND_TEST. |
| 150 namespace net { |
| 151 |
| 152 // Get a file:// url relative to net/data/proxy/proxy_script_fetcher_unittest. |
| 153 GURL GetTestFileUrl(const std::string& relpath) { |
| 154 FilePath path; |
| 155 PathService::Get(base::DIR_SOURCE_ROOT, &path); |
| 156 path = path.Append(FILE_PATH_LITERAL("net")); |
| 157 path = path.Append(FILE_PATH_LITERAL("data")); |
| 158 path = path.Append(FILE_PATH_LITERAL("proxy_script_fetcher_unittest")); |
| 159 GURL base_url = net::FilePathToFileURL(path); |
| 160 return GURL(base_url.spec() + "/" + relpath); |
| 161 } |
| 162 |
| 163 TEST(ProxyScriptFetcherTest, FileUrl) { |
| 164 SynchFetcher pac_fetcher; |
| 165 |
| 166 { // Fetch a non-existent file. |
| 167 FetchResult result = pac_fetcher.Fetch(GetTestFileUrl("does-not-exist")); |
| 168 EXPECT_EQ(net::ERR_FILE_NOT_FOUND, result.code); |
| 169 EXPECT_TRUE(result.bytes.empty()); |
| 170 } |
| 171 { // Fetch a file that exists. |
| 172 FetchResult result = pac_fetcher.Fetch(GetTestFileUrl("pac.txt")); |
| 173 EXPECT_EQ(net::OK, result.code); |
| 174 EXPECT_EQ("-pac.txt-\n", result.bytes); |
| 175 } |
| 176 } |
| 177 |
| 178 // Note that all mime types are allowed for PAC file, to be consistent |
| 179 // with other browsers. |
| 180 TEST(ProxyScriptFetcherTest, HttpMimeType) { |
| 181 TestServer server(kDocRoot); |
| 182 SynchFetcher pac_fetcher; |
| 183 |
| 184 { // Fetch a PAC with mime type "text/plain" |
| 185 GURL url = server.TestServerPage("files/pac.txt"); |
| 186 FetchResult result = pac_fetcher.Fetch(url); |
| 187 EXPECT_EQ(net::OK, result.code); |
| 188 EXPECT_EQ("-pac.txt-\n", result.bytes); |
| 189 } |
| 190 { // Fetch a PAC with mime type "text/html" |
| 191 GURL url = server.TestServerPage("files/pac.html"); |
| 192 FetchResult result = pac_fetcher.Fetch(url); |
| 193 EXPECT_EQ(net::OK, result.code); |
| 194 EXPECT_EQ("-pac.html-\n", result.bytes); |
| 195 } |
| 196 { // Fetch a PAC with mime type "application/x-ns-proxy-autoconfig" |
| 197 GURL url = server.TestServerPage("files/pac.nsproxy"); |
| 198 FetchResult result = pac_fetcher.Fetch(url); |
| 199 EXPECT_EQ(net::OK, result.code); |
| 200 EXPECT_EQ("-pac.nsproxy-\n", result.bytes); |
| 201 } |
| 202 } |
| 203 |
| 204 TEST(ProxyScriptFetcherTest, HttpStatusCode) { |
| 205 TestServer server(kDocRoot); |
| 206 SynchFetcher pac_fetcher; |
| 207 |
| 208 { // Fetch a PAC which gives a 500 -- FAIL |
| 209 GURL url = server.TestServerPage("files/500.pac"); |
| 210 FetchResult result = pac_fetcher.Fetch(url); |
| 211 EXPECT_EQ(net::ERR_PAC_STATUS_NOT_OK, result.code); |
| 212 EXPECT_TRUE(result.bytes.empty()); |
| 213 } |
| 214 { // Fetch a PAC which gives a 404 -- FAIL |
| 215 GURL url = server.TestServerPage("files/404.pac"); |
| 216 FetchResult result = pac_fetcher.Fetch(url); |
| 217 EXPECT_EQ(net::ERR_PAC_STATUS_NOT_OK, result.code); |
| 218 EXPECT_TRUE(result.bytes.empty()); |
| 219 } |
| 220 } |
| 221 |
| 222 TEST(ProxyScriptFetcherTest, ContentDisposition) { |
| 223 TestServer server(kDocRoot); |
| 224 SynchFetcher pac_fetcher; |
| 225 |
| 226 // Fetch PAC scripts via HTTP with a Content-Disposition header -- should |
| 227 // have no effect. |
| 228 GURL url = server.TestServerPage("files/downloadable.pac"); |
| 229 FetchResult result = pac_fetcher.Fetch(url); |
| 230 EXPECT_EQ(net::OK, result.code); |
| 231 EXPECT_EQ("-downloadable.pac-\n", result.bytes); |
| 232 } |
| 233 |
| 234 TEST(ProxyScriptFetcherTest, TooLarge) { |
| 235 TestServer server(kDocRoot); |
| 236 SynchFetcher pac_fetcher; |
| 237 |
| 238 // Set the maximum response size to 50 bytes. |
| 239 int prev_size = net::ProxyScriptFetcher::SetSizeConstraintForUnittest(50); |
| 240 |
| 241 // These two URLs are the same file, but are http:// vs file:// |
| 242 GURL urls[] = { |
| 243 server.TestServerPage("files/large-pac.nsproxy"), |
| 244 GetTestFileUrl("large-pac.nsproxy") |
| 245 }; |
| 246 |
| 247 // Try fetching URLs that are 101 bytes large. We should abort the request |
| 248 // after 50 bytes have been read, and fail with a too large error. |
| 249 for (size_t i = 0; i < arraysize(urls); ++i) { |
| 250 const GURL& url = urls[i]; |
| 251 FetchResult result = pac_fetcher.Fetch(url); |
| 252 EXPECT_EQ(net::ERR_FILE_TOO_BIG, result.code); |
| 253 EXPECT_TRUE(result.bytes.empty()); |
| 254 } |
| 255 |
| 256 // Restore the original size bound. |
| 257 net::ProxyScriptFetcher::SetSizeConstraintForUnittest(prev_size); |
| 258 |
| 259 { // Make sure we can still fetch regular URLs. |
| 260 GURL url = server.TestServerPage("files/pac.nsproxy"); |
| 261 FetchResult result = pac_fetcher.Fetch(url); |
| 262 EXPECT_EQ(net::OK, result.code); |
| 263 EXPECT_EQ("-pac.nsproxy-\n", result.bytes); |
| 264 } |
| 265 } |
| 266 |
| 267 TEST(ProxyScriptFetcherTest, Hang) { |
| 268 TestServer server(kDocRoot); |
| 269 SynchFetcher pac_fetcher; |
| 270 |
| 271 // Set the timeout period to 0.5 seconds. |
| 272 int prev_timeout = |
| 273 net::ProxyScriptFetcher::SetTimeoutConstraintForUnittest(500); |
| 274 |
| 275 // Try fetching a URL which takes 1.2 seconds. We should abort the request |
| 276 // after 500 ms, and fail with a timeout error. |
| 277 { GURL url = server.TestServerPage("slow/proxy.pac?1.2"); |
| 278 FetchResult result = pac_fetcher.Fetch(url); |
| 279 EXPECT_EQ(net::ERR_TIMED_OUT, result.code); |
| 280 EXPECT_TRUE(result.bytes.empty()); |
| 281 } |
| 282 |
| 283 // Restore the original timeout period. |
| 284 net::ProxyScriptFetcher::SetTimeoutConstraintForUnittest(prev_timeout); |
| 285 |
| 286 { // Make sure we can still fetch regular URLs. |
| 287 GURL url = server.TestServerPage("files/pac.nsproxy"); |
| 288 FetchResult result = pac_fetcher.Fetch(url); |
| 289 EXPECT_EQ(net::OK, result.code); |
| 290 EXPECT_EQ("-pac.nsproxy-\n", result.bytes); |
| 291 } |
| 292 } |
| 293 |
| 294 } // namespace net |
OLD | NEW |