// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "net/proxy/proxy_script_fetcher_impl.h"

#include <string>

#include "base/compiler_specific.h"
#include "base/files/file_path.h"
#include "base/path_service.h"
#include "base/strings/utf_string_conversions.h"
#include "net/base/filename_util.h"
#include "net/base/load_flags.h"
#include "net/base/network_delegate_impl.h"
#include "net/base/test_completion_callback.h"
#include "net/cert/mock_cert_verifier.h"
#include "net/disk_cache/disk_cache.h"
#include "net/dns/mock_host_resolver.h"
#include "net/http/http_cache.h"
#include "net/http/http_network_session.h"
#include "net/http/http_server_properties_impl.h"
#include "net/http/transport_security_state.h"
#include "net/ssl/ssl_config_service_defaults.h"
#include "net/test/spawned_test_server/spawned_test_server.h"
#include "net/url_request/url_request_context_storage.h"
#include "net/url_request/url_request_file_job.h"
#include "net/url_request/url_request_job_factory_impl.h"
#include "net/url_request/url_request_test_util.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "testing/platform_test.h"

#if !defined(DISABLE_FILE_SUPPORT)
#include "net/url_request/file_protocol_handler.h"
#endif

using base::ASCIIToUTF16;

namespace net {

// TODO(eroman):
//   - Test canceling an outstanding request.
//   - Test deleting ProxyScriptFetcher while a request is in progress.

namespace {

const base::FilePath::CharType kDocRoot[] =
    FILE_PATH_LITERAL("net/data/proxy_script_fetcher_unittest");

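// Holds the outcome of a single fetch: the completion code and the fetched
// script text.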
struct FetchResult {
  int code;
  base::string16 text;
};

// A non-mock URL request context which can access http:// URLs, and also
// file:// URLs when the tests are built with file support.
class RequestContext : public URLRequestContext {
 public:
  RequestContext() : storage_(this) {
    ProxyConfig no_proxy;
    storage_.set_host_resolver(scoped_ptr<HostResolver>(new MockHostResolver));
    storage_.set_cert_verifier(new MockCertVerifier);
    storage_.set_transport_security_state(new TransportSecurityState);
    storage_.set_proxy_service(ProxyService::CreateFixed(no_proxy));
    storage_.set_ssl_config_service(new SSLConfigServiceDefaults);
    storage_.set_http_server_properties(
        scoped_ptr<HttpServerProperties>(new HttpServerPropertiesImpl()));

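    // Build an HttpNetworkSession from the objects owned by |storage_| above,
    // and wrap it in an HttpCache with an in-memory backend so no cache state
    // persists across tests.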
    HttpNetworkSession::Params params;
    params.host_resolver = host_resolver();
    params.cert_verifier = cert_verifier();
    params.transport_security_state = transport_security_state();
    params.proxy_service = proxy_service();
    params.ssl_config_service = ssl_config_service();
    params.http_server_properties = http_server_properties();
    scoped_refptr<HttpNetworkSession> network_session(
        new HttpNetworkSession(params));
    storage_.set_http_transaction_factory(new HttpCache(
        network_session.get(), HttpCache::DefaultBackend::InMemory(0)));
    URLRequestJobFactoryImpl* job_factory = new URLRequestJobFactoryImpl();
#if !defined(DISABLE_FILE_SUPPORT)
    job_factory->SetProtocolHandler(
        "file", new FileProtocolHandler(base::MessageLoopProxy::current()));
#endif
    storage_.set_job_factory(job_factory);
  }

  ~RequestContext() override { AssertNoURLRequests(); }

 private:
  URLRequestContextStorage storage_;
};

#if !defined(DISABLE_FILE_SUPPORT)
// Get a file:// URL relative to net/data/proxy_script_fetcher_unittest.
GURL GetTestFileUrl(const std::string& relpath) {
  base::FilePath path;
  PathService::Get(base::DIR_SOURCE_ROOT, &path);
  path = path.AppendASCII("net");
  path = path.AppendASCII("data");
  path = path.AppendASCII("proxy_script_fetcher_unittest");
  GURL base_url = FilePathToFileURL(path);
  return GURL(base_url.spec() + "/" + relpath);
}
#endif  // !defined(DISABLE_FILE_SUPPORT)

// Really simple NetworkDelegate so we can allow local file access on ChromeOS
// without introducing layering violations. Also causes a test failure if a
// request is seen that doesn't set a load flag to bypass revocation checking.
class BasicNetworkDelegate : public NetworkDelegateImpl {
 public:
  BasicNetworkDelegate() {}
  ~BasicNetworkDelegate() override {}

 private:
  int OnBeforeURLRequest(URLRequest* request,
                         const CompletionCallback& callback,
                         GURL* new_url) override {
    EXPECT_TRUE(request->load_flags() & LOAD_DISABLE_CERT_REVOCATION_CHECKING);
    return OK;
  }

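  // The remaining overrides are permissive no-ops: they allow every operation
  // and report success, so the delegate never interferes with the fetches
  // under test.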
  int OnBeforeSendHeaders(URLRequest* request,
                          const CompletionCallback& callback,
                          HttpRequestHeaders* headers) override {
    return OK;
  }

  void OnSendHeaders(URLRequest* request,
                     const HttpRequestHeaders& headers) override {}

  int OnHeadersReceived(
      URLRequest* request,
      const CompletionCallback& callback,
      const HttpResponseHeaders* original_response_headers,
      scoped_refptr<HttpResponseHeaders>* override_response_headers,
      GURL* allowed_unsafe_redirect_url) override {
    return OK;
  }

  void OnBeforeRedirect(URLRequest* request,
                        const GURL& new_location) override {}

  void OnResponseStarted(URLRequest* request) override {}

  void OnRawBytesRead(const URLRequest& request, int bytes_read) override {}

  void OnCompleted(URLRequest* request, bool started) override {}

  void OnURLRequestDestroyed(URLRequest* request) override {}

  void OnPACScriptError(int line_number,
                        const base::string16& error) override {}

  NetworkDelegate::AuthRequiredResponse OnAuthRequired(
      URLRequest* request,
      const AuthChallengeInfo& auth_info,
      const AuthCallback& callback,
      AuthCredentials* credentials) override {
    return NetworkDelegate::AUTH_REQUIRED_RESPONSE_NO_ACTION;
  }

  bool OnCanGetCookies(const URLRequest& request,
                       const CookieList& cookie_list) override {
    return true;
  }

  bool OnCanSetCookie(const URLRequest& request,
                      const std::string& cookie_line,
                      CookieOptions* options) override {
    return true;
  }

  bool OnCanAccessFile(const net::URLRequest& request,
                       const base::FilePath& path) const override {
    return true;
  }

  bool OnCanThrottleRequest(const URLRequest& request) const override {
    return false;
  }

  DISALLOW_COPY_AND_ASSIGN(BasicNetworkDelegate);
};

}  // namespace

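// Test fixture that provides a local HTTP test server rooted at kDocRoot and
// a RequestContext whose network delegate is the BasicNetworkDelegate above.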
class ProxyScriptFetcherImplTest : public PlatformTest {
 public:
  ProxyScriptFetcherImplTest()
      : test_server_(SpawnedTestServer::TYPE_HTTP,
                     net::SpawnedTestServer::kLocalhost,
                     base::FilePath(kDocRoot)) {
    context_.set_network_delegate(&network_delegate_);
  }

 protected:
  SpawnedTestServer test_server_;
  BasicNetworkDelegate network_delegate_;
  RequestContext context_;
};

#if !defined(DISABLE_FILE_SUPPORT)
TEST_F(ProxyScriptFetcherImplTest, FileUrl) {
  ProxyScriptFetcherImpl pac_fetcher(&context_);

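  // Fetch() is asynchronous for file:// and http:// URLs: it returns
  // ERR_IO_PENDING immediately and delivers the final result through the
  // completion callback.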
  {  // Fetch a non-existent file.
    base::string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher.Fetch(GetTestFileUrl("does-not-exist"),
                                   &text, callback.callback());
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(ERR_FILE_NOT_FOUND, callback.WaitForResult());
    EXPECT_TRUE(text.empty());
  }
  {  // Fetch a file that exists.
    base::string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher.Fetch(GetTestFileUrl("pac.txt"),
                                   &text, callback.callback());
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(OK, callback.WaitForResult());
    EXPECT_EQ(ASCIIToUTF16("-pac.txt-\n"), text);
  }
}
#endif  // !defined(DISABLE_FILE_SUPPORT)

// Note that all mime types are allowed for PAC files, to be consistent
// with other browsers.
TEST_F(ProxyScriptFetcherImplTest, HttpMimeType) {
  ASSERT_TRUE(test_server_.Start());

  ProxyScriptFetcherImpl pac_fetcher(&context_);

  {  // Fetch a PAC with mime type "text/plain"
    GURL url(test_server_.GetURL("files/pac.txt"));
    base::string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher.Fetch(url, &text, callback.callback());
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(OK, callback.WaitForResult());
    EXPECT_EQ(ASCIIToUTF16("-pac.txt-\n"), text);
  }
  {  // Fetch a PAC with mime type "text/html"
    GURL url(test_server_.GetURL("files/pac.html"));
    base::string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher.Fetch(url, &text, callback.callback());
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(OK, callback.WaitForResult());
    EXPECT_EQ(ASCIIToUTF16("-pac.html-\n"), text);
  }
  {  // Fetch a PAC with mime type "application/x-ns-proxy-autoconfig"
    GURL url(test_server_.GetURL("files/pac.nsproxy"));
    base::string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher.Fetch(url, &text, callback.callback());
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(OK, callback.WaitForResult());
    EXPECT_EQ(ASCIIToUTF16("-pac.nsproxy-\n"), text);
  }
}

TEST_F(ProxyScriptFetcherImplTest, HttpStatusCode) {
  ASSERT_TRUE(test_server_.Start());

  ProxyScriptFetcherImpl pac_fetcher(&context_);

  {  // Fetch a PAC which gives a 500 -- FAIL
    GURL url(test_server_.GetURL("files/500.pac"));
    base::string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher.Fetch(url, &text, callback.callback());
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(ERR_PAC_STATUS_NOT_OK, callback.WaitForResult());
    EXPECT_TRUE(text.empty());
  }
  {  // Fetch a PAC which gives a 404 -- FAIL
    GURL url(test_server_.GetURL("files/404.pac"));
    base::string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher.Fetch(url, &text, callback.callback());
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(ERR_PAC_STATUS_NOT_OK, callback.WaitForResult());
    EXPECT_TRUE(text.empty());
  }
}

TEST_F(ProxyScriptFetcherImplTest, ContentDisposition) {
  ASSERT_TRUE(test_server_.Start());

  ProxyScriptFetcherImpl pac_fetcher(&context_);

  // Fetch PAC scripts via HTTP with a Content-Disposition header -- should
  // have no effect.
  GURL url(test_server_.GetURL("files/downloadable.pac"));
  base::string16 text;
  TestCompletionCallback callback;
  int result = pac_fetcher.Fetch(url, &text, callback.callback());
  EXPECT_EQ(ERR_IO_PENDING, result);
  EXPECT_EQ(OK, callback.WaitForResult());
  EXPECT_EQ(ASCIIToUTF16("-downloadable.pac-\n"), text);
}

// Verifies that PAC scripts are not being cached.
TEST_F(ProxyScriptFetcherImplTest, NoCache) {
  ASSERT_TRUE(test_server_.Start());

  ProxyScriptFetcherImpl pac_fetcher(&context_);

  // Fetch a PAC script whose HTTP headers make it cacheable for 1 hour.
  GURL url(test_server_.GetURL("files/cacheable_1hr.pac"));
  {
    base::string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher.Fetch(url, &text, callback.callback());
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(OK, callback.WaitForResult());
    EXPECT_EQ(ASCIIToUTF16("-cacheable_1hr.pac-\n"), text);
  }

  // Kill the HTTP server.
  ASSERT_TRUE(test_server_.Stop());

  // Try to fetch the file again. Since the server is not running anymore, the
  // call should fail, thus indicating that the file was not fetched from the
  // local cache.
  {
    base::string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher.Fetch(url, &text, callback.callback());
    EXPECT_EQ(ERR_IO_PENDING, result);

    // Expect any error. The exact error varies by platform.
    EXPECT_NE(OK, callback.WaitForResult());
  }
}

TEST_F(ProxyScriptFetcherImplTest, TooLarge) {
  ASSERT_TRUE(test_server_.Start());

  ProxyScriptFetcherImpl pac_fetcher(&context_);

  // Set the maximum response size to 50 bytes.
  int prev_size = pac_fetcher.SetSizeConstraint(50);

  // These two URLs are the same file, but are http:// vs file://
  GURL urls[] = {
    test_server_.GetURL("files/large-pac.nsproxy"),
#if !defined(DISABLE_FILE_SUPPORT)
    GetTestFileUrl("large-pac.nsproxy")
#endif
  };

  // Try fetching URLs that are 101 bytes large. We should abort the request
  // after 50 bytes have been read, and fail with a too large error.
  for (size_t i = 0; i < arraysize(urls); ++i) {
    const GURL& url = urls[i];
    base::string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher.Fetch(url, &text, callback.callback());
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(ERR_FILE_TOO_BIG, callback.WaitForResult());
    EXPECT_TRUE(text.empty());
  }

  // Restore the original size bound.
  pac_fetcher.SetSizeConstraint(prev_size);

  {  // Make sure we can still fetch regular URLs.
    GURL url(test_server_.GetURL("files/pac.nsproxy"));
    base::string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher.Fetch(url, &text, callback.callback());
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(OK, callback.WaitForResult());
    EXPECT_EQ(ASCIIToUTF16("-pac.nsproxy-\n"), text);
  }
}

TEST_F(ProxyScriptFetcherImplTest, Hang) {
  ASSERT_TRUE(test_server_.Start());

  ProxyScriptFetcherImpl pac_fetcher(&context_);

  // Set the timeout period to 0.5 seconds.
  base::TimeDelta prev_timeout = pac_fetcher.SetTimeoutConstraint(
      base::TimeDelta::FromMilliseconds(500));

  // Try fetching a URL which takes 1.2 seconds. We should abort the request
  // after 500 ms, and fail with a timeout error.
  {
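    // The spawned test server's "slow" handler delays its response by the
    // number of seconds given in the query string (1.2s here), which is
    // longer than the 500 ms timeout set above.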
    GURL url(test_server_.GetURL("slow/proxy.pac?1.2"));
    base::string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher.Fetch(url, &text, callback.callback());
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(ERR_TIMED_OUT, callback.WaitForResult());
    EXPECT_TRUE(text.empty());
  }

  // Restore the original timeout period.
  pac_fetcher.SetTimeoutConstraint(prev_timeout);

  {  // Make sure we can still fetch regular URLs.
    GURL url(test_server_.GetURL("files/pac.nsproxy"));
    base::string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher.Fetch(url, &text, callback.callback());
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(OK, callback.WaitForResult());
    EXPECT_EQ(ASCIIToUTF16("-pac.nsproxy-\n"), text);
  }
}

// The ProxyScriptFetcher should decode any content-codings
// (like gzip, bzip, etc.), and apply any charset conversions so that the
// resulting text is UTF-16.
TEST_F(ProxyScriptFetcherImplTest, Encodings) {
  ASSERT_TRUE(test_server_.Start());

  ProxyScriptFetcherImpl pac_fetcher(&context_);

  // Test a response that is gzip-encoded -- should get inflated.
  {
    GURL url(test_server_.GetURL("files/gzipped_pac"));
    base::string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher.Fetch(url, &text, callback.callback());
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(OK, callback.WaitForResult());
    EXPECT_EQ(ASCIIToUTF16("This data was gzipped.\n"), text);
  }

  // Test a response that was served as UTF-16 (BE). It should
  // be converted to the fetcher's native UTF-16 output.
  {
    GURL url(test_server_.GetURL("files/utf16be_pac"));
    base::string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher.Fetch(url, &text, callback.callback());
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(OK, callback.WaitForResult());
    EXPECT_EQ(ASCIIToUTF16("This was encoded as UTF-16BE.\n"), text);
  }
}

TEST_F(ProxyScriptFetcherImplTest, DataURLs) {
  ProxyScriptFetcherImpl pac_fetcher(&context_);

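  // kEncodedUrl is a data: URL whose base64 payload decodes to exactly the
  // script given by kPacScript below.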
  const char kEncodedUrl[] =
      "data:application/x-ns-proxy-autoconfig;base64,ZnVuY3Rpb24gRmluZFByb3h5R"
      "m9yVVJMKHVybCwgaG9zdCkgewogIGlmIChob3N0ID09ICdmb29iYXIuY29tJykKICAgIHJl"
      "dHVybiAnUFJPWFkgYmxhY2tob2xlOjgwJzsKICByZXR1cm4gJ0RJUkVDVCc7Cn0=";
  const char kPacScript[] =
      "function FindProxyForURL(url, host) {\n"
      "  if (host == 'foobar.com')\n"
      "    return 'PROXY blackhole:80';\n"
      "  return 'DIRECT';\n"
      "}";

  // Test fetching a "data:"-url containing a base64 encoded PAC script.
  {
    GURL url(kEncodedUrl);
    base::string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher.Fetch(url, &text, callback.callback());
    EXPECT_EQ(OK, result);
    EXPECT_EQ(ASCIIToUTF16(kPacScript), text);
  }

  const char kEncodedUrlBroken[] =
      "data:application/x-ns-proxy-autoconfig;base64,ZnVuY3Rpb24gRmluZFByb3h5R";

  // Test a broken "data:"-url containing a base64 encoded PAC script.
  {
    GURL url(kEncodedUrlBroken);
    base::string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher.Fetch(url, &text, callback.callback());
    EXPECT_EQ(ERR_FAILED, result);
  }
}

}  // namespace net