OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "net/proxy/proxy_script_fetcher_impl.h" | 5 #include "net/proxy/proxy_script_fetcher_impl.h" |
6 | 6 |
7 #include <string> | 7 #include <string> |
8 | 8 |
9 #include "base/compiler_specific.h" | 9 #include "base/compiler_specific.h" |
10 #include "base/files/file_path.h" | 10 #include "base/files/file_path.h" |
(...skipping 60 matching lines...) |
71 scoped_refptr<HttpNetworkSession> network_session( | 71 scoped_refptr<HttpNetworkSession> network_session( |
72 new HttpNetworkSession(params)); | 72 new HttpNetworkSession(params)); |
73 storage_.set_http_transaction_factory(new HttpCache( | 73 storage_.set_http_transaction_factory(new HttpCache( |
74 network_session.get(), HttpCache::DefaultBackend::InMemory(0))); | 74 network_session.get(), HttpCache::DefaultBackend::InMemory(0))); |
75 URLRequestJobFactoryImpl* job_factory = new URLRequestJobFactoryImpl(); | 75 URLRequestJobFactoryImpl* job_factory = new URLRequestJobFactoryImpl(); |
76 job_factory->SetProtocolHandler( | 76 job_factory->SetProtocolHandler( |
77 "file", new FileProtocolHandler(base::MessageLoopProxy::current())); | 77 "file", new FileProtocolHandler(base::MessageLoopProxy::current())); |
78 storage_.set_job_factory(job_factory); | 78 storage_.set_job_factory(job_factory); |
79 } | 79 } |
80 | 80 |
81 virtual ~RequestContext() { | 81 virtual ~RequestContext() {} |
82 } | |
83 | 82 |
84 private: | 83 private: |
85 URLRequestContextStorage storage_; | 84 URLRequestContextStorage storage_; |
86 }; | 85 }; |
87 | 86 |
88 // Get a file:// url relative to net/data/proxy/proxy_script_fetcher_unittest. | 87 // Get a file:// url relative to net/data/proxy/proxy_script_fetcher_unittest. |
89 GURL GetTestFileUrl(const std::string& relpath) { | 88 GURL GetTestFileUrl(const std::string& relpath) { |
90 base::FilePath path; | 89 base::FilePath path; |
91 PathService::Get(base::DIR_SOURCE_ROOT, &path); | 90 PathService::Get(base::DIR_SOURCE_ROOT, &path); |
92 path = path.AppendASCII("net"); | 91 path = path.AppendASCII("net"); |
(...skipping 102 matching lines...) |
195 | 194 |
196 protected: | 195 protected: |
197 SpawnedTestServer test_server_; | 196 SpawnedTestServer test_server_; |
198 BasicNetworkDelegate network_delegate_; | 197 BasicNetworkDelegate network_delegate_; |
199 RequestContext context_; | 198 RequestContext context_; |
200 }; | 199 }; |
201 | 200 |
202 TEST_F(ProxyScriptFetcherImplTest, FileUrl) { | 201 TEST_F(ProxyScriptFetcherImplTest, FileUrl) { |
203 ProxyScriptFetcherImpl pac_fetcher(&context_); | 202 ProxyScriptFetcherImpl pac_fetcher(&context_); |
204 | 203 |
205 { // Fetch a non-existent file. | 204 { // Fetch a non-existent file. |
206 base::string16 text; | 205 base::string16 text; |
207 TestCompletionCallback callback; | 206 TestCompletionCallback callback; |
208 int result = pac_fetcher.Fetch(GetTestFileUrl("does-not-exist"), | 207 int result = pac_fetcher.Fetch( |
209 &text, callback.callback()); | 208 GetTestFileUrl("does-not-exist"), &text, callback.callback()); |
210 EXPECT_EQ(ERR_IO_PENDING, result); | 209 EXPECT_EQ(ERR_IO_PENDING, result); |
211 EXPECT_EQ(ERR_FILE_NOT_FOUND, callback.WaitForResult()); | 210 EXPECT_EQ(ERR_FILE_NOT_FOUND, callback.WaitForResult()); |
212 EXPECT_TRUE(text.empty()); | 211 EXPECT_TRUE(text.empty()); |
213 } | 212 } |
214 { // Fetch a file that exists. | 213 { // Fetch a file that exists. |
215 base::string16 text; | 214 base::string16 text; |
216 TestCompletionCallback callback; | 215 TestCompletionCallback callback; |
217 int result = pac_fetcher.Fetch(GetTestFileUrl("pac.txt"), | 216 int result = pac_fetcher.Fetch( |
218 &text, callback.callback()); | 217 GetTestFileUrl("pac.txt"), &text, callback.callback()); |
219 EXPECT_EQ(ERR_IO_PENDING, result); | 218 EXPECT_EQ(ERR_IO_PENDING, result); |
220 EXPECT_EQ(OK, callback.WaitForResult()); | 219 EXPECT_EQ(OK, callback.WaitForResult()); |
221 EXPECT_EQ(ASCIIToUTF16("-pac.txt-\n"), text); | 220 EXPECT_EQ(ASCIIToUTF16("-pac.txt-\n"), text); |
222 } | 221 } |
223 } | 222 } |
224 | 223 |
225 // Note that all mime types are allowed for PAC file, to be consistent | 224 // Note that all mime types are allowed for PAC file, to be consistent |
226 // with other browsers. | 225 // with other browsers. |
227 TEST_F(ProxyScriptFetcherImplTest, HttpMimeType) { | 226 TEST_F(ProxyScriptFetcherImplTest, HttpMimeType) { |
228 ASSERT_TRUE(test_server_.Start()); | 227 ASSERT_TRUE(test_server_.Start()); |
229 | 228 |
230 ProxyScriptFetcherImpl pac_fetcher(&context_); | 229 ProxyScriptFetcherImpl pac_fetcher(&context_); |
231 | 230 |
232 { // Fetch a PAC with mime type "text/plain" | 231 { // Fetch a PAC with mime type "text/plain" |
233 GURL url(test_server_.GetURL("files/pac.txt")); | 232 GURL url(test_server_.GetURL("files/pac.txt")); |
234 base::string16 text; | 233 base::string16 text; |
235 TestCompletionCallback callback; | 234 TestCompletionCallback callback; |
236 int result = pac_fetcher.Fetch(url, &text, callback.callback()); | 235 int result = pac_fetcher.Fetch(url, &text, callback.callback()); |
237 EXPECT_EQ(ERR_IO_PENDING, result); | 236 EXPECT_EQ(ERR_IO_PENDING, result); |
238 EXPECT_EQ(OK, callback.WaitForResult()); | 237 EXPECT_EQ(OK, callback.WaitForResult()); |
239 EXPECT_EQ(ASCIIToUTF16("-pac.txt-\n"), text); | 238 EXPECT_EQ(ASCIIToUTF16("-pac.txt-\n"), text); |
240 } | 239 } |
241 { // Fetch a PAC with mime type "text/html" | 240 { // Fetch a PAC with mime type "text/html" |
242 GURL url(test_server_.GetURL("files/pac.html")); | 241 GURL url(test_server_.GetURL("files/pac.html")); |
243 base::string16 text; | 242 base::string16 text; |
244 TestCompletionCallback callback; | 243 TestCompletionCallback callback; |
245 int result = pac_fetcher.Fetch(url, &text, callback.callback()); | 244 int result = pac_fetcher.Fetch(url, &text, callback.callback()); |
246 EXPECT_EQ(ERR_IO_PENDING, result); | 245 EXPECT_EQ(ERR_IO_PENDING, result); |
247 EXPECT_EQ(OK, callback.WaitForResult()); | 246 EXPECT_EQ(OK, callback.WaitForResult()); |
248 EXPECT_EQ(ASCIIToUTF16("-pac.html-\n"), text); | 247 EXPECT_EQ(ASCIIToUTF16("-pac.html-\n"), text); |
249 } | 248 } |
250 { // Fetch a PAC with mime type "application/x-ns-proxy-autoconfig" | 249 { // Fetch a PAC with mime type "application/x-ns-proxy-autoconfig" |
251 GURL url(test_server_.GetURL("files/pac.nsproxy")); | 250 GURL url(test_server_.GetURL("files/pac.nsproxy")); |
252 base::string16 text; | 251 base::string16 text; |
253 TestCompletionCallback callback; | 252 TestCompletionCallback callback; |
254 int result = pac_fetcher.Fetch(url, &text, callback.callback()); | 253 int result = pac_fetcher.Fetch(url, &text, callback.callback()); |
255 EXPECT_EQ(ERR_IO_PENDING, result); | 254 EXPECT_EQ(ERR_IO_PENDING, result); |
256 EXPECT_EQ(OK, callback.WaitForResult()); | 255 EXPECT_EQ(OK, callback.WaitForResult()); |
257 EXPECT_EQ(ASCIIToUTF16("-pac.nsproxy-\n"), text); | 256 EXPECT_EQ(ASCIIToUTF16("-pac.nsproxy-\n"), text); |
258 } | 257 } |
259 } | 258 } |
260 | 259 |
261 TEST_F(ProxyScriptFetcherImplTest, HttpStatusCode) { | 260 TEST_F(ProxyScriptFetcherImplTest, HttpStatusCode) { |
262 ASSERT_TRUE(test_server_.Start()); | 261 ASSERT_TRUE(test_server_.Start()); |
263 | 262 |
264 ProxyScriptFetcherImpl pac_fetcher(&context_); | 263 ProxyScriptFetcherImpl pac_fetcher(&context_); |
265 | 264 |
266 { // Fetch a PAC which gives a 500 -- FAIL | 265 { // Fetch a PAC which gives a 500 -- FAIL |
267 GURL url(test_server_.GetURL("files/500.pac")); | 266 GURL url(test_server_.GetURL("files/500.pac")); |
268 base::string16 text; | 267 base::string16 text; |
269 TestCompletionCallback callback; | 268 TestCompletionCallback callback; |
270 int result = pac_fetcher.Fetch(url, &text, callback.callback()); | 269 int result = pac_fetcher.Fetch(url, &text, callback.callback()); |
271 EXPECT_EQ(ERR_IO_PENDING, result); | 270 EXPECT_EQ(ERR_IO_PENDING, result); |
272 EXPECT_EQ(ERR_PAC_STATUS_NOT_OK, callback.WaitForResult()); | 271 EXPECT_EQ(ERR_PAC_STATUS_NOT_OK, callback.WaitForResult()); |
273 EXPECT_TRUE(text.empty()); | 272 EXPECT_TRUE(text.empty()); |
274 } | 273 } |
275 { // Fetch a PAC which gives a 404 -- FAIL | 274 { // Fetch a PAC which gives a 404 -- FAIL |
276 GURL url(test_server_.GetURL("files/404.pac")); | 275 GURL url(test_server_.GetURL("files/404.pac")); |
277 base::string16 text; | 276 base::string16 text; |
278 TestCompletionCallback callback; | 277 TestCompletionCallback callback; |
279 int result = pac_fetcher.Fetch(url, &text, callback.callback()); | 278 int result = pac_fetcher.Fetch(url, &text, callback.callback()); |
280 EXPECT_EQ(ERR_IO_PENDING, result); | 279 EXPECT_EQ(ERR_IO_PENDING, result); |
281 EXPECT_EQ(ERR_PAC_STATUS_NOT_OK, callback.WaitForResult()); | 280 EXPECT_EQ(ERR_PAC_STATUS_NOT_OK, callback.WaitForResult()); |
282 EXPECT_TRUE(text.empty()); | 281 EXPECT_TRUE(text.empty()); |
283 } | 282 } |
284 } | 283 } |
285 | 284 |
(...skipping 49 matching lines...) |
335 | 334 |
336 TEST_F(ProxyScriptFetcherImplTest, TooLarge) { | 335 TEST_F(ProxyScriptFetcherImplTest, TooLarge) { |
337 ASSERT_TRUE(test_server_.Start()); | 336 ASSERT_TRUE(test_server_.Start()); |
338 | 337 |
339 ProxyScriptFetcherImpl pac_fetcher(&context_); | 338 ProxyScriptFetcherImpl pac_fetcher(&context_); |
340 | 339 |
341 // Set the maximum response size to 50 bytes. | 340 // Set the maximum response size to 50 bytes. |
342 int prev_size = pac_fetcher.SetSizeConstraint(50); | 341 int prev_size = pac_fetcher.SetSizeConstraint(50); |
343 | 342 |
344 // These two URLs are the same file, but are http:// vs file:// | 343 // These two URLs are the same file, but are http:// vs file:// |
345 GURL urls[] = { | 344 GURL urls[] = {test_server_.GetURL("files/large-pac.nsproxy"), |
346 test_server_.GetURL("files/large-pac.nsproxy"), | 345 GetTestFileUrl("large-pac.nsproxy")}; |
347 GetTestFileUrl("large-pac.nsproxy") | |
348 }; | |
349 | 346 |
350 // Try fetching URLs that are 101 bytes large. We should abort the request | 347 // Try fetching URLs that are 101 bytes large. We should abort the request |
351 // after 50 bytes have been read, and fail with a too large error. | 348 // after 50 bytes have been read, and fail with a too large error. |
352 for (size_t i = 0; i < arraysize(urls); ++i) { | 349 for (size_t i = 0; i < arraysize(urls); ++i) { |
353 const GURL& url = urls[i]; | 350 const GURL& url = urls[i]; |
354 base::string16 text; | 351 base::string16 text; |
355 TestCompletionCallback callback; | 352 TestCompletionCallback callback; |
356 int result = pac_fetcher.Fetch(url, &text, callback.callback()); | 353 int result = pac_fetcher.Fetch(url, &text, callback.callback()); |
357 EXPECT_EQ(ERR_IO_PENDING, result); | 354 EXPECT_EQ(ERR_IO_PENDING, result); |
358 EXPECT_EQ(ERR_FILE_TOO_BIG, callback.WaitForResult()); | 355 EXPECT_EQ(ERR_FILE_TOO_BIG, callback.WaitForResult()); |
359 EXPECT_TRUE(text.empty()); | 356 EXPECT_TRUE(text.empty()); |
360 } | 357 } |
361 | 358 |
362 // Restore the original size bound. | 359 // Restore the original size bound. |
363 pac_fetcher.SetSizeConstraint(prev_size); | 360 pac_fetcher.SetSizeConstraint(prev_size); |
364 | 361 |
365 { // Make sure we can still fetch regular URLs. | 362 { // Make sure we can still fetch regular URLs. |
366 GURL url(test_server_.GetURL("files/pac.nsproxy")); | 363 GURL url(test_server_.GetURL("files/pac.nsproxy")); |
367 base::string16 text; | 364 base::string16 text; |
368 TestCompletionCallback callback; | 365 TestCompletionCallback callback; |
369 int result = pac_fetcher.Fetch(url, &text, callback.callback()); | 366 int result = pac_fetcher.Fetch(url, &text, callback.callback()); |
370 EXPECT_EQ(ERR_IO_PENDING, result); | 367 EXPECT_EQ(ERR_IO_PENDING, result); |
371 EXPECT_EQ(OK, callback.WaitForResult()); | 368 EXPECT_EQ(OK, callback.WaitForResult()); |
372 EXPECT_EQ(ASCIIToUTF16("-pac.nsproxy-\n"), text); | 369 EXPECT_EQ(ASCIIToUTF16("-pac.nsproxy-\n"), text); |
373 } | 370 } |
374 } | 371 } |
375 | 372 |
376 TEST_F(ProxyScriptFetcherImplTest, Hang) { | 373 TEST_F(ProxyScriptFetcherImplTest, Hang) { |
377 ASSERT_TRUE(test_server_.Start()); | 374 ASSERT_TRUE(test_server_.Start()); |
378 | 375 |
379 ProxyScriptFetcherImpl pac_fetcher(&context_); | 376 ProxyScriptFetcherImpl pac_fetcher(&context_); |
380 | 377 |
381 // Set the timeout period to 0.5 seconds. | 378 // Set the timeout period to 0.5 seconds. |
382 base::TimeDelta prev_timeout = pac_fetcher.SetTimeoutConstraint( | 379 base::TimeDelta prev_timeout = |
383 base::TimeDelta::FromMilliseconds(500)); | 380 pac_fetcher.SetTimeoutConstraint(base::TimeDelta::FromMilliseconds(500)); |
384 | 381 |
385 // Try fetching a URL which takes 1.2 seconds. We should abort the request | 382 // Try fetching a URL which takes 1.2 seconds. We should abort the request |
386 // after 500 ms, and fail with a timeout error. | 383 // after 500 ms, and fail with a timeout error. |
387 { | 384 { |
388 GURL url(test_server_.GetURL("slow/proxy.pac?1.2")); | 385 GURL url(test_server_.GetURL("slow/proxy.pac?1.2")); |
389 base::string16 text; | 386 base::string16 text; |
390 TestCompletionCallback callback; | 387 TestCompletionCallback callback; |
391 int result = pac_fetcher.Fetch(url, &text, callback.callback()); | 388 int result = pac_fetcher.Fetch(url, &text, callback.callback()); |
392 EXPECT_EQ(ERR_IO_PENDING, result); | 389 EXPECT_EQ(ERR_IO_PENDING, result); |
393 EXPECT_EQ(ERR_TIMED_OUT, callback.WaitForResult()); | 390 EXPECT_EQ(ERR_TIMED_OUT, callback.WaitForResult()); |
394 EXPECT_TRUE(text.empty()); | 391 EXPECT_TRUE(text.empty()); |
395 } | 392 } |
396 | 393 |
397 // Restore the original timeout period. | 394 // Restore the original timeout period. |
398 pac_fetcher.SetTimeoutConstraint(prev_timeout); | 395 pac_fetcher.SetTimeoutConstraint(prev_timeout); |
399 | 396 |
400 { // Make sure we can still fetch regular URLs. | 397 { // Make sure we can still fetch regular URLs. |
401 GURL url(test_server_.GetURL("files/pac.nsproxy")); | 398 GURL url(test_server_.GetURL("files/pac.nsproxy")); |
402 base::string16 text; | 399 base::string16 text; |
403 TestCompletionCallback callback; | 400 TestCompletionCallback callback; |
404 int result = pac_fetcher.Fetch(url, &text, callback.callback()); | 401 int result = pac_fetcher.Fetch(url, &text, callback.callback()); |
405 EXPECT_EQ(ERR_IO_PENDING, result); | 402 EXPECT_EQ(ERR_IO_PENDING, result); |
406 EXPECT_EQ(OK, callback.WaitForResult()); | 403 EXPECT_EQ(OK, callback.WaitForResult()); |
407 EXPECT_EQ(ASCIIToUTF16("-pac.nsproxy-\n"), text); | 404 EXPECT_EQ(ASCIIToUTF16("-pac.nsproxy-\n"), text); |
408 } | 405 } |
409 } | 406 } |
410 | 407 |
(...skipping 60 matching lines...) |
471 { | 468 { |
472 GURL url(kEncodedUrlBroken); | 469 GURL url(kEncodedUrlBroken); |
473 base::string16 text; | 470 base::string16 text; |
474 TestCompletionCallback callback; | 471 TestCompletionCallback callback; |
475 int result = pac_fetcher.Fetch(url, &text, callback.callback()); | 472 int result = pac_fetcher.Fetch(url, &text, callback.callback()); |
476 EXPECT_EQ(ERR_FAILED, result); | 473 EXPECT_EQ(ERR_FAILED, result); |
477 } | 474 } |
478 } | 475 } |
479 | 476 |
480 } // namespace net | 477 } // namespace net |