Chromium Code Reviews

Side by Side Diff: net/proxy/proxy_script_fetcher_impl_unittest.cc

Issue 6831025: Adds support for the DHCP portion of the WPAD (proxy auto-discovery) protocol. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: Add timeout on Win32 DHCP API. Created 9 years, 8 months ago
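This patch set also changes the ProxyScriptFetcherImpl interface exercised by these tests: instead of passing the URL to Fetch() along with the output buffer and callback, callers now bind the URL first with SetURL() and then call Fetch(&text, &callback). A minimal sketch of the updated call pattern, assuming only the interface visible in this diff (the PAC URL below is illustrative, not from the patch):

    scoped_refptr<URLRequestContext> context(new RequestContext);
    ProxyScriptFetcherImpl pac_fetcher(context);

    string16 text;
    TestCompletionCallback callback;

    // New two-step pattern: bind the URL to the fetcher, then start the fetch.
    pac_fetcher.SetURL(GURL("http://example.test/proxy.pac"));
    int result = pac_fetcher.Fetch(&text, &callback);

    // Network fetches complete asynchronously; block on the test callback.
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(OK, callback.WaitForResult());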
OLD | NEW
1 // Copyright (c) 2011 The Chromium Authors. All rights reserved. 1 // Copyright (c) 2011 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "net/proxy/proxy_script_fetcher_impl.h" 5 #include "net/proxy/proxy_script_fetcher_impl.h"
6 6
7 #include <string> 7 #include <string>
8 8
9 #include "base/file_path.h" 9 #include "base/file_path.h"
10 #include "base/compiler_specific.h" 10 #include "base/compiler_specific.h"
(...skipping 84 matching lines...)
95 TestServer test_server_; 95 TestServer test_server_;
96 }; 96 };
97 97
98 TEST_F(ProxyScriptFetcherImplTest, FileUrl) { 98 TEST_F(ProxyScriptFetcherImplTest, FileUrl) {
99 scoped_refptr<URLRequestContext> context(new RequestContext); 99 scoped_refptr<URLRequestContext> context(new RequestContext);
100 ProxyScriptFetcherImpl pac_fetcher(context); 100 ProxyScriptFetcherImpl pac_fetcher(context);
101 101
102 { // Fetch a non-existent file. 102 { // Fetch a non-existent file.
103 string16 text; 103 string16 text;
104 TestCompletionCallback callback; 104 TestCompletionCallback callback;
105 int result = pac_fetcher.Fetch(GetTestFileUrl("does-not-exist"), 105 pac_fetcher.SetURL(GetTestFileUrl("does-not-exist"));
106 &text, &callback); 106 int result = pac_fetcher.Fetch(&text, &callback);
107 EXPECT_EQ(ERR_IO_PENDING, result); 107 EXPECT_EQ(ERR_IO_PENDING, result);
108 EXPECT_EQ(ERR_FILE_NOT_FOUND, callback.WaitForResult()); 108 EXPECT_EQ(ERR_FILE_NOT_FOUND, callback.WaitForResult());
109 EXPECT_TRUE(text.empty()); 109 EXPECT_TRUE(text.empty());
110 } 110 }
111 { // Fetch a file that exists. 111 { // Fetch a file that exists.
112 string16 text; 112 string16 text;
113 TestCompletionCallback callback; 113 TestCompletionCallback callback;
114 int result = pac_fetcher.Fetch(GetTestFileUrl("pac.txt"), 114 pac_fetcher.SetURL(GetTestFileUrl("pac.txt"));
115 &text, &callback); 115 int result = pac_fetcher.Fetch(&text, &callback);
116 EXPECT_EQ(ERR_IO_PENDING, result); 116 EXPECT_EQ(ERR_IO_PENDING, result);
117 EXPECT_EQ(OK, callback.WaitForResult()); 117 EXPECT_EQ(OK, callback.WaitForResult());
118 EXPECT_EQ(ASCIIToUTF16("-pac.txt-\n"), text); 118 EXPECT_EQ(ASCIIToUTF16("-pac.txt-\n"), text);
119 } 119 }
120 } 120 }
121 121
122 // Note that all mime types are allowed for PAC files, to be consistent 122 // Note that all mime types are allowed for PAC files, to be consistent
123 // with other browsers. 123 // with other browsers.
124 TEST_F(ProxyScriptFetcherImplTest, HttpMimeType) { 124 TEST_F(ProxyScriptFetcherImplTest, HttpMimeType) {
125 ASSERT_TRUE(test_server_.Start()); 125 ASSERT_TRUE(test_server_.Start());
126 126
127 scoped_refptr<URLRequestContext> context(new RequestContext); 127 scoped_refptr<URLRequestContext> context(new RequestContext);
128 ProxyScriptFetcherImpl pac_fetcher(context); 128 ProxyScriptFetcherImpl pac_fetcher(context);
129 129
130 { // Fetch a PAC with mime type "text/plain" 130 { // Fetch a PAC with mime type "text/plain"
131 GURL url(test_server_.GetURL("files/pac.txt")); 131 GURL url(test_server_.GetURL("files/pac.txt"));
132 string16 text; 132 string16 text;
133 TestCompletionCallback callback; 133 TestCompletionCallback callback;
134 int result = pac_fetcher.Fetch(url, &text, &callback); 134 pac_fetcher.SetURL(url);
135 int result = pac_fetcher.Fetch(&text, &callback);
135 EXPECT_EQ(ERR_IO_PENDING, result); 136 EXPECT_EQ(ERR_IO_PENDING, result);
136 EXPECT_EQ(OK, callback.WaitForResult()); 137 EXPECT_EQ(OK, callback.WaitForResult());
137 EXPECT_EQ(ASCIIToUTF16("-pac.txt-\n"), text); 138 EXPECT_EQ(ASCIIToUTF16("-pac.txt-\n"), text);
138 } 139 }
139 { // Fetch a PAC with mime type "text/html" 140 { // Fetch a PAC with mime type "text/html"
140 GURL url(test_server_.GetURL("files/pac.html")); 141 GURL url(test_server_.GetURL("files/pac.html"));
141 string16 text; 142 string16 text;
142 TestCompletionCallback callback; 143 TestCompletionCallback callback;
143 int result = pac_fetcher.Fetch(url, &text, &callback); 144 pac_fetcher.SetURL(url);
145 int result = pac_fetcher.Fetch(&text, &callback);
144 EXPECT_EQ(ERR_IO_PENDING, result); 146 EXPECT_EQ(ERR_IO_PENDING, result);
145 EXPECT_EQ(OK, callback.WaitForResult()); 147 EXPECT_EQ(OK, callback.WaitForResult());
146 EXPECT_EQ(ASCIIToUTF16("-pac.html-\n"), text); 148 EXPECT_EQ(ASCIIToUTF16("-pac.html-\n"), text);
147 } 149 }
148 { // Fetch a PAC with mime type "application/x-ns-proxy-autoconfig" 150 { // Fetch a PAC with mime type "application/x-ns-proxy-autoconfig"
149 GURL url(test_server_.GetURL("files/pac.nsproxy")); 151 GURL url(test_server_.GetURL("files/pac.nsproxy"));
150 string16 text; 152 string16 text;
151 TestCompletionCallback callback; 153 TestCompletionCallback callback;
152 int result = pac_fetcher.Fetch(url, &text, &callback); 154 pac_fetcher.SetURL(url);
155 int result = pac_fetcher.Fetch(&text, &callback);
153 EXPECT_EQ(ERR_IO_PENDING, result); 156 EXPECT_EQ(ERR_IO_PENDING, result);
154 EXPECT_EQ(OK, callback.WaitForResult()); 157 EXPECT_EQ(OK, callback.WaitForResult());
155 EXPECT_EQ(ASCIIToUTF16("-pac.nsproxy-\n"), text); 158 EXPECT_EQ(ASCIIToUTF16("-pac.nsproxy-\n"), text);
156 } 159 }
157 } 160 }
158 161
159 TEST_F(ProxyScriptFetcherImplTest, HttpStatusCode) { 162 TEST_F(ProxyScriptFetcherImplTest, HttpStatusCode) {
160 ASSERT_TRUE(test_server_.Start()); 163 ASSERT_TRUE(test_server_.Start());
161 164
162 scoped_refptr<URLRequestContext> context(new RequestContext); 165 scoped_refptr<URLRequestContext> context(new RequestContext);
163 ProxyScriptFetcherImpl pac_fetcher(context); 166 ProxyScriptFetcherImpl pac_fetcher(context);
164 167
165 { // Fetch a PAC which gives a 500 -- FAIL 168 { // Fetch a PAC which gives a 500 -- FAIL
166 GURL url(test_server_.GetURL("files/500.pac")); 169 GURL url(test_server_.GetURL("files/500.pac"));
167 string16 text; 170 string16 text;
168 TestCompletionCallback callback; 171 TestCompletionCallback callback;
169 int result = pac_fetcher.Fetch(url, &text, &callback); 172 pac_fetcher.SetURL(url);
173 int result = pac_fetcher.Fetch(&text, &callback);
170 EXPECT_EQ(ERR_IO_PENDING, result); 174 EXPECT_EQ(ERR_IO_PENDING, result);
171 EXPECT_EQ(ERR_PAC_STATUS_NOT_OK, callback.WaitForResult()); 175 EXPECT_EQ(ERR_PAC_STATUS_NOT_OK, callback.WaitForResult());
172 EXPECT_TRUE(text.empty()); 176 EXPECT_TRUE(text.empty());
173 } 177 }
174 { // Fetch a PAC which gives a 404 -- FAIL 178 { // Fetch a PAC which gives a 404 -- FAIL
175 GURL url(test_server_.GetURL("files/404.pac")); 179 GURL url(test_server_.GetURL("files/404.pac"));
176 string16 text; 180 string16 text;
177 TestCompletionCallback callback; 181 TestCompletionCallback callback;
178 int result = pac_fetcher.Fetch(url, &text, &callback); 182 pac_fetcher.SetURL(url);
183 int result = pac_fetcher.Fetch(&text, &callback);
179 EXPECT_EQ(ERR_IO_PENDING, result); 184 EXPECT_EQ(ERR_IO_PENDING, result);
180 EXPECT_EQ(ERR_PAC_STATUS_NOT_OK, callback.WaitForResult()); 185 EXPECT_EQ(ERR_PAC_STATUS_NOT_OK, callback.WaitForResult());
181 EXPECT_TRUE(text.empty()); 186 EXPECT_TRUE(text.empty());
182 } 187 }
183 } 188 }
184 189
185 TEST_F(ProxyScriptFetcherImplTest, ContentDisposition) { 190 TEST_F(ProxyScriptFetcherImplTest, ContentDisposition) {
186 ASSERT_TRUE(test_server_.Start()); 191 ASSERT_TRUE(test_server_.Start());
187 192
188 scoped_refptr<URLRequestContext> context(new RequestContext); 193 scoped_refptr<URLRequestContext> context(new RequestContext);
189 ProxyScriptFetcherImpl pac_fetcher(context); 194 ProxyScriptFetcherImpl pac_fetcher(context);
190 195
191 // Fetch PAC scripts via HTTP with a Content-Disposition header -- should 196 // Fetch PAC scripts via HTTP with a Content-Disposition header -- should
192 // have no effect. 197 // have no effect.
193 GURL url(test_server_.GetURL("files/downloadable.pac")); 198 GURL url(test_server_.GetURL("files/downloadable.pac"));
194 string16 text; 199 string16 text;
195 TestCompletionCallback callback; 200 TestCompletionCallback callback;
196 int result = pac_fetcher.Fetch(url, &text, &callback); 201 pac_fetcher.SetURL(url);
202 int result = pac_fetcher.Fetch(&text, &callback);
197 EXPECT_EQ(ERR_IO_PENDING, result); 203 EXPECT_EQ(ERR_IO_PENDING, result);
198 EXPECT_EQ(OK, callback.WaitForResult()); 204 EXPECT_EQ(OK, callback.WaitForResult());
199 EXPECT_EQ(ASCIIToUTF16("-downloadable.pac-\n"), text); 205 EXPECT_EQ(ASCIIToUTF16("-downloadable.pac-\n"), text);
200 } 206 }
201 207
202 TEST_F(ProxyScriptFetcherImplTest, NoCache) { 208 TEST_F(ProxyScriptFetcherImplTest, NoCache) {
203 ASSERT_TRUE(test_server_.Start()); 209 ASSERT_TRUE(test_server_.Start());
204 210
205 scoped_refptr<URLRequestContext> context(new RequestContext); 211 scoped_refptr<URLRequestContext> context(new RequestContext);
206 ProxyScriptFetcherImpl pac_fetcher(context); 212 ProxyScriptFetcherImpl pac_fetcher(context);
207 213
208 // Fetch a PAC script whose HTTP headers make it cacheable for 1 hour. 214 // Fetch a PAC script whose HTTP headers make it cacheable for 1 hour.
209 GURL url(test_server_.GetURL("files/cacheable_1hr.pac")); 215 GURL url(test_server_.GetURL("files/cacheable_1hr.pac"));
210 { 216 {
211 string16 text; 217 string16 text;
212 TestCompletionCallback callback; 218 TestCompletionCallback callback;
213 int result = pac_fetcher.Fetch(url, &text, &callback); 219 pac_fetcher.SetURL(url);
220 int result = pac_fetcher.Fetch(&text, &callback);
214 EXPECT_EQ(ERR_IO_PENDING, result); 221 EXPECT_EQ(ERR_IO_PENDING, result);
215 EXPECT_EQ(OK, callback.WaitForResult()); 222 EXPECT_EQ(OK, callback.WaitForResult());
216 EXPECT_EQ(ASCIIToUTF16("-cacheable_1hr.pac-\n"), text); 223 EXPECT_EQ(ASCIIToUTF16("-cacheable_1hr.pac-\n"), text);
217 } 224 }
218 225
219 // Now kill the HTTP server. 226 // Now kill the HTTP server.
220 ASSERT_TRUE(test_server_.Stop()); 227 ASSERT_TRUE(test_server_.Stop());
221 228
222 // Try to fetch the file again -- it should fail, since the server is not 229 // Try to fetch the file again -- it should fail, since the server is not
223 // running anymore. (If it were instead being loaded from cache, we would 230 // running anymore. (If it were instead being loaded from cache, we would
224 // get a success.) 231 // get a success.)
225 { 232 {
226 string16 text; 233 string16 text;
227 TestCompletionCallback callback; 234 TestCompletionCallback callback;
228 int result = pac_fetcher.Fetch(url, &text, &callback); 235 pac_fetcher.SetURL(url);
236 int result = pac_fetcher.Fetch(&text, &callback);
229 EXPECT_EQ(ERR_IO_PENDING, result); 237 EXPECT_EQ(ERR_IO_PENDING, result);
230 EXPECT_EQ(ERR_CONNECTION_REFUSED, callback.WaitForResult()); 238 EXPECT_EQ(ERR_CONNECTION_REFUSED, callback.WaitForResult());
231 } 239 }
232 } 240 }
233 241
234 TEST_F(ProxyScriptFetcherImplTest, TooLarge) { 242 TEST_F(ProxyScriptFetcherImplTest, TooLarge) {
235 ASSERT_TRUE(test_server_.Start()); 243 ASSERT_TRUE(test_server_.Start());
236 244
237 scoped_refptr<URLRequestContext> context(new RequestContext); 245 scoped_refptr<URLRequestContext> context(new RequestContext);
238 ProxyScriptFetcherImpl pac_fetcher(context); 246 ProxyScriptFetcherImpl pac_fetcher(context);
239 247
240 // Set the maximum response size to 50 bytes. 248 // Set the maximum response size to 50 bytes.
241 int prev_size = pac_fetcher.SetSizeConstraint(50); 249 int prev_size = pac_fetcher.SetSizeConstraint(50);
242 250
243 // These two URLs are the same file, but are http:// vs file:// 251 // These two URLs are the same file, but are http:// vs file://
244 GURL urls[] = { 252 GURL urls[] = {
245 test_server_.GetURL("files/large-pac.nsproxy"), 253 test_server_.GetURL("files/large-pac.nsproxy"),
246 GetTestFileUrl("large-pac.nsproxy") 254 GetTestFileUrl("large-pac.nsproxy")
247 }; 255 };
248 256
249 // Try fetching URLs that are 101 bytes large. We should abort the request 257 // Try fetching URLs that are 101 bytes large. We should abort the request
250 // after 50 bytes have been read, and fail with a too large error. 258 // after 50 bytes have been read, and fail with a too large error.
251 for (size_t i = 0; i < arraysize(urls); ++i) { 259 for (size_t i = 0; i < arraysize(urls); ++i) {
252 const GURL& url = urls[i]; 260 const GURL& url = urls[i];
253 string16 text; 261 string16 text;
254 TestCompletionCallback callback; 262 TestCompletionCallback callback;
255 int result = pac_fetcher.Fetch(url, &text, &callback); 263 pac_fetcher.SetURL(url);
264 int result = pac_fetcher.Fetch(&text, &callback);
256 EXPECT_EQ(ERR_IO_PENDING, result); 265 EXPECT_EQ(ERR_IO_PENDING, result);
257 EXPECT_EQ(ERR_FILE_TOO_BIG, callback.WaitForResult()); 266 EXPECT_EQ(ERR_FILE_TOO_BIG, callback.WaitForResult());
258 EXPECT_TRUE(text.empty()); 267 EXPECT_TRUE(text.empty());
259 } 268 }
260 269
261 // Restore the original size bound. 270 // Restore the original size bound.
262 pac_fetcher.SetSizeConstraint(prev_size); 271 pac_fetcher.SetSizeConstraint(prev_size);
263 272
264 { // Make sure we can still fetch regular URLs. 273 { // Make sure we can still fetch regular URLs.
265 GURL url(test_server_.GetURL("files/pac.nsproxy")); 274 GURL url(test_server_.GetURL("files/pac.nsproxy"));
266 string16 text; 275 string16 text;
267 TestCompletionCallback callback; 276 TestCompletionCallback callback;
268 int result = pac_fetcher.Fetch(url, &text, &callback); 277 pac_fetcher.SetURL(url);
278 int result = pac_fetcher.Fetch(&text, &callback);
269 EXPECT_EQ(ERR_IO_PENDING, result); 279 EXPECT_EQ(ERR_IO_PENDING, result);
270 EXPECT_EQ(OK, callback.WaitForResult()); 280 EXPECT_EQ(OK, callback.WaitForResult());
271 EXPECT_EQ(ASCIIToUTF16("-pac.nsproxy-\n"), text); 281 EXPECT_EQ(ASCIIToUTF16("-pac.nsproxy-\n"), text);
272 } 282 }
273 } 283 }
274 284
275 TEST_F(ProxyScriptFetcherImplTest, Hang) { 285 TEST_F(ProxyScriptFetcherImplTest, Hang) {
276 ASSERT_TRUE(test_server_.Start()); 286 ASSERT_TRUE(test_server_.Start());
277 287
278 scoped_refptr<URLRequestContext> context(new RequestContext); 288 scoped_refptr<URLRequestContext> context(new RequestContext);
279 ProxyScriptFetcherImpl pac_fetcher(context); 289 ProxyScriptFetcherImpl pac_fetcher(context);
280 290
281 // Set the timeout period to 0.5 seconds. 291 // Set the timeout period to 0.5 seconds.
282 base::TimeDelta prev_timeout = pac_fetcher.SetTimeoutConstraint( 292 base::TimeDelta prev_timeout = pac_fetcher.SetTimeoutConstraint(
283 base::TimeDelta::FromMilliseconds(500)); 293 base::TimeDelta::FromMilliseconds(500));
284 294
285 // Try fetching a URL which takes 1.2 seconds. We should abort the request 295 // Try fetching a URL which takes 1.2 seconds. We should abort the request
286 // after 500 ms, and fail with a timeout error. 296 // after 500 ms, and fail with a timeout error.
287 { GURL url(test_server_.GetURL("slow/proxy.pac?1.2")); 297 { GURL url(test_server_.GetURL("slow/proxy.pac?1.2"));
288 string16 text; 298 string16 text;
289 TestCompletionCallback callback; 299 TestCompletionCallback callback;
290 int result = pac_fetcher.Fetch(url, &text, &callback); 300 pac_fetcher.SetURL(url);
301 int result = pac_fetcher.Fetch(&text, &callback);
291 EXPECT_EQ(ERR_IO_PENDING, result); 302 EXPECT_EQ(ERR_IO_PENDING, result);
292 EXPECT_EQ(ERR_TIMED_OUT, callback.WaitForResult()); 303 EXPECT_EQ(ERR_TIMED_OUT, callback.WaitForResult());
293 EXPECT_TRUE(text.empty()); 304 EXPECT_TRUE(text.empty());
294 } 305 }
295 306
296 // Restore the original timeout period. 307 // Restore the original timeout period.
297 pac_fetcher.SetTimeoutConstraint(prev_timeout); 308 pac_fetcher.SetTimeoutConstraint(prev_timeout);
298 309
299 { // Make sure we can still fetch regular URLs. 310 { // Make sure we can still fetch regular URLs.
300 GURL url(test_server_.GetURL("files/pac.nsproxy")); 311 GURL url(test_server_.GetURL("files/pac.nsproxy"));
301 string16 text; 312 string16 text;
302 TestCompletionCallback callback; 313 TestCompletionCallback callback;
303 int result = pac_fetcher.Fetch(url, &text, &callback); 314 pac_fetcher.SetURL(url);
315 int result = pac_fetcher.Fetch(&text, &callback);
304 EXPECT_EQ(ERR_IO_PENDING, result); 316 EXPECT_EQ(ERR_IO_PENDING, result);
305 EXPECT_EQ(OK, callback.WaitForResult()); 317 EXPECT_EQ(OK, callback.WaitForResult());
306 EXPECT_EQ(ASCIIToUTF16("-pac.nsproxy-\n"), text); 318 EXPECT_EQ(ASCIIToUTF16("-pac.nsproxy-\n"), text);
307 } 319 }
308 } 320 }
309 321
310 // The ProxyScriptFetcher should decode any content-codings 322 // The ProxyScriptFetcher should decode any content-codings
311 // (like gzip, bzip, etc.), and apply any charset conversions to yield 323 // (like gzip, bzip, etc.), and apply any charset conversions to yield
312 // UTF8. 324 // UTF8.
313 TEST_F(ProxyScriptFetcherImplTest, Encodings) { 325 TEST_F(ProxyScriptFetcherImplTest, Encodings) {
314 ASSERT_TRUE(test_server_.Start()); 326 ASSERT_TRUE(test_server_.Start());
315 327
316 scoped_refptr<URLRequestContext> context(new RequestContext); 328 scoped_refptr<URLRequestContext> context(new RequestContext);
317 ProxyScriptFetcherImpl pac_fetcher(context); 329 ProxyScriptFetcherImpl pac_fetcher(context);
318 330
319 // Test a response that is gzip-encoded -- should get inflated. 331 // Test a response that is gzip-encoded -- should get inflated.
320 { 332 {
321 GURL url(test_server_.GetURL("files/gzipped_pac")); 333 GURL url(test_server_.GetURL("files/gzipped_pac"));
322 string16 text; 334 string16 text;
323 TestCompletionCallback callback; 335 TestCompletionCallback callback;
324 int result = pac_fetcher.Fetch(url, &text, &callback); 336 pac_fetcher.SetURL(url);
337 int result = pac_fetcher.Fetch(&text, &callback);
325 EXPECT_EQ(ERR_IO_PENDING, result); 338 EXPECT_EQ(ERR_IO_PENDING, result);
326 EXPECT_EQ(OK, callback.WaitForResult()); 339 EXPECT_EQ(OK, callback.WaitForResult());
327 EXPECT_EQ(ASCIIToUTF16("This data was gzipped.\n"), text); 340 EXPECT_EQ(ASCIIToUTF16("This data was gzipped.\n"), text);
328 } 341 }
329 342
330 // Test a response that was served as UTF-16 (BE). It should 343 // Test a response that was served as UTF-16 (BE). It should
331 // be converted to UTF8. 344 // be converted to UTF8.
332 { 345 {
333 GURL url(test_server_.GetURL("files/utf16be_pac")); 346 GURL url(test_server_.GetURL("files/utf16be_pac"));
334 string16 text; 347 string16 text;
335 TestCompletionCallback callback; 348 TestCompletionCallback callback;
336 int result = pac_fetcher.Fetch(url, &text, &callback); 349 pac_fetcher.SetURL(url);
350 int result = pac_fetcher.Fetch(&text, &callback);
337 EXPECT_EQ(ERR_IO_PENDING, result); 351 EXPECT_EQ(ERR_IO_PENDING, result);
338 EXPECT_EQ(OK, callback.WaitForResult()); 352 EXPECT_EQ(OK, callback.WaitForResult());
339 EXPECT_EQ(ASCIIToUTF16("This was encoded as UTF-16BE.\n"), text); 353 EXPECT_EQ(ASCIIToUTF16("This was encoded as UTF-16BE.\n"), text);
340 } 354 }
341 } 355 }
342 356
343 TEST_F(ProxyScriptFetcherImplTest, DataURLs) { 357 TEST_F(ProxyScriptFetcherImplTest, DataURLs) {
344 scoped_refptr<URLRequestContext> context(new RequestContext); 358 scoped_refptr<URLRequestContext> context(new RequestContext);
345 ProxyScriptFetcherImpl pac_fetcher(context); 359 ProxyScriptFetcherImpl pac_fetcher(context);
346 360
347 const char kEncodedUrl[] = 361 const char kEncodedUrl[] =
348 "data:application/x-ns-proxy-autoconfig;base64,ZnVuY3Rpb24gRmluZFByb3h5R" 362 "data:application/x-ns-proxy-autoconfig;base64,ZnVuY3Rpb24gRmluZFByb3h5R"
349 "m9yVVJMKHVybCwgaG9zdCkgewogIGlmIChob3N0ID09ICdmb29iYXIuY29tJykKICAgIHJl" 363 "m9yVVJMKHVybCwgaG9zdCkgewogIGlmIChob3N0ID09ICdmb29iYXIuY29tJykKICAgIHJl"
350 "dHVybiAnUFJPWFkgYmxhY2tob2xlOjgwJzsKICByZXR1cm4gJ0RJUkVDVCc7Cn0="; 364 "dHVybiAnUFJPWFkgYmxhY2tob2xlOjgwJzsKICByZXR1cm4gJ0RJUkVDVCc7Cn0=";
351 const char kPacScript[] = 365 const char kPacScript[] =
352 "function FindProxyForURL(url, host) {\n" 366 "function FindProxyForURL(url, host) {\n"
353 " if (host == 'foobar.com')\n" 367 " if (host == 'foobar.com')\n"
354 " return 'PROXY blackhole:80';\n" 368 " return 'PROXY blackhole:80';\n"
355 " return 'DIRECT';\n" 369 " return 'DIRECT';\n"
356 "}"; 370 "}";
357 371
358 // Test fetching a "data:"-url containing a base64 encoded PAC script. 372 // Test fetching a "data:"-url containing a base64 encoded PAC script.
359 { 373 {
360 GURL url(kEncodedUrl); 374 GURL url(kEncodedUrl);
361 string16 text; 375 string16 text;
362 TestCompletionCallback callback; 376 TestCompletionCallback callback;
363 int result = pac_fetcher.Fetch(url, &text, &callback); 377 pac_fetcher.SetURL(url);
378 int result = pac_fetcher.Fetch(&text, &callback);
364 EXPECT_EQ(OK, result); 379 EXPECT_EQ(OK, result);
365 EXPECT_EQ(ASCIIToUTF16(kPacScript), text); 380 EXPECT_EQ(ASCIIToUTF16(kPacScript), text);
366 } 381 }
367 382
368 const char kEncodedUrlBroken[] = 383 const char kEncodedUrlBroken[] =
369 "data:application/x-ns-proxy-autoconfig;base64,ZnVuY3Rpb24gRmluZFByb3h5R"; 384 "data:application/x-ns-proxy-autoconfig;base64,ZnVuY3Rpb24gRmluZFByb3h5R";
370 385
371 // Test a broken "data:"-url containing a base64 encoded PAC script. 386 // Test a broken "data:"-url containing a base64 encoded PAC script.
372 { 387 {
373 GURL url(kEncodedUrlBroken); 388 GURL url(kEncodedUrlBroken);
374 string16 text; 389 string16 text;
375 TestCompletionCallback callback; 390 TestCompletionCallback callback;
376 int result = pac_fetcher.Fetch(url, &text, &callback); 391 pac_fetcher.SetURL(url);
392 int result = pac_fetcher.Fetch(&text, &callback);
377 EXPECT_EQ(ERR_FAILED, result); 393 EXPECT_EQ(ERR_FAILED, result);
378 } 394 }
379 } 395 }
380 396
381 } // namespace net 397 } // namespace net