Unified diff: net/url_request/url_request_http_job.cc

Issue 7044092: Do not allow compression when requesting multimedia (Closed) Base URL: http://src.chromium.org/svn/trunk/src/
Patch Set: '' Created 9 years, 6 months ago
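Summary of the change: AddExtraHeaders() now supplies a default Accept-Encoding header only when the caller has not already provided one, so loaders that need uncompressed bytes (streaming media, per bug 47381) can pin the encoding themselves. Below is a minimal sketch of that caller side; the helper name, call site, and the exact header value are assumptions for illustration, not code from this patch.

// Hypothetical caller-side helper (not part of this CL): a media loader
// opting out of content encoding before the request starts. With this
// change, URLRequestHttpJob::AddExtraHeaders() leaves the value alone.
#include "net/http/http_request_headers.h"
#include "net/url_request/url_request.h"

void RequestIdentityEncoding(net::URLRequest* request) {
  net::HttpRequestHeaders headers;
  headers.SetHeader(net::HttpRequestHeaders::kAcceptEncoding, "identity");
  request->SetExtraRequestHeaders(headers);  // must be set before Start()
}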
 // Copyright (c) 2011 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "net/url_request/url_request_http_job.h"

 #include "base/base_switches.h"
 #include "base/command_line.h"
 #include "base/compiler_specific.h"
 #include "base/file_util.h"
(...skipping 271 matching lines...)
     URLRequestThrottlerHeaderAdapter response_adapter(
         response_info_->headers);
     throttling_entry_->UpdateWithResponse(request_info_.url.host(),
                                           &response_adapter);
   }

   ProcessStrictTransportSecurityHeader();

   if (SdchManager::Global() &&
       SdchManager::Global()->IsInSupportedDomain(request_->url())) {
+    static const std::string name = "Get-Dictionary";
     std::string url_text;
     void* iter = NULL;
     // TODO(jar): We need to not fetch dictionaries the first time they are
     // seen, but rather wait until we can justify their usefulness.
     // For now, we will only fetch the first dictionary, which will at least
     // require multiple suggestions before we get additional ones for this site.
     // Eventually we should wait until a dictionary is requested several times
     // before we even download it (so that we don't waste memory or bandwidth).
-    if (response_info_->headers->EnumerateHeader(&iter, "Get-Dictionary",
-                                                 &url_text)) {
+    if (response_info_->headers->EnumerateHeader(&iter, name, &url_text)) {
       // request_->url() won't be valid in the destructor, so we use an
       // alternate copy.
       DCHECK_EQ(request_->url(), request_info_.url);
       // Resolve suggested URL relative to request url.
       sdch_dictionary_url_ = request_info_.url.Resolve(url_text);
     }
   }

   // The HTTP transaction may be restarted several times for the purposes
   // of sending authorization information. Each time it restarts, we get
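Aside on the Get-Dictionary handling above: the advertised dictionary URL may be relative, and it is resolved against the request URL before being stored in sdch_dictionary_url_. A small illustration follows; the host, path, and function name are made up for the example, and the GURL include path is the assumed location in the Chromium tree of this era.

#include "googleurl/src/gurl.h"

// Resolves a relative Get-Dictionary value the same way the code above does.
GURL ExampleDictionaryUrl() {
  GURL request_url("http://www.example.com/news/index.html");
  // Suppose the response carried: Get-Dictionary: /sdch/news_dict
  return request_url.Resolve("/sdch/news_dict");
  // -> http://www.example.com/sdch/news_dict
}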
(...skipping 79 matching lines...)

   // The transaction started synchronously, but we need to notify the
   // URLRequest delegate via the message loop.
   MessageLoop::current()->PostTask(
       FROM_HERE,
       method_factory_.NewRunnableMethod(
           &URLRequestHttpJob::OnStartCompleted, rv));
 }

 void URLRequestHttpJob::AddExtraHeaders() {
-  // TODO(jar): Consider optimizing away SDCH advertising bytes when the URL is
-  // probably an img or such (and SDCH encoding is not likely).
-  bool advertise_sdch = SdchManager::Global() &&
-      SdchManager::Global()->IsInSupportedDomain(request_->url());
-  std::string avail_dictionaries;
-  if (advertise_sdch) {
-    SdchManager::Global()->GetAvailDictionaryList(request_->url(),
-                                                  &avail_dictionaries);
+  // Supply Accept-Encoding field only if it is not already provided.
+  // It should be provided IF the content is known to have restrictions on
+  // potential encoding, such as streaming multi-media.
+  // For details see bug 47381.
+  // TODO(jar, enal): jpeg files etc. should set up a request header if
+  // possible. Right now it is done only by buffered_resource_loader and
+  // simple_data_source.
+  if (!request_info_.extra_headers.HasHeader(
+      HttpRequestHeaders::kAcceptEncoding)) {
+    bool advertise_sdch = SdchManager::Global() &&
+        SdchManager::Global()->IsInSupportedDomain(request_->url());
+    std::string avail_dictionaries;
+    if (advertise_sdch) {
+      SdchManager::Global()->GetAvailDictionaryList(request_->url(),
+                                                    &avail_dictionaries);

-    // The AllowLatencyExperiment() is only true if we've successfully done a
-    // full SDCH compression recently in this browser session for this host.
-    // Note that for this path, there might be no applicable dictionaries, and
-    // hence we can't participate in the experiment.
-    if (!avail_dictionaries.empty() &&
-        SdchManager::Global()->AllowLatencyExperiment(request_->url())) {
-      // We are participating in the test (or control), and hence we'll
-      // eventually record statistics via either SDCH_EXPERIMENT_DECODE or
-      // SDCH_EXPERIMENT_HOLDBACK, and we'll need some packet timing data.
-      packet_timing_enabled_ = true;
-      if (base::RandDouble() < .01) {
-        sdch_test_control_ = true;  // 1% probability.
-        advertise_sdch = false;
-      } else {
-        sdch_test_activated_ = true;
+      // The AllowLatencyExperiment() is only true if we've successfully done a
+      // full SDCH compression recently in this browser session for this host.
+      // Note that for this path, there might be no applicable dictionaries,
+      // and hence we can't participate in the experiment.
+      if (!avail_dictionaries.empty() &&
+          SdchManager::Global()->AllowLatencyExperiment(request_->url())) {
+        // We are participating in the test (or control), and hence we'll
+        // eventually record statistics via either SDCH_EXPERIMENT_DECODE or
+        // SDCH_EXPERIMENT_HOLDBACK, and we'll need some packet timing data.
+        packet_timing_enabled_ = true;
+        if (base::RandDouble() < .01) {
+          sdch_test_control_ = true;  // 1% probability.
+          advertise_sdch = false;
+        } else {
+          sdch_test_activated_ = true;
+        }
+      }
+    }
+
+    // Supply Accept-Encoding headers first so that it is more likely that they
+    // will be in the first transmitted packet. This can sometimes make it
+    // easier to filter and analyze the streams to assure that a proxy has not
+    // damaged these headers. Some proxies deliberately corrupt Accept-Encoding
+    // headers.
+    if (!advertise_sdch) {
+      // Tell the server what compression formats we support (other than SDCH).
+      request_info_.extra_headers.SetHeader(
+          HttpRequestHeaders::kAcceptEncoding, "gzip,deflate");
+    } else {
+      // Include SDCH in acceptable list.
+      request_info_.extra_headers.SetHeader(
+          HttpRequestHeaders::kAcceptEncoding, "gzip,deflate,sdch");
+      if (!avail_dictionaries.empty()) {
+        request_info_.extra_headers.SetHeader(
+            kAvailDictionaryHeader,
+            avail_dictionaries);
+        sdch_dictionary_advertised_ = true;
+        // Since we're tagging this transaction as advertising a dictionary,
+        // we'll definitely employ an SDCH filter (or tentative sdch filter)
+        // when we get a response. When done, we'll record histograms via
+        // SDCH_DECODE or SDCH_PASSTHROUGH. Hence we need to record packet
+        // arrival times.
+        packet_timing_enabled_ = true;
       }
     }
   }

-  // Supply Accept-Encoding headers first so that it is more likely that they
-  // will be in the first transmitted packet. This can sometimes make it easier
-  // to filter and analyze the streams to assure that a proxy has not damaged
-  // these headers. Some proxies deliberately corrupt Accept-Encoding headers.
-  if (!advertise_sdch) {
-    // Tell the server what compression formats we support (other than SDCH).
-    request_info_.extra_headers.SetHeader(
-        HttpRequestHeaders::kAcceptEncoding, "gzip,deflate");
-  } else {
-    // Include SDCH in acceptable list.
-    request_info_.extra_headers.SetHeader(
-        HttpRequestHeaders::kAcceptEncoding, "gzip,deflate,sdch");
-    if (!avail_dictionaries.empty()) {
-      request_info_.extra_headers.SetHeader(
-          kAvailDictionaryHeader,
-          avail_dictionaries);
-      sdch_dictionary_advertised_ = true;
-      // Since we're tagging this transaction as advertising a dictionary, we'll
-      // definitely employ an SDCH filter (or tentative sdch filter) when we get
-      // a response. When done, we'll record histograms via SDCH_DECODE or
-      // SDCH_PASSTHROUGH. Hence we need to record packet arrival times.
-      packet_timing_enabled_ = true;
-    }
-  }
-
   URLRequestContext* context = request_->context();
   if (context) {
     // Only add default Accept-Language and Accept-Charset if the request
     // didn't have them specified.
     if (!context->accept_language().empty()) {
       request_info_.extra_headers.SetHeaderIfMissing(
           HttpRequestHeaders::kAcceptLanguage,
           context->accept_language());
     }
     if (!context->accept_charset().empty()) {
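To make the control flow above easier to scan, here is the Accept-Encoding decision this patch implements, distilled into a free function. This is an illustrative sketch only (the function and its parameters are not part of the CL), and the Avail-Dictionary handling is omitted.

#include <string>

// Returns the Accept-Encoding value AddExtraHeaders() would add, or an empty
// string when the caller already supplied the header and it is left untouched.
std::string ChooseAcceptEncoding(bool caller_set_accept_encoding,
                                 bool advertise_sdch) {
  if (caller_set_accept_encoding)
    return std::string();        // e.g. a media loader asked for identity encoding
  if (!advertise_sdch)
    return "gzip,deflate";       // compression formats supported other than SDCH
  return "gzip,deflate,sdch";    // include SDCH in the acceptable list
}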
(...skipping 945 matching lines...)
 }

 bool URLRequestHttpJob::IsCompressibleContent() const {
   std::string mime_type;
   return GetMimeType(&mime_type) &&
       (IsSupportedJavascriptMimeType(mime_type.c_str()) ||
        IsSupportedNonImageMimeType(mime_type.c_str()));
 }

 }  // namespace net
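For context on IsCompressibleContent() above: it reports whether the response's MIME type is one the browser treats as JavaScript or another supported non-image type. A hedged usage sketch, where the wrapper function and the example MIME string are assumptions rather than code from this CL:

#include <string>
#include "net/base/mime_util.h"

// Mirrors the MIME check used by IsCompressibleContent(), for illustration.
bool LooksCompressible(const std::string& mime_type) {
  return net::IsSupportedJavascriptMimeType(mime_type.c_str()) ||
         net::IsSupportedNonImageMimeType(mime_type.c_str());
}
// LooksCompressible("text/html") is expected to be true, while image types and
// unknown binary types fall outside both predicates.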