OLD | NEW |
1 // Copyright (c) 2006-2008 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2006-2008 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "net/url_request/url_request_http_job.h" | 5 #include "net/url_request/url_request_http_job.h" |
6 | 6 |
7 #include "base/base_switches.h" | 7 #include "base/base_switches.h" |
8 #include "base/command_line.h" | 8 #include "base/command_line.h" |
9 #include "base/compiler_specific.h" | 9 #include "base/compiler_specific.h" |
10 #include "base/file_util.h" | 10 #include "base/file_util.h" |
(...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
58 ALLOW_THIS_IN_INITIALIZER_LIST( | 58 ALLOW_THIS_IN_INITIALIZER_LIST( |
59 start_callback_(this, &URLRequestHttpJob::OnStartCompleted)), | 59 start_callback_(this, &URLRequestHttpJob::OnStartCompleted)), |
60 ALLOW_THIS_IN_INITIALIZER_LIST( | 60 ALLOW_THIS_IN_INITIALIZER_LIST( |
61 read_callback_(this, &URLRequestHttpJob::OnReadCompleted)), | 61 read_callback_(this, &URLRequestHttpJob::OnReadCompleted)), |
62 read_in_progress_(false), | 62 read_in_progress_(false), |
63 context_(request->context()) { | 63 context_(request->context()) { |
64 } | 64 } |
65 | 65 |
66 URLRequestHttpJob::~URLRequestHttpJob() { | 66 URLRequestHttpJob::~URLRequestHttpJob() { |
67 if (sdch_dictionary_url_.is_valid()) { | 67 if (sdch_dictionary_url_.is_valid()) { |
68 SdchManager::Global()->FetchDictionary(sdch_dictionary_url_); | 68 // Prior to reaching the destructor, request_ has been set to a NULL |
| 69 // pointer, so request_->url() is no longer valid in the destructor, and we |
| 70 // use an alternate copy |request_info_.url|. |
| 71 SdchManager::Global()->FetchDictionary(request_info_.url, |
| 72 sdch_dictionary_url_); |
69 } | 73 } |
70 } | 74 } |
71 | 75 |
// Stores the upload (request body) data in request_info_ for use when the
// HTTP transaction is created. Calling this after the transaction has been
// started is a programming error, enforced by the DCHECK below.
void URLRequestHttpJob::SetUpload(net::UploadData* upload) {
  DCHECK(!transaction_.get()) << "cannot change once started";
  request_info_.upload_data = upload;
}
76 | 80 |
77 void URLRequestHttpJob::SetExtraRequestHeaders( | 81 void URLRequestHttpJob::SetExtraRequestHeaders( |
78 const std::string& headers) { | 82 const std::string& headers) { |
(...skipping 368 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
447 static const std::string name = "Get-Dictionary"; | 451 static const std::string name = "Get-Dictionary"; |
448 std::string url_text; | 452 std::string url_text; |
449 void* iter = NULL; | 453 void* iter = NULL; |
450 // TODO(jar): We need to not fetch dictionaries the first time they are | 454 // TODO(jar): We need to not fetch dictionaries the first time they are |
451 // seen, but rather wait until we can justify their usefulness. | 455 // seen, but rather wait until we can justify their usefulness. |
452 // For now, we will only fetch the first dictionary, which will at least | 456 // For now, we will only fetch the first dictionary, which will at least |
453 // require multiple suggestions before we get additional ones for this site. | 457 // require multiple suggestions before we get additional ones for this site. |
454 // Eventually we should wait until a dictionary is requested several times | 458 // Eventually we should wait until a dictionary is requested several times |
455 // before we even download it (so that we don't waste memory or bandwidth). | 459 // before we even download it (so that we don't waste memory or bandwidth). |
456 if (response_info_->headers->EnumerateHeader(&iter, name, &url_text)) { | 460 if (response_info_->headers->EnumerateHeader(&iter, name, &url_text)) { |
457 GURL dictionary_url = request_->url().Resolve(url_text); | 461 // request_->url() won't be valid in the destructor, so we use an |
458 if (SdchManager::Global()->CanFetchDictionary(request_->url(), | 462 // alternate copy. |
459 dictionary_url)) | 463 DCHECK(request_->url() == request_info_.url); |
460 sdch_dictionary_url_ = dictionary_url; | 464 // Resolve suggested URL relative to request url. |
| 465 sdch_dictionary_url_ = request_info_.url.Resolve(url_text); |
461 } | 466 } |
462 } | 467 } |
463 | 468 |
464 URLRequestJob::NotifyHeadersComplete(); | 469 URLRequestJob::NotifyHeadersComplete(); |
465 } | 470 } |
466 | 471 |
467 void URLRequestHttpJob::DestroyTransaction() { | 472 void URLRequestHttpJob::DestroyTransaction() { |
468 DCHECK(transaction_.get()); | 473 DCHECK(transaction_.get()); |
469 | 474 |
470 transaction_.reset(); | 475 transaction_.reset(); |
(...skipping 85 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
556 DCHECK(response_cookies_.empty()); | 561 DCHECK(response_cookies_.empty()); |
557 | 562 |
558 std::string name = "Set-Cookie"; | 563 std::string name = "Set-Cookie"; |
559 std::string value; | 564 std::string value; |
560 | 565 |
561 void* iter = NULL; | 566 void* iter = NULL; |
562 while (response_info_->headers->EnumerateHeader(&iter, name, &value)) | 567 while (response_info_->headers->EnumerateHeader(&iter, name, &value)) |
563 response_cookies_.push_back(value); | 568 response_cookies_.push_back(value); |
564 } | 569 } |
565 | 570 |
OLD | NEW |