Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(647)

Side by Side Diff: net/url_request/url_request_job.cc

Issue 266243004: Clang format slam. Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: Created 6 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
OLDNEW
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "net/url_request/url_request_job.h" 5 #include "net/url_request/url_request_job.h"
6 6
7 #include "base/bind.h" 7 #include "base/bind.h"
8 #include "base/compiler_specific.h" 8 #include "base/compiler_specific.h"
9 #include "base/message_loop/message_loop.h" 9 #include "base/message_loop/message_loop.h"
10 #include "base/power_monitor/power_monitor.h" 10 #include "base/power_monitor/power_monitor.h"
(...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after
58 NotifyCanceled(); 58 NotifyCanceled();
59 } 59 }
60 60
// Severs the back-pointer to the owning URLRequest. After this call, the
// notification paths that null-check |request_| become no-ops, so the job
// can safely outlive the request.
void URLRequestJob::DetachRequest() {
  request_ = NULL;
}
64 64
65 // This function calls ReadData to get stream data. If a filter exists, passes 65 // This function calls ReadData to get stream data. If a filter exists, passes
66 // the data to the attached filter. Then returns the output from filter back to 66 // the data to the attached filter. Then returns the output from filter back to
67 // the caller. 67 // the caller.
68 bool URLRequestJob::Read(IOBuffer* buf, int buf_size, int *bytes_read) { 68 bool URLRequestJob::Read(IOBuffer* buf, int buf_size, int* bytes_read) {
69 bool rv = false; 69 bool rv = false;
70 70
71 DCHECK_LT(buf_size, 1000000); // Sanity check. 71 DCHECK_LT(buf_size, 1000000); // Sanity check.
72 DCHECK(buf); 72 DCHECK(buf);
73 DCHECK(bytes_read); 73 DCHECK(bytes_read);
74 DCHECK(filtered_read_buffer_.get() == NULL); 74 DCHECK(filtered_read_buffer_.get() == NULL);
75 DCHECK_EQ(0, filtered_read_buffer_len_); 75 DCHECK_EQ(0, filtered_read_buffer_len_);
76 76
77 *bytes_read = 0; 77 *bytes_read = 0;
78 78
79 // Skip Filter if not present. 79 // Skip Filter if not present.
80 if (!filter_.get()) { 80 if (!filter_.get()) {
81 rv = ReadRawDataHelper(buf, buf_size, bytes_read); 81 rv = ReadRawDataHelper(buf, buf_size, bytes_read);
82 } else { 82 } else {
83 // Save the caller's buffers while we do IO 83 // Save the caller's buffers while we do IO
84 // in the filter's buffers. 84 // in the filter's buffers.
85 filtered_read_buffer_ = buf; 85 filtered_read_buffer_ = buf;
86 filtered_read_buffer_len_ = buf_size; 86 filtered_read_buffer_len_ = buf_size;
87 87
88 if (ReadFilteredData(bytes_read)) { 88 if (ReadFilteredData(bytes_read)) {
89 rv = true; // We have data to return. 89 rv = true; // We have data to return.
90 90
91 // It is fine to call DoneReading even if ReadFilteredData receives 0 91 // It is fine to call DoneReading even if ReadFilteredData receives 0
92 // bytes from the net, but we avoid making that call if we know for 92 // bytes from the net, but we avoid making that call if we know for
93 // sure that's the case (ReadRawDataHelper path). 93 // sure that's the case (ReadRawDataHelper path).
94 if (*bytes_read == 0) 94 if (*bytes_read == 0)
95 DoneReading(); 95 DoneReading();
96 } else { 96 } else {
97 rv = false; // Error, or a new IO is pending. 97 rv = false; // Error, or a new IO is pending.
98 } 98 }
99 } 99 }
(...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after
135 } 135 }
136 136
// Base implementation: this job type exposes no response cookies.
// Subclasses with cookie data override this, fill |cookies|, and return true.
bool URLRequestJob::GetResponseCookies(std::vector<std::string>* cookies) {
  return false;
}
140 140
// Base implementation: no content filter (e.g. for decoding) is attached.
// Subclasses return a Filter* here to have Read() route raw data through it.
Filter* URLRequestJob::SetupFilter() const {
  return NULL;
}
144 144
145 bool URLRequestJob::IsRedirectResponse(GURL* location, 145 bool URLRequestJob::IsRedirectResponse(GURL* location, int* http_status_code) {
146 int* http_status_code) {
147 // For non-HTTP jobs, headers will be null. 146 // For non-HTTP jobs, headers will be null.
148 HttpResponseHeaders* headers = request_->response_headers(); 147 HttpResponseHeaders* headers = request_->response_headers();
149 if (!headers) 148 if (!headers)
150 return false; 149 return false;
151 150
152 std::string value; 151 std::string value;
153 if (!headers->IsRedirect(&value)) 152 if (!headers->IsRedirect(&value))
154 return false; 153 return false;
155 154
156 *location = request_->url().Resolve(value); 155 *location = request_->url().Resolve(value);
(...skipping 25 matching lines...) Expand all
182 // case the derived class should implement this! 181 // case the derived class should implement this!
183 NOTREACHED(); 182 NOTREACHED();
184 } 183 }
185 184
// Cancels a pending authentication challenge. Never reached in the base
// class; a subclass that answers NeedsAuth() with true must override this.
void URLRequestJob::CancelAuth() {
  // This will only be called if NeedsAuth() returns true, in which
  // case the derived class should implement this!
  NOTREACHED();
}
191 190
// Resumes the request with the supplied client certificate. Never reached
// in the base class; jobs that request a client cert must override this.
void URLRequestJob::ContinueWithCertificate(X509Certificate* client_cert) {
  // The derived class should implement this!
  NOTREACHED();
}
197 195
// Resumes the request after an error the caller chose to ignore. Never
// reached in the base class: only job types that can generate recoverable
// errors override this.
void URLRequestJob::ContinueDespiteLastError() {
  // Implementations should know how to recover from errors they generate.
  // If this code was reached, we are trying to recover from an error that
  // we don't know how to recover from.
  NOTREACHED();
}
(...skipping 188 matching lines...) Expand 10 before | Expand all | Expand 10 after
392 390
393 request_->NotifyResponseStarted(); 391 request_->NotifyResponseStarted();
394 } 392 }
395 393
396 void URLRequestJob::NotifyReadComplete(int bytes_read) { 394 void URLRequestJob::NotifyReadComplete(int bytes_read) {
397 if (!request_ || !request_->has_delegate()) 395 if (!request_ || !request_->has_delegate())
398 return; // The request was destroyed, so there is no more work to do. 396 return; // The request was destroyed, so there is no more work to do.
399 397
400 // TODO(darin): Bug 1004233. Re-enable this test once all of the chrome 398 // TODO(darin): Bug 1004233. Re-enable this test once all of the chrome
401 // unit_tests have been fixed to not trip this. 399 // unit_tests have been fixed to not trip this.
402 //DCHECK(!request_->status().is_io_pending()); 400 // DCHECK(!request_->status().is_io_pending());
403 401
404 // The headers should be complete before reads complete 402 // The headers should be complete before reads complete
405 DCHECK(has_handled_response_); 403 DCHECK(has_handled_response_);
406 404
407 OnRawReadComplete(bytes_read); 405 OnRawReadComplete(bytes_read);
408 406
409 // Don't notify if we had an error. 407 // Don't notify if we had an error.
410 if (!request_->status().is_success()) 408 if (!request_->status().is_success())
411 return; 409 return;
412 410
(...skipping 18 matching lines...) Expand all
431 } else { 429 } else {
432 request_->NotifyReadCompleted(bytes_read); 430 request_->NotifyReadCompleted(bytes_read);
433 } 431 }
434 DVLOG(1) << __FUNCTION__ << "() " 432 DVLOG(1) << __FUNCTION__ << "() "
435 << "\"" << (request_ ? request_->url().spec() : "???") << "\"" 433 << "\"" << (request_ ? request_->url().spec() : "???") << "\""
436 << " pre bytes read = " << bytes_read 434 << " pre bytes read = " << bytes_read
437 << " pre total = " << prefilter_bytes_read_ 435 << " pre total = " << prefilter_bytes_read_
438 << " post total = " << postfilter_bytes_read_; 436 << " post total = " << postfilter_bytes_read_;
439 } 437 }
440 438
441 void URLRequestJob::NotifyStartError(const URLRequestStatus &status) { 439 void URLRequestJob::NotifyStartError(const URLRequestStatus& status) {
442 DCHECK(!has_handled_response_); 440 DCHECK(!has_handled_response_);
443 has_handled_response_ = true; 441 has_handled_response_ = true;
444 if (request_) { 442 if (request_) {
445 // There may be relevant information in the response info even in the 443 // There may be relevant information in the response info even in the
446 // error case. 444 // error case.
447 GetResponseInfo(&request_->response_info_); 445 GetResponseInfo(&request_->response_info_);
448 446
449 request_->set_status(status); 447 request_->set_status(status);
450 request_->NotifyResponseStarted(); 448 request_->NotifyResponseStarted();
451 // We may have been deleted. 449 // We may have been deleted.
452 } 450 }
453 } 451 }
454 452
455 void URLRequestJob::NotifyDone(const URLRequestStatus &status) { 453 void URLRequestJob::NotifyDone(const URLRequestStatus& status) {
456 DCHECK(!done_) << "Job sending done notification twice"; 454 DCHECK(!done_) << "Job sending done notification twice";
457 if (done_) 455 if (done_)
458 return; 456 return;
459 done_ = true; 457 done_ = true;
460 458
461 // Unless there was an error, we should have at least tried to handle 459 // Unless there was an error, we should have at least tried to handle
462 // the response before getting here. 460 // the response before getting here.
463 DCHECK(has_handled_response_ || !status.is_success()); 461 DCHECK(has_handled_response_ || !status.is_success());
464 462
465 // As with NotifyReadComplete, we need to take care to notice if we were 463 // As with NotifyReadComplete, we need to take care to notice if we were
(...skipping 18 matching lines...) Expand all
484 // Complete this notification later. This prevents us from re-entering the 482 // Complete this notification later. This prevents us from re-entering the
485 // delegate if we're done because of a synchronous call. 483 // delegate if we're done because of a synchronous call.
486 base::MessageLoop::current()->PostTask( 484 base::MessageLoop::current()->PostTask(
487 FROM_HERE, 485 FROM_HERE,
488 base::Bind(&URLRequestJob::CompleteNotifyDone, 486 base::Bind(&URLRequestJob::CompleteNotifyDone,
489 weak_factory_.GetWeakPtr())); 487 weak_factory_.GetWeakPtr()));
490 } 488 }
491 489
492 void URLRequestJob::CompleteNotifyDone() { 490 void URLRequestJob::CompleteNotifyDone() {
493 // Check if we should notify the delegate that we're done because of an error. 491 // Check if we should notify the delegate that we're done because of an error.
494 if (request_ && 492 if (request_ && !request_->status().is_success() &&
495 !request_->status().is_success() &&
496 request_->has_delegate()) { 493 request_->has_delegate()) {
497 // We report the error differently depending on whether we've called 494 // We report the error differently depending on whether we've called
498 // OnResponseStarted yet. 495 // OnResponseStarted yet.
499 if (has_handled_response_) { 496 if (has_handled_response_) {
500 // We signal the error by calling OnReadComplete with a bytes_read of -1. 497 // We signal the error by calling OnReadComplete with a bytes_read of -1.
501 request_->NotifyReadCompleted(-1); 498 request_->NotifyReadCompleted(-1);
502 } else { 499 } else {
503 has_handled_response_ = true; 500 has_handled_response_ = true;
504 request_->NotifyResponseStarted(); 501 request_->NotifyResponseStarted();
505 } 502 }
(...skipping 13 matching lines...) Expand all
519 } 516 }
520 517
// Forwards the "about to call the delegate" signal to the request.
// Dereferences |request_| without a null check, so this must not be
// invoked after DetachRequest() — TODO confirm callers guarantee that.
void URLRequestJob::OnCallToDelegate() {
  request_->OnCallToDelegate();
}
524 521
// Forwards the "delegate call finished" signal to the request. Like
// OnCallToDelegate(), assumes |request_| is still attached (no null check).
void URLRequestJob::OnCallToDelegateComplete() {
  request_->OnCallToDelegateComplete();
}
528 525
// Default raw read for jobs with no data source: reports a synchronous,
// successful read of zero bytes. Subclasses that produce data override this.
bool URLRequestJob::ReadRawData(IOBuffer* buf, int buf_size, int* bytes_read) {
  DCHECK(bytes_read);
  *bytes_read = 0;
  return true;
}
535 531
// Hook invoked when the response body has been fully read. Intentionally a
// no-op here; subclasses override to release per-response resources.
void URLRequestJob::DoneReading() {
  // Do nothing.
}
539 535
540 void URLRequestJob::DoneReadingRedirectResponse() { 536 void URLRequestJob::DoneReadingRedirectResponse() {
(...skipping 79 matching lines...) Expand 10 before | Expand all | Expand 10 after
620 } 616 }
621 case Filter::FILTER_OK: { 617 case Filter::FILTER_OK: {
622 *bytes_read = filtered_data_len; 618 *bytes_read = filtered_data_len;
623 postfilter_bytes_read_ += filtered_data_len; 619 postfilter_bytes_read_ += filtered_data_len;
624 rv = true; 620 rv = true;
625 break; 621 break;
626 } 622 }
627 case Filter::FILTER_ERROR: { 623 case Filter::FILTER_ERROR: {
628 DVLOG(1) << __FUNCTION__ << "() " 624 DVLOG(1) << __FUNCTION__ << "() "
629 << "\"" << (request_ ? request_->url().spec() : "???") 625 << "\"" << (request_ ? request_->url().spec() : "???")
630 << "\"" << " Filter Error"; 626 << "\""
627 << " Filter Error";
631 filter_needs_more_output_space_ = false; 628 filter_needs_more_output_space_ = false;
632 NotifyDone(URLRequestStatus(URLRequestStatus::FAILED, 629 NotifyDone(URLRequestStatus(URLRequestStatus::FAILED,
633 ERR_CONTENT_DECODING_FAILED)); 630 ERR_CONTENT_DECODING_FAILED));
634 rv = false; 631 rv = false;
635 break; 632 break;
636 } 633 }
637 default: { 634 default: {
638 NOTREACHED(); 635 NOTREACHED();
639 filter_needs_more_output_space_ = false; 636 filter_needs_more_output_space_ = false;
640 rv = false; 637 rv = false;
641 break; 638 break;
642 } 639 }
643 } 640 }
644 641
645 // If logging all bytes is enabled, log the filtered bytes read. 642 // If logging all bytes is enabled, log the filtered bytes read.
646 if (rv && request() && request()->net_log().IsLoggingBytes() && 643 if (rv && request() && request()->net_log().IsLoggingBytes() &&
647 filtered_data_len > 0) { 644 filtered_data_len > 0) {
648 request()->net_log().AddByteTransferEvent( 645 request()->net_log().AddByteTransferEvent(
649 NetLog::TYPE_URL_REQUEST_JOB_FILTERED_BYTES_READ, 646 NetLog::TYPE_URL_REQUEST_JOB_FILTERED_BYTES_READ,
650 filtered_data_len, filtered_read_buffer_->data()); 647 filtered_data_len,
648 filtered_read_buffer_->data());
651 } 649 }
652 } else { 650 } else {
653 // we are done, or there is no data left. 651 // we are done, or there is no data left.
654 rv = true; 652 rv = true;
655 } 653 }
656 break; 654 break;
657 } 655 }
658 656
659 if (rv) { 657 if (rv) {
660 // When we successfully finished a read, we no longer need to save the 658 // When we successfully finished a read, we no longer need to save the
661 // caller's buffers. Release our reference. 659 // caller's buffers. Release our reference.
662 filtered_read_buffer_ = NULL; 660 filtered_read_buffer_ = NULL;
663 filtered_read_buffer_len_ = 0; 661 filtered_read_buffer_len_ = 0;
664 } 662 }
665 return rv; 663 return rv;
666 } 664 }
667 665
// Releases the content filter chain, if any; subsequent reads take the
// unfiltered path (filter_.get() checks elsewhere become false).
void URLRequestJob::DestroyFilters() {
  filter_.reset();
}
671 669
672 const URLRequestStatus URLRequestJob::GetStatus() { 670 const URLRequestStatus URLRequestJob::GetStatus() {
673 if (request_) 671 if (request_)
674 return request_->status(); 672 return request_->status();
675 // If the request is gone, we must be cancelled. 673 // If the request is gone, we must be cancelled.
676 return URLRequestStatus(URLRequestStatus::CANCELED, 674 return URLRequestStatus(URLRequestStatus::CANCELED, ERR_ABORTED);
677 ERR_ABORTED);
678 } 675 }
679 676
// Propagates |status| onto the owning request; silently ignored when the
// request has already been detached.
void URLRequestJob::SetStatus(const URLRequestStatus& status) {
  if (request_)
    request_->set_status(status);
}
684 681
685 bool URLRequestJob::ReadRawDataForFilter(int* bytes_read) { 682 bool URLRequestJob::ReadRawDataForFilter(int* bytes_read) {
686 bool rv = false; 683 bool rv = false;
687 684
688 DCHECK(bytes_read); 685 DCHECK(bytes_read);
689 DCHECK(filter_.get()); 686 DCHECK(filter_.get());
690 687
691 *bytes_read = 0; 688 *bytes_read = 0;
692 689
693 // Get more pre-filtered data if needed. 690 // Get more pre-filtered data if needed.
694 // TODO(mbelshe): is it possible that the filter needs *MORE* data 691 // TODO(mbelshe): is it possible that the filter needs *MORE* data
695 // when there is some data already in the buffer? 692 // when there is some data already in the buffer?
696 if (!filter_->stream_data_len() && !is_done()) { 693 if (!filter_->stream_data_len() && !is_done()) {
697 IOBuffer* stream_buffer = filter_->stream_buffer(); 694 IOBuffer* stream_buffer = filter_->stream_buffer();
698 int stream_buffer_size = filter_->stream_buffer_size(); 695 int stream_buffer_size = filter_->stream_buffer_size();
699 rv = ReadRawDataHelper(stream_buffer, stream_buffer_size, bytes_read); 696 rv = ReadRawDataHelper(stream_buffer, stream_buffer_size, bytes_read);
700 } 697 }
701 return rv; 698 return rv;
702 } 699 }
703 700
704 bool URLRequestJob::ReadRawDataHelper(IOBuffer* buf, int buf_size, 701 bool URLRequestJob::ReadRawDataHelper(IOBuffer* buf,
702 int buf_size,
705 int* bytes_read) { 703 int* bytes_read) {
706 DCHECK(!request_->status().is_io_pending()); 704 DCHECK(!request_->status().is_io_pending());
707 DCHECK(raw_read_buffer_.get() == NULL); 705 DCHECK(raw_read_buffer_.get() == NULL);
708 706
709 // Keep a pointer to the read buffer, so we have access to it in the 707 // Keep a pointer to the read buffer, so we have access to it in the
710 // OnRawReadComplete() callback in the event that the read completes 708 // OnRawReadComplete() callback in the event that the read completes
711 // asynchronously. 709 // asynchronously.
712 raw_read_buffer_ = buf; 710 raw_read_buffer_ = buf;
713 bool rv = ReadRawData(buf, buf_size, bytes_read); 711 bool rv = ReadRawData(buf, buf_size, bytes_read);
714 712
(...skipping 12 matching lines...) Expand all
727 NotifyDone(URLRequestStatus(URLRequestStatus::FAILED, rv)); 725 NotifyDone(URLRequestStatus(URLRequestStatus::FAILED, rv));
728 } 726 }
729 727
// Bookkeeping for a completed raw (pre-filter) read: optionally logs the
// transferred bytes, updates byte counters, then drops our reference to
// the caller's buffer.
void URLRequestJob::OnRawReadComplete(int bytes_read) {
  DCHECK(raw_read_buffer_.get());
  // If |filter_| is non-NULL, bytes will be logged after it is applied instead.
  if (!filter_.get() && request() && request()->net_log().IsLoggingBytes() &&
      bytes_read > 0) {
    request()->net_log().AddByteTransferEvent(
        NetLog::TYPE_URL_REQUEST_JOB_BYTES_READ,
        bytes_read,
        raw_read_buffer_->data());
  }

  if (bytes_read > 0) {
    RecordBytesRead(bytes_read);
  }
  // Release the buffer only after logging/accounting have used it.
  raw_read_buffer_ = NULL;
}
745 744
// Accumulates per-read byte counts. Pre-filter totals always advance; the
// post-filter total advances here only when no filter is installed (with a
// filter, ReadFilteredData accounts for post-filter bytes instead). Also
// notifies the network delegate of the raw bytes.
void URLRequestJob::RecordBytesRead(int bytes_read) {
  filter_input_byte_count_ += bytes_read;
  prefilter_bytes_read_ += bytes_read;
  if (!filter_.get())
    postfilter_bytes_read_ += bytes_read;
  DVLOG(2) << __FUNCTION__ << "() "
           << "\"" << (request_ ? request_->url().spec() : "???") << "\""
           << " pre bytes read = " << bytes_read
           << " pre total = " << prefilter_bytes_read_
           << " post total = " << postfilter_bytes_read_;
  UpdatePacketReadTimes();  // Facilitate stats recording if it is active.
  if (network_delegate_)
    network_delegate_->NotifyRawBytesRead(*request_, bytes_read);
}
760 759
// True when a filter is installed and it still holds buffered input that
// has not yet been drained.
bool URLRequestJob::FilterHasData() {
  return filter_.get() && filter_->stream_data_len();
}
764 763
// Intentionally empty: a hook for recording packet-arrival timing stats,
// presumably overridden by jobs that collect them — TODO confirm.
void URLRequestJob::UpdatePacketReadTimes() {
}
767 766
768 } // namespace net 767 } // namespace net
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698