Chromium Code Reviews

Unified Diff: net/url_request/url_request_job.cc

Issue 6881106: Treat ERR_CONNECTION_CLOSED as end-of-data marker for downloads. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: Adjustments per rvargas' comments. Created 9 years, 7 months ago
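The issue summary above means that when the server ends a download by closing the connection, the resulting ERR_CONNECTION_CLOSED is reported as a normal end of the response body rather than as a failure; that error handling is presumably in net/url_request/url_request_http_job.cc, the other file in this issue, while this file only adjusts byte accounting and logging. A minimal, hypothetical sketch of the idea, with made-up names ("read_result", "is_download"), not the code in this patch:

    // Hypothetical illustration only: a read that fails because the server
    // closed the connection is treated as a normal end of stream for a
    // download instead of propagating the error.
    if (read_result == net::ERR_CONNECTION_CLOSED && is_download)
      read_result = 0;  // 0 bytes read == end of data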
 // Copyright (c) 2011 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "net/url_request/url_request_job.h"

 #include "base/compiler_specific.h"
 #include "base/message_loop.h"
 #include "base/string_number_conversions.h"
 #include "base/string_util.h"

(...skipping 288 matching lines...)
   // The headers should be complete before reads complete
   DCHECK(has_handled_response_);

   OnRawReadComplete(bytes_read);

   // Don't notify if we had an error.
   if (!request_->status().is_success())
     return;

   // When notifying the delegate, the delegate can release the request
-  // (and thus release 'this'). After calling to the delgate, we must
+  // (and thus release 'this'). After calling to the delegate, we must
   // check the request pointer to see if it still exists, and return
   // immediately if it has been destroyed. self_preservation ensures our
   // survival until we can get out of this method.
   scoped_refptr<URLRequestJob> self_preservation(this);

-  prefilter_bytes_read_ += bytes_read;
   if (filter_.get()) {
     // Tell the filter that it has more data
     FilteredDataRead(bytes_read);

     // Filter the data.
     int filter_bytes_read = 0;
     if (ReadFilteredData(&filter_bytes_read)) {
-      postfilter_bytes_read_ += filter_bytes_read;
       request_->delegate()->OnReadCompleted(request_, filter_bytes_read);
     }
   } else {
-    postfilter_bytes_read_ += bytes_read;
     request_->delegate()->OnReadCompleted(request_, bytes_read);
   }
+  VLOG(21) << __FUNCTION__ << "() "
+      << "\"" << (request_ ? request_->url().spec() : "???") << "\""
+      << " pre bytes read = " << bytes_read
+      << " pre total = " << prefilter_bytes_read_
+      << " post total = " << postfilter_bytes_read_;

    rvargas (doing something else) 2011/05/21 01:24:50:
        DVLOG. I'm still not too happy about the 21... it'
    ahendrickson 2011/05/22 06:43:34:
        Reduced the number to 2 (in case anyone adds VLOGs
 }

 void URLRequestJob::NotifyStartError(const URLRequestStatus &status) {
   DCHECK(!has_handled_response_);
   has_handled_response_ = true;
   if (request_) {
     request_->set_status(status);
     request_->ResponseStarted();
   }
 }

(...skipping 121 matching lines...)
       // and we should have at least tried to fill up the filter's input buffer.
       // Correct the state, and try again.
       filter_needs_more_output_space_ = false;
       return ReadFilteredData(bytes_read);
     }

     switch (status) {
       case Filter::FILTER_DONE: {
         filter_needs_more_output_space_ = false;
         *bytes_read = filtered_data_len;
+        postfilter_bytes_read_ += filtered_data_len;
         rv = true;
         break;
       }
       case Filter::FILTER_NEED_MORE_DATA: {
         filter_needs_more_output_space_ =
             (filtered_data_len == output_buffer_size);
         // We have finished filtering all data currently in the buffer.
         // There might be some space left in the output buffer. One can
         // consider reading more data from the stream to feed the filter
         // and filling up the output buffer. This leads to more complicated
         // buffer management and data notification mechanisms.
         // We can revisit this issue if there is a real perf need.
         if (filtered_data_len > 0) {
           *bytes_read = filtered_data_len;
+          postfilter_bytes_read_ += filtered_data_len;
           rv = true;
         } else {
           // Read again since we haven't received enough data yet (e.g., we may
           // not have a complete gzip header yet)
           rv = ReadFilteredData(bytes_read);
         }
         break;
       }
       case Filter::FILTER_OK: {
         filter_needs_more_output_space_ =
             (filtered_data_len == output_buffer_size);
         *bytes_read = filtered_data_len;
+        postfilter_bytes_read_ += filtered_data_len;
         rv = true;
         break;
       }
       case Filter::FILTER_ERROR: {
         filter_needs_more_output_space_ = false;
         NotifyDone(URLRequestStatus(URLRequestStatus::FAILED,
                                     ERR_CONTENT_DECODING_FAILED));
         rv = false;
         break;
       }
       default: {
         NOTREACHED();
         filter_needs_more_output_space_ = false;
         rv = false;
         break;
       }
     }
+    VLOG(21) << __FUNCTION__ << "() "
+        << "\"" << (request_ ? request_->url().spec() : "???") << "\""
+        << " rv = " << rv
+        << " post bytes read = " << filtered_data_len
+        << " pre total = " << prefilter_bytes_read_
+        << " post total = "
+        << postfilter_bytes_read_;
   } else {
     // we are done, or there is no data left.
     rv = true;
   }

   if (rv) {
     // When we successfully finished a read, we no longer need to
     // save the caller's buffers. Release our reference.
     filtered_read_buffer_ = NULL;
     filtered_read_buffer_len_ = 0;

(...skipping 64 matching lines...)
 void URLRequestJob::OnRawReadComplete(int bytes_read) {
   DCHECK(raw_read_buffer_);
   if (bytes_read > 0) {
     RecordBytesRead(bytes_read);
   }
   raw_read_buffer_ = NULL;
 }

 void URLRequestJob::RecordBytesRead(int bytes_read) {
   filter_input_byte_count_ += bytes_read;
+  prefilter_bytes_read_ += bytes_read;
+  if (!filter_.get())
+    postfilter_bytes_read_ += bytes_read;
+  VLOG(21) << __FUNCTION__ << "() "
+      << "\"" << (request_ ? request_->url().spec() : "???") << "\""
+      << " pre bytes read = " << bytes_read
+      << " pre total = " << prefilter_bytes_read_
+      << " post total = " << postfilter_bytes_read_;
   UpdatePacketReadTimes();  // Facilitate stats recording if it is active.
   g_url_request_job_tracker.OnBytesRead(this, raw_read_buffer_->data(),
                                         bytes_read);
 }

 bool URLRequestJob::FilterHasData() {
   return filter_.get() && filter_->stream_data_len();
 }

 void URLRequestJob::UpdatePacketReadTimes() {
 }

 }  // namespace net