OLD | NEW |
1 // Copyright (c) 2011 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2011 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "net/url_request/url_request_job.h" | 5 #include "net/url_request/url_request_job.h" |
6 | 6 |
7 #include "base/compiler_specific.h" | 7 #include "base/compiler_specific.h" |
8 #include "base/message_loop.h" | 8 #include "base/message_loop.h" |
9 #include "base/string_number_conversions.h" | 9 #include "base/string_number_conversions.h" |
10 #include "base/string_util.h" | 10 #include "base/string_util.h" |
(...skipping 288 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
299 // The headers should be complete before reads complete | 299 // The headers should be complete before reads complete |
300 DCHECK(has_handled_response_); | 300 DCHECK(has_handled_response_); |
301 | 301 |
302 OnRawReadComplete(bytes_read); | 302 OnRawReadComplete(bytes_read); |
303 | 303 |
304 // Don't notify if we had an error. | 304 // Don't notify if we had an error. |
305 if (!request_->status().is_success()) | 305 if (!request_->status().is_success()) |
306 return; | 306 return; |
307 | 307 |
308 // When notifying the delegate, the delegate can release the request | 308 // When notifying the delegate, the delegate can release the request |
309 // (and thus release 'this'). After calling to the delgate, we must | 309 // (and thus release 'this'). After calling to the delegate, we must |
310 // check the request pointer to see if it still exists, and return | 310 // check the request pointer to see if it still exists, and return |
311 // immediately if it has been destroyed. self_preservation ensures our | 311 // immediately if it has been destroyed. self_preservation ensures our |
312 // survival until we can get out of this method. | 312 // survival until we can get out of this method. |
313 scoped_refptr<URLRequestJob> self_preservation(this); | 313 scoped_refptr<URLRequestJob> self_preservation(this); |
314 | 314 |
315 prefilter_bytes_read_ += bytes_read; | |
316 if (filter_.get()) { | 315 if (filter_.get()) { |
317 // Tell the filter that it has more data | 316 // Tell the filter that it has more data |
318 FilteredDataRead(bytes_read); | 317 FilteredDataRead(bytes_read); |
319 | 318 |
320 // Filter the data. | 319 // Filter the data. |
321 int filter_bytes_read = 0; | 320 int filter_bytes_read = 0; |
322 if (ReadFilteredData(&filter_bytes_read)) { | 321 if (ReadFilteredData(&filter_bytes_read)) { |
323 postfilter_bytes_read_ += filter_bytes_read; | |
324 request_->delegate()->OnReadCompleted(request_, filter_bytes_read); | 322 request_->delegate()->OnReadCompleted(request_, filter_bytes_read); |
325 } | 323 } |
326 } else { | 324 } else { |
327 postfilter_bytes_read_ += bytes_read; | |
328 request_->delegate()->OnReadCompleted(request_, bytes_read); | 325 request_->delegate()->OnReadCompleted(request_, bytes_read); |
329 } | 326 } |
| 327 DVLOG(1) << __FUNCTION__ << "() " |
| 328 << "\"" << (request_ ? request_->url().spec() : "???") << "\"" |
| 329 << " pre bytes read = " << bytes_read |
| 330 << " pre total = " << prefilter_bytes_read_ |
| 331 << " post total = " << postfilter_bytes_read_; |
330 } | 332 } |
331 | 333 |
332 void URLRequestJob::NotifyStartError(const URLRequestStatus &status) { | 334 void URLRequestJob::NotifyStartError(const URLRequestStatus &status) { |
333 DCHECK(!has_handled_response_); | 335 DCHECK(!has_handled_response_); |
334 has_handled_response_ = true; | 336 has_handled_response_ = true; |
335 if (request_) { | 337 if (request_) { |
336 request_->set_status(status); | 338 request_->set_status(status); |
337 request_->ResponseStarted(); | 339 request_->ResponseStarted(); |
338 } | 340 } |
339 } | 341 } |
(...skipping 121 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
461 // and we should have at least tried to fill up the filter's input buffer. | 463 // and we should have at least tried to fill up the filter's input buffer. |
462 // Correct the state, and try again. | 464 // Correct the state, and try again. |
463 filter_needs_more_output_space_ = false; | 465 filter_needs_more_output_space_ = false; |
464 return ReadFilteredData(bytes_read); | 466 return ReadFilteredData(bytes_read); |
465 } | 467 } |
466 | 468 |
467 switch (status) { | 469 switch (status) { |
468 case Filter::FILTER_DONE: { | 470 case Filter::FILTER_DONE: { |
469 filter_needs_more_output_space_ = false; | 471 filter_needs_more_output_space_ = false; |
470 *bytes_read = filtered_data_len; | 472 *bytes_read = filtered_data_len; |
| 473 postfilter_bytes_read_ += filtered_data_len; |
471 rv = true; | 474 rv = true; |
472 break; | 475 break; |
473 } | 476 } |
474 case Filter::FILTER_NEED_MORE_DATA: { | 477 case Filter::FILTER_NEED_MORE_DATA: { |
475 filter_needs_more_output_space_ = | 478 filter_needs_more_output_space_ = |
476 (filtered_data_len == output_buffer_size); | 479 (filtered_data_len == output_buffer_size); |
477 // We have finished filtering all data currently in the buffer. | 480 // We have finished filtering all data currently in the buffer. |
478 // There might be some space left in the output buffer. One can | 481 // There might be some space left in the output buffer. One can |
479 // consider reading more data from the stream to feed the filter | 482 // consider reading more data from the stream to feed the filter |
480 // and filling up the output buffer. This leads to more complicated | 483 // and filling up the output buffer. This leads to more complicated |
481 // buffer management and data notification mechanisms. | 484 // buffer management and data notification mechanisms. |
482 // We can revisit this issue if there is a real perf need. | 485 // We can revisit this issue if there is a real perf need. |
483 if (filtered_data_len > 0) { | 486 if (filtered_data_len > 0) { |
484 *bytes_read = filtered_data_len; | 487 *bytes_read = filtered_data_len; |
| 488 postfilter_bytes_read_ += filtered_data_len; |
485 rv = true; | 489 rv = true; |
486 } else { | 490 } else { |
487 // Read again since we haven't received enough data yet (e.g., we may | 491 // Read again since we haven't received enough data yet (e.g., we may |
488 // not have a complete gzip header yet) | 492 // not have a complete gzip header yet) |
489 rv = ReadFilteredData(bytes_read); | 493 rv = ReadFilteredData(bytes_read); |
490 } | 494 } |
491 break; | 495 break; |
492 } | 496 } |
493 case Filter::FILTER_OK: { | 497 case Filter::FILTER_OK: { |
494 filter_needs_more_output_space_ = | 498 filter_needs_more_output_space_ = |
495 (filtered_data_len == output_buffer_size); | 499 (filtered_data_len == output_buffer_size); |
496 *bytes_read = filtered_data_len; | 500 *bytes_read = filtered_data_len; |
| 501 postfilter_bytes_read_ += filtered_data_len; |
497 rv = true; | 502 rv = true; |
498 break; | 503 break; |
499 } | 504 } |
500 case Filter::FILTER_ERROR: { | 505 case Filter::FILTER_ERROR: { |
| 506 DVLOG(1) << __FUNCTION__ << "() " |
| 507 << "\"" << (request_ ? request_->url().spec() : "???") << "\"" |
| 508 << " Filter Error"; |
501 filter_needs_more_output_space_ = false; | 509 filter_needs_more_output_space_ = false; |
502 NotifyDone(URLRequestStatus(URLRequestStatus::FAILED, | 510 NotifyDone(URLRequestStatus(URLRequestStatus::FAILED, |
503 ERR_CONTENT_DECODING_FAILED)); | 511 ERR_CONTENT_DECODING_FAILED)); |
504 rv = false; | 512 rv = false; |
505 break; | 513 break; |
506 } | 514 } |
507 default: { | 515 default: { |
508 NOTREACHED(); | 516 NOTREACHED(); |
509 filter_needs_more_output_space_ = false; | 517 filter_needs_more_output_space_ = false; |
510 rv = false; | 518 rv = false; |
511 break; | 519 break; |
512 } | 520 } |
513 } | 521 } |
| 522 DVLOG(2) << __FUNCTION__ << "() " |
| 523 << "\"" << (request_ ? request_->url().spec() : "???") << "\"" |
| 524 << " rv = " << rv |
| 525 << " post bytes read = " << filtered_data_len |
| 526 << " pre total = " << prefilter_bytes_read_ |
| 527 << " post total = " |
| 528 << postfilter_bytes_read_; |
514 } else { | 529 } else { |
515 // we are done, or there is no data left. | 530 // we are done, or there is no data left. |
516 rv = true; | 531 rv = true; |
517 } | 532 } |
518 | 533 |
519 if (rv) { | 534 if (rv) { |
520 // When we successfully finished a read, we no longer need to | 535 // When we successfully finished a read, we no longer need to |
521 // save the caller's buffers. Release our reference. | 536 // save the caller's buffers. Release our reference. |
522 filtered_read_buffer_ = NULL; | 537 filtered_read_buffer_ = NULL; |
523 filtered_read_buffer_len_ = 0; | 538 filtered_read_buffer_len_ = 0; |
(...skipping 64 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
588 void URLRequestJob::OnRawReadComplete(int bytes_read) { | 603 void URLRequestJob::OnRawReadComplete(int bytes_read) { |
589 DCHECK(raw_read_buffer_); | 604 DCHECK(raw_read_buffer_); |
590 if (bytes_read > 0) { | 605 if (bytes_read > 0) { |
591 RecordBytesRead(bytes_read); | 606 RecordBytesRead(bytes_read); |
592 } | 607 } |
593 raw_read_buffer_ = NULL; | 608 raw_read_buffer_ = NULL; |
594 } | 609 } |
595 | 610 |
596 void URLRequestJob::RecordBytesRead(int bytes_read) { | 611 void URLRequestJob::RecordBytesRead(int bytes_read) { |
597 filter_input_byte_count_ += bytes_read; | 612 filter_input_byte_count_ += bytes_read; |
| 613 prefilter_bytes_read_ += bytes_read; |
| 614 if (!filter_.get()) |
| 615 postfilter_bytes_read_ += bytes_read; |
| 616 DVLOG(2) << __FUNCTION__ << "() " |
| 617 << "\"" << (request_ ? request_->url().spec() : "???") << "\"" |
| 618 << " pre bytes read = " << bytes_read |
| 619 << " pre total = " << prefilter_bytes_read_ |
| 620 << " post total = " << postfilter_bytes_read_; |
598 UpdatePacketReadTimes(); // Facilitate stats recording if it is active. | 621 UpdatePacketReadTimes(); // Facilitate stats recording if it is active. |
599 g_url_request_job_tracker.OnBytesRead(this, raw_read_buffer_->data(), | 622 g_url_request_job_tracker.OnBytesRead(this, raw_read_buffer_->data(), |
600 bytes_read); | 623 bytes_read); |
601 } | 624 } |
602 | 625 |
603 bool URLRequestJob::FilterHasData() { | 626 bool URLRequestJob::FilterHasData() { |
604 return filter_.get() && filter_->stream_data_len(); | 627 return filter_.get() && filter_->stream_data_len(); |
605 } | 628 } |
606 | 629 |
// Intentionally a no-op here.  The caller (RecordBytesRead) invokes this
// to "facilitate stats recording if it is active"; presumably a subclass
// or another build provides a real implementation.  NOTE(review): the
// declaration is outside this chunk — confirm whether it is virtual.
void URLRequestJob::UpdatePacketReadTimes() {
}
609 | 632 |
610 } // namespace net | 633 } // namespace net |
OLD | NEW |