OLD | NEW |
1 // Copyright (c) 2011 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2011 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "net/url_request/url_request_job.h" | 5 #include "net/url_request/url_request_job.h" |
6 | 6 |
7 #include "base/compiler_specific.h" | 7 #include "base/compiler_specific.h" |
8 #include "base/message_loop.h" | 8 #include "base/message_loop.h" |
9 #include "base/string_number_conversions.h" | 9 #include "base/string_number_conversions.h" |
10 #include "base/string_util.h" | 10 #include "base/string_util.h" |
(...skipping 294 matching lines...)
305 // The headers should be complete before reads complete | 305 // The headers should be complete before reads complete |
306 DCHECK(has_handled_response_); | 306 DCHECK(has_handled_response_); |
307 | 307 |
308 OnRawReadComplete(bytes_read); | 308 OnRawReadComplete(bytes_read); |
309 | 309 |
310 // Don't notify if we had an error. | 310 // Don't notify if we had an error. |
311 if (!request_->status().is_success()) | 311 if (!request_->status().is_success()) |
312 return; | 312 return; |
313 | 313 |
314 // When notifying the delegate, the delegate can release the request | 314 // When notifying the delegate, the delegate can release the request |
315 // (and thus release 'this'). After calling to the delgate, we must | 315 // (and thus release 'this'). After calling to the delegate, we must |
316 // check the request pointer to see if it still exists, and return | 316 // check the request pointer to see if it still exists, and return |
317 // immediately if it has been destroyed. self_preservation ensures our | 317 // immediately if it has been destroyed. self_preservation ensures our |
318 // survival until we can get out of this method. | 318 // survival until we can get out of this method. |
319 scoped_refptr<URLRequestJob> self_preservation(this); | 319 scoped_refptr<URLRequestJob> self_preservation(this); |
320 | 320 |
321 prefilter_bytes_read_ += bytes_read; | |
322 if (filter_.get()) { | 321 if (filter_.get()) { |
323 // Tell the filter that it has more data | 322 // Tell the filter that it has more data |
324 FilteredDataRead(bytes_read); | 323 FilteredDataRead(bytes_read); |
325 | 324 |
326 // Filter the data. | 325 // Filter the data. |
327 int filter_bytes_read = 0; | 326 int filter_bytes_read = 0; |
328 if (ReadFilteredData(&filter_bytes_read)) { | 327 if (ReadFilteredData(&filter_bytes_read)) { |
329 postfilter_bytes_read_ += filter_bytes_read; | |
330 request_->delegate()->OnReadCompleted(request_, filter_bytes_read); | 328 request_->delegate()->OnReadCompleted(request_, filter_bytes_read); |
331 } | 329 } |
332 } else { | 330 } else { |
333 postfilter_bytes_read_ += bytes_read; | |
334 request_->delegate()->OnReadCompleted(request_, bytes_read); | 331 request_->delegate()->OnReadCompleted(request_, bytes_read); |
335 } | 332 } |
| 333 DVLOG(1) << __FUNCTION__ << "() " |
| 334 << "\"" << (request_ ? request_->url().spec() : "???") << "\"" |
| 335 << " pre bytes read = " << bytes_read |
| 336 << " pre total = " << prefilter_bytes_read_ |
| 337 << " post total = " << postfilter_bytes_read_; |
336 } | 338 } |
337 | 339 |
338 void URLRequestJob::NotifyStartError(const URLRequestStatus &status) { | 340 void URLRequestJob::NotifyStartError(const URLRequestStatus &status) { |
339 DCHECK(!has_handled_response_); | 341 DCHECK(!has_handled_response_); |
340 has_handled_response_ = true; | 342 has_handled_response_ = true; |
341 if (request_) { | 343 if (request_) { |
342 request_->set_status(status); | 344 request_->set_status(status); |
343 request_->ResponseStarted(); | 345 request_->ResponseStarted(); |
344 } | 346 } |
345 } | 347 } |
(...skipping 119 matching lines...)
465 // and we should have at least tried to fill up the filter's input buffer. | 467 // and we should have at least tried to fill up the filter's input buffer. |
466 // Correct the state, and try again. | 468 // Correct the state, and try again. |
467 filter_needs_more_output_space_ = false; | 469 filter_needs_more_output_space_ = false; |
468 return ReadFilteredData(bytes_read); | 470 return ReadFilteredData(bytes_read); |
469 } | 471 } |
470 | 472 |
471 switch (status) { | 473 switch (status) { |
472 case Filter::FILTER_DONE: { | 474 case Filter::FILTER_DONE: { |
473 filter_needs_more_output_space_ = false; | 475 filter_needs_more_output_space_ = false; |
474 *bytes_read = filtered_data_len; | 476 *bytes_read = filtered_data_len; |
| 477 postfilter_bytes_read_ += filtered_data_len; |
475 rv = true; | 478 rv = true; |
476 break; | 479 break; |
477 } | 480 } |
478 case Filter::FILTER_NEED_MORE_DATA: { | 481 case Filter::FILTER_NEED_MORE_DATA: { |
479 filter_needs_more_output_space_ = | 482 filter_needs_more_output_space_ = |
480 (filtered_data_len == output_buffer_size); | 483 (filtered_data_len == output_buffer_size); |
481 // We have finished filtering all data currently in the buffer. | 484 // We have finished filtering all data currently in the buffer. |
482 // There might be some space left in the output buffer. One can | 485 // There might be some space left in the output buffer. One can |
483 // consider reading more data from the stream to feed the filter | 486 // consider reading more data from the stream to feed the filter |
484 // and filling up the output buffer. This leads to more complicated | 487 // and filling up the output buffer. This leads to more complicated |
485 // buffer management and data notification mechanisms. | 488 // buffer management and data notification mechanisms. |
486 // We can revisit this issue if there is a real perf need. | 489 // We can revisit this issue if there is a real perf need. |
487 if (filtered_data_len > 0) { | 490 if (filtered_data_len > 0) { |
488 *bytes_read = filtered_data_len; | 491 *bytes_read = filtered_data_len; |
| 492 postfilter_bytes_read_ += filtered_data_len; |
489 rv = true; | 493 rv = true; |
490 } else { | 494 } else { |
491 // Read again since we haven't received enough data yet (e.g., we may | 495 // Read again since we haven't received enough data yet (e.g., we may |
492 // not have a complete gzip header yet) | 496 // not have a complete gzip header yet) |
493 rv = ReadFilteredData(bytes_read); | 497 rv = ReadFilteredData(bytes_read); |
494 } | 498 } |
495 break; | 499 break; |
496 } | 500 } |
497 case Filter::FILTER_OK: { | 501 case Filter::FILTER_OK: { |
498 filter_needs_more_output_space_ = | 502 filter_needs_more_output_space_ = |
499 (filtered_data_len == output_buffer_size); | 503 (filtered_data_len == output_buffer_size); |
500 *bytes_read = filtered_data_len; | 504 *bytes_read = filtered_data_len; |
| 505 postfilter_bytes_read_ += filtered_data_len; |
501 rv = true; | 506 rv = true; |
502 break; | 507 break; |
503 } | 508 } |
504 case Filter::FILTER_ERROR: { | 509 case Filter::FILTER_ERROR: { |
| 510 DVLOG(1) << __FUNCTION__ << "() " |
| 511 << "\"" << (request_ ? request_->url().spec() : "???") << "\"" |
| 512 << " Filter Error"; |
505 filter_needs_more_output_space_ = false; | 513 filter_needs_more_output_space_ = false; |
506 NotifyDone(URLRequestStatus(URLRequestStatus::FAILED, | 514 NotifyDone(URLRequestStatus(URLRequestStatus::FAILED, |
507 ERR_CONTENT_DECODING_FAILED)); | 515 ERR_CONTENT_DECODING_FAILED)); |
508 rv = false; | 516 rv = false; |
509 break; | 517 break; |
510 } | 518 } |
511 default: { | 519 default: { |
512 NOTREACHED(); | 520 NOTREACHED(); |
513 filter_needs_more_output_space_ = false; | 521 filter_needs_more_output_space_ = false; |
514 rv = false; | 522 rv = false; |
515 break; | 523 break; |
516 } | 524 } |
517 } | 525 } |
| 526 DVLOG(2) << __FUNCTION__ << "() " |
| 527 << "\"" << (request_ ? request_->url().spec() : "???") << "\"" |
| 528 << " rv = " << rv |
| 529 << " post bytes read = " << filtered_data_len |
| 530 << " pre total = " << prefilter_bytes_read_ |
| 531 << " post total = " |
| 532 << postfilter_bytes_read_; |
518 } else { | 533 } else { |
519 // we are done, or there is no data left. | 534 // we are done, or there is no data left. |
520 rv = true; | 535 rv = true; |
521 } | 536 } |
522 | 537 |
523 if (rv) { | 538 if (rv) { |
524 // When we successfully finished a read, we no longer need to | 539 // When we successfully finished a read, we no longer need to |
525 // save the caller's buffers. Release our reference. | 540 // save the caller's buffers. Release our reference. |
526 filtered_read_buffer_ = NULL; | 541 filtered_read_buffer_ = NULL; |
527 filtered_read_buffer_len_ = 0; | 542 filtered_read_buffer_len_ = 0; |
(...skipping 62 matching lines...)
590 void URLRequestJob::OnRawReadComplete(int bytes_read) { | 605 void URLRequestJob::OnRawReadComplete(int bytes_read) { |
591 DCHECK(raw_read_buffer_); | 606 DCHECK(raw_read_buffer_); |
592 if (bytes_read > 0) { | 607 if (bytes_read > 0) { |
593 RecordBytesRead(bytes_read); | 608 RecordBytesRead(bytes_read); |
594 } | 609 } |
595 raw_read_buffer_ = NULL; | 610 raw_read_buffer_ = NULL; |
596 } | 611 } |
597 | 612 |
598 void URLRequestJob::RecordBytesRead(int bytes_read) { | 613 void URLRequestJob::RecordBytesRead(int bytes_read) { |
599 filter_input_byte_count_ += bytes_read; | 614 filter_input_byte_count_ += bytes_read; |
| 615 prefilter_bytes_read_ += bytes_read; |
| 616 if (!filter_.get()) |
| 617 postfilter_bytes_read_ += bytes_read; |
| 618 DVLOG(2) << __FUNCTION__ << "() " |
| 619 << "\"" << (request_ ? request_->url().spec() : "???") << "\"" |
| 620 << " pre bytes read = " << bytes_read |
| 621 << " pre total = " << prefilter_bytes_read_ |
| 622 << " post total = " << postfilter_bytes_read_; |
600 UpdatePacketReadTimes(); // Facilitate stats recording if it is active. | 623 UpdatePacketReadTimes(); // Facilitate stats recording if it is active. |
601 const URLRequestContext* context = request_->context(); | 624 const URLRequestContext* context = request_->context(); |
602 if (context && context->network_delegate()) | 625 if (context && context->network_delegate()) |
603 context->network_delegate()->NotifyRawBytesRead(*request_, bytes_read); | 626 context->network_delegate()->NotifyRawBytesRead(*request_, bytes_read); |
604 } | 627 } |
605 | 628 |
606 bool URLRequestJob::FilterHasData() { | 629 bool URLRequestJob::FilterHasData() { |
607 return filter_.get() && filter_->stream_data_len(); | 630 return filter_.get() && filter_->stream_data_len(); |
608 } | 631 } |
609 | 632 |
610 void URLRequestJob::UpdatePacketReadTimes() { | 633 void URLRequestJob::UpdatePacketReadTimes() { |
611 } | 634 } |
612 | 635 |
613 } // namespace net | 636 } // namespace net |