Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(375)

Side by Side Diff: net/url_request/url_request_job.cc

Issue 3010037: Add the actual data being read to the OnBytesRead callback, take two.... (Closed) Base URL: http://src.chromium.org/svn/trunk/src/
Patch Set: '' Created 10 years, 4 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « net/url_request/url_request_job.h ('k') | net/url_request/url_request_job_tracker.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright (c) 2010 The Chromium Authors. All rights reserved. 1 // Copyright (c) 2010 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "net/url_request/url_request_job.h" 5 #include "net/url_request/url_request_job.h"
6 6
7 #include "base/histogram.h" 7 #include "base/histogram.h"
8 #include "base/message_loop.h" 8 #include "base/message_loop.h"
9 #include "base/string_number_conversions.h" 9 #include "base/string_number_conversions.h"
10 #include "base/string_util.h" 10 #include "base/string_util.h"
(...skipping 15 matching lines...) Expand all
26 const int URLRequestJob::kFilterBufSize = 32 * 1024; 26 const int URLRequestJob::kFilterBufSize = 32 * 1024;
27 27
28 URLRequestJob::URLRequestJob(URLRequest* request) 28 URLRequestJob::URLRequestJob(URLRequest* request)
29 : request_(request), 29 : request_(request),
30 prefilter_bytes_read_(0), 30 prefilter_bytes_read_(0),
31 postfilter_bytes_read_(0), 31 postfilter_bytes_read_(0),
32 is_compressible_content_(false), 32 is_compressible_content_(false),
33 is_compressed_(false), 33 is_compressed_(false),
34 done_(false), 34 done_(false),
35 filter_needs_more_output_space_(false), 35 filter_needs_more_output_space_(false),
36 read_buffer_len_(0), 36 filtered_read_buffer_len_(0),
37 has_handled_response_(false), 37 has_handled_response_(false),
38 expected_content_size_(-1), 38 expected_content_size_(-1),
39 deferred_redirect_status_code_(-1), 39 deferred_redirect_status_code_(-1),
40 packet_timing_enabled_(false), 40 packet_timing_enabled_(false),
41 filter_input_byte_count_(0), 41 filter_input_byte_count_(0),
42 bytes_observed_in_packets_(0), 42 bytes_observed_in_packets_(0),
43 max_packets_timed_(0), 43 max_packets_timed_(0),
44 observed_packet_count_(0) { 44 observed_packet_count_(0) {
45 load_flags_ = request_->load_flags(); 45 load_flags_ = request_->load_flags();
46 is_profiling_ = request->enable_profiling(); 46 is_profiling_ = request->enable_profiling();
(...skipping 144 matching lines...) Expand 10 before | Expand all | Expand 10 after
191 191
192 // This function calls ReadData to get stream data. If a filter exists, passes 192 // This function calls ReadData to get stream data. If a filter exists, passes
193 // the data to the attached filter. Then returns the output from filter back to 193 // the data to the attached filter. Then returns the output from filter back to
194 // the caller. 194 // the caller.
195 bool URLRequestJob::Read(net::IOBuffer* buf, int buf_size, int *bytes_read) { 195 bool URLRequestJob::Read(net::IOBuffer* buf, int buf_size, int *bytes_read) {
196 bool rv = false; 196 bool rv = false;
197 197
198 DCHECK_LT(buf_size, 1000000); // sanity check 198 DCHECK_LT(buf_size, 1000000); // sanity check
199 DCHECK(buf); 199 DCHECK(buf);
200 DCHECK(bytes_read); 200 DCHECK(bytes_read);
201 DCHECK(filtered_read_buffer_ == NULL);
202 DCHECK_EQ(0, filtered_read_buffer_len_);
201 203
202 *bytes_read = 0; 204 *bytes_read = 0;
203 205
204 // Skip Filter if not present 206 // Skip Filter if not present
205 if (!filter_.get()) { 207 if (!filter_.get()) {
206 rv = ReadRawData(buf, buf_size, bytes_read); 208 rv = ReadRawDataHelper(buf, buf_size, bytes_read);
207 if (rv && *bytes_read > 0)
208 RecordBytesRead(*bytes_read);
209 } else { 209 } else {
210 // Save the caller's buffers while we do IO 210 // Save the caller's buffers while we do IO
211 // in the filter's buffers. 211 // in the filter's buffers.
212 read_buffer_ = buf; 212 filtered_read_buffer_ = buf;
213 read_buffer_len_ = buf_size; 213 filtered_read_buffer_len_ = buf_size;
214 214
215 if (ReadFilteredData(bytes_read)) { 215 if (ReadFilteredData(bytes_read)) {
216 rv = true; // we have data to return 216 rv = true; // we have data to return
217 } else { 217 } else {
218 rv = false; // error, or a new IO is pending 218 rv = false; // error, or a new IO is pending
219 } 219 }
220 } 220 }
221 if (rv && *bytes_read == 0) 221 if (rv && *bytes_read == 0)
222 NotifyDone(URLRequestStatus()); 222 NotifyDone(URLRequestStatus());
223 return rv; 223 return rv;
(...skipping 42 matching lines...) Expand 10 before | Expand all | Expand 10 after
266 DCHECK(filter_.get()); 266 DCHECK(filter_.get());
267 267
268 *bytes_read = 0; 268 *bytes_read = 0;
269 269
270 // Get more pre-filtered data if needed. 270 // Get more pre-filtered data if needed.
271 // TODO(mbelshe): is it possible that the filter needs *MORE* data 271 // TODO(mbelshe): is it possible that the filter needs *MORE* data
272 // when there is some data already in the buffer? 272 // when there is some data already in the buffer?
273 if (!filter_->stream_data_len() && !is_done()) { 273 if (!filter_->stream_data_len() && !is_done()) {
274 net::IOBuffer* stream_buffer = filter_->stream_buffer(); 274 net::IOBuffer* stream_buffer = filter_->stream_buffer();
275 int stream_buffer_size = filter_->stream_buffer_size(); 275 int stream_buffer_size = filter_->stream_buffer_size();
276 rv = ReadRawData(stream_buffer, stream_buffer_size, bytes_read); 276 rv = ReadRawDataHelper(stream_buffer, stream_buffer_size, bytes_read);
277 if (rv && *bytes_read > 0)
278 RecordBytesRead(*bytes_read);
279 } 277 }
280 return rv; 278 return rv;
281 } 279 }
282 280
283 void URLRequestJob::FollowRedirect(const GURL& location, int http_status_code) { 281 void URLRequestJob::FollowRedirect(const GURL& location, int http_status_code) {
284 g_url_request_job_tracker.OnJobRedirect(this, location, http_status_code); 282 g_url_request_job_tracker.OnJobRedirect(this, location, http_status_code);
285 283
286 int rv = request_->Redirect(location, http_status_code); 284 int rv = request_->Redirect(location, http_status_code);
287 if (rv != net::OK) 285 if (rv != net::OK)
288 NotifyDone(URLRequestStatus(URLRequestStatus::FAILED, rv)); 286 NotifyDone(URLRequestStatus(URLRequestStatus::FAILED, rv));
289 } 287 }
290 288
291 void URLRequestJob::FilteredDataRead(int bytes_read) { 289 void URLRequestJob::FilteredDataRead(int bytes_read) {
292 DCHECK(filter_.get()); // don't add data if there is no filter 290 DCHECK(filter_.get()); // don't add data if there is no filter
293 filter_->FlushStreamBuffer(bytes_read); 291 filter_->FlushStreamBuffer(bytes_read);
294 } 292 }
295 293
296 bool URLRequestJob::ReadFilteredData(int* bytes_read) { 294 bool URLRequestJob::ReadFilteredData(int* bytes_read) {
297 DCHECK(filter_.get()); // don't add data if there is no filter 295 DCHECK(filter_.get()); // don't add data if there is no filter
298 DCHECK(read_buffer_ != NULL); // we need to have a buffer to fill 296 DCHECK(filtered_read_buffer_ != NULL); // we need to have a buffer to fill
299 DCHECK_GT(read_buffer_len_, 0); // sanity check 297 DCHECK_GT(filtered_read_buffer_len_, 0); // sanity check
300 DCHECK_LT(read_buffer_len_, 1000000); // sanity check 298 DCHECK_LT(filtered_read_buffer_len_, 1000000); // sanity check
299 DCHECK(raw_read_buffer_ == NULL); // there should be no raw read buffer yet
301 300
302 bool rv = false; 301 bool rv = false;
303 *bytes_read = 0; 302 *bytes_read = 0;
304 303
305 if (is_done()) 304 if (is_done())
306 return true; 305 return true;
307 306
308 if (!filter_needs_more_output_space_ && !filter_->stream_data_len()) { 307 if (!filter_needs_more_output_space_ && !filter_->stream_data_len()) {
309 // We don't have any raw data to work with, so 308 // We don't have any raw data to work with, so
310 // read from the socket. 309 // read from the socket.
311 int filtered_data_read; 310 int filtered_data_read;
312 if (ReadRawDataForFilter(&filtered_data_read)) { 311 if (ReadRawDataForFilter(&filtered_data_read)) {
313 if (filtered_data_read > 0) { 312 if (filtered_data_read > 0) {
314 filter_->FlushStreamBuffer(filtered_data_read); // Give data to filter. 313 filter_->FlushStreamBuffer(filtered_data_read); // Give data to filter.
315 } else { 314 } else {
316 return true; // EOF 315 return true; // EOF
317 } 316 }
318 } else { 317 } else {
319 return false; // IO Pending (or error) 318 return false; // IO Pending (or error)
320 } 319 }
321 } 320 }
322 321
323 if ((filter_->stream_data_len() || filter_needs_more_output_space_) 322 if ((filter_->stream_data_len() || filter_needs_more_output_space_)
324 && !is_done()) { 323 && !is_done()) {
325 // Get filtered data. 324 // Get filtered data.
326 int filtered_data_len = read_buffer_len_; 325 int filtered_data_len = filtered_read_buffer_len_;
327 Filter::FilterStatus status; 326 Filter::FilterStatus status;
328 int output_buffer_size = filtered_data_len; 327 int output_buffer_size = filtered_data_len;
329 status = filter_->ReadData(read_buffer_->data(), &filtered_data_len); 328 status = filter_->ReadData(filtered_read_buffer_->data(),
329 &filtered_data_len);
330 330
331 if (filter_needs_more_output_space_ && 0 == filtered_data_len) { 331 if (filter_needs_more_output_space_ && 0 == filtered_data_len) {
332 // filter_needs_more_output_space_ was mistaken... there are no more bytes 332 // filter_needs_more_output_space_ was mistaken... there are no more bytes
333 // and we should have at least tried to fill up the filter's input buffer. 333 // and we should have at least tried to fill up the filter's input buffer.
334 // Correct the state, and try again. 334 // Correct the state, and try again.
335 filter_needs_more_output_space_ = false; 335 filter_needs_more_output_space_ = false;
336 return ReadFilteredData(bytes_read); 336 return ReadFilteredData(bytes_read);
337 } 337 }
338 338
339 switch (status) { 339 switch (status) {
(...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after
384 } 384 }
385 } 385 }
386 } else { 386 } else {
387 // we are done, or there is no data left. 387 // we are done, or there is no data left.
388 rv = true; 388 rv = true;
389 } 389 }
390 390
391 if (rv) { 391 if (rv) {
392 // When we successfully finished a read, we no longer need to 392 // When we successfully finished a read, we no longer need to
393 // save the caller's buffers. Release our reference. 393 // save the caller's buffers. Release our reference.
394 read_buffer_ = NULL; 394 filtered_read_buffer_ = NULL;
395 read_buffer_len_ = 0; 395 filtered_read_buffer_len_ = 0;
396 }
397 return rv;
398 }
399
400 bool URLRequestJob::ReadRawDataHelper(net::IOBuffer* buf, int buf_size,
401 int* bytes_read) {
402 DCHECK(!request_->status().is_io_pending());
403 DCHECK(raw_read_buffer_ == NULL);
404
405 // Keep a pointer to the read buffer, so we have access to it in the
406 // OnRawReadComplete() callback in the event that the read completes
407 // asynchronously.
408 raw_read_buffer_ = buf;
409 bool rv = ReadRawData(buf, buf_size, bytes_read);
410
411 if (!request_->status().is_io_pending()) {
412 // If the read completes synchronously, either success or failure,
413 // invoke the OnRawReadComplete callback so we can account for the
414 // completed read.
415 OnRawReadComplete(*bytes_read);
396 } 416 }
397 return rv; 417 return rv;
398 } 418 }
399 419
400 bool URLRequestJob::ReadRawData(net::IOBuffer* buf, int buf_size, 420 bool URLRequestJob::ReadRawData(net::IOBuffer* buf, int buf_size,
401 int *bytes_read) { 421 int *bytes_read) {
402 DCHECK(bytes_read); 422 DCHECK(bytes_read);
403 *bytes_read = 0; 423 *bytes_read = 0;
404 NotifyDone(URLRequestStatus()); 424 NotifyDone(URLRequestStatus());
405 return false; 425 return false;
(...skipping 114 matching lines...) Expand 10 before | Expand all | Expand 10 after
520 if (!request_ || !request_->delegate()) 540 if (!request_ || !request_->delegate())
521 return; // The request was destroyed, so there is no more work to do. 541 return; // The request was destroyed, so there is no more work to do.
522 542
523 // TODO(darin): Bug 1004233. Re-enable this test once all of the chrome 543 // TODO(darin): Bug 1004233. Re-enable this test once all of the chrome
524 // unit_tests have been fixed to not trip this. 544 // unit_tests have been fixed to not trip this.
525 //DCHECK(!request_->status().is_io_pending()); 545 //DCHECK(!request_->status().is_io_pending());
526 546
527 // The headers should be complete before reads complete 547 // The headers should be complete before reads complete
528 DCHECK(has_handled_response_); 548 DCHECK(has_handled_response_);
529 549
530 if (bytes_read > 0) 550 OnRawReadComplete(bytes_read);
531 RecordBytesRead(bytes_read);
532 551
533 // Don't notify if we had an error. 552 // Don't notify if we had an error.
534 if (!request_->status().is_success()) 553 if (!request_->status().is_success())
535 return; 554 return;
536 555
537 // When notifying the delegate, the delegate can release the request 556 // When notifying the delegate, the delegate can release the request
538 // (and thus release 'this'). After calling to the delegate, we must 557 // (and thus release 'this'). After calling to the delegate, we must
539 // check the request pointer to see if it still exists, and return 558 // check the request pointer to see if it still exists, and return
540 // immediately if it has been destroyed. self_preservation ensures our 559 // immediately if it has been destroyed. self_preservation ensures our
541 // survival until we can get out of this method. 560 // survival until we can get out of this method.
(...skipping 92 matching lines...) Expand 10 before | Expand all | Expand 10 after
634 void URLRequestJob::NotifyRestartRequired() { 653 void URLRequestJob::NotifyRestartRequired() {
635 DCHECK(!has_handled_response_); 654 DCHECK(!has_handled_response_);
636 if (GetStatus().status() != URLRequestStatus::CANCELED) 655 if (GetStatus().status() != URLRequestStatus::CANCELED)
637 request_->Restart(); 656 request_->Restart();
638 } 657 }
639 658
640 bool URLRequestJob::FilterHasData() { 659 bool URLRequestJob::FilterHasData() {
641 return filter_.get() && filter_->stream_data_len(); 660 return filter_.get() && filter_->stream_data_len();
642 } 661 }
643 662
663 void URLRequestJob::OnRawReadComplete(int bytes_read) {
664 DCHECK(raw_read_buffer_);
665 if (bytes_read > 0) {
666 RecordBytesRead(bytes_read);
667 }
668 raw_read_buffer_ = NULL;
669 }
670
644 void URLRequestJob::RecordBytesRead(int bytes_read) { 671 void URLRequestJob::RecordBytesRead(int bytes_read) {
645 if (is_profiling()) { 672 if (is_profiling()) {
646 ++(metrics_->number_of_read_IO_); 673 ++(metrics_->number_of_read_IO_);
647 metrics_->total_bytes_read_ += bytes_read; 674 metrics_->total_bytes_read_ += bytes_read;
648 } 675 }
649 filter_input_byte_count_ += bytes_read; 676 filter_input_byte_count_ += bytes_read;
650 UpdatePacketReadTimes(); // Facilitate stats recording if it is active. 677 UpdatePacketReadTimes(); // Facilitate stats recording if it is active.
651 g_url_request_job_tracker.OnBytesRead(this, bytes_read); 678 g_url_request_job_tracker.OnBytesRead(this, raw_read_buffer_->data(),
679 bytes_read);
652 } 680 }
653 681
654 const URLRequestStatus URLRequestJob::GetStatus() { 682 const URLRequestStatus URLRequestJob::GetStatus() {
655 if (request_) 683 if (request_)
656 return request_->status(); 684 return request_->status();
657 // If the request is gone, we must be cancelled. 685 // If the request is gone, we must be cancelled.
658 return URLRequestStatus(URLRequestStatus::CANCELED, 686 return URLRequestStatus(URLRequestStatus::CANCELED,
659 net::ERR_ABORTED); 687 net::ERR_ABORTED);
660 } 688 }
661 689
(...skipping 233 matching lines...) Expand 10 before | Expand all | Expand 10 after
895 return; 923 return;
896 } 924 }
897 925
898 if (is_compressed_) { 926 if (is_compressed_) {
899 COMPRESSION_HISTOGRAM("NoProxy.BytesBeforeCompression", compressed_B); 927 COMPRESSION_HISTOGRAM("NoProxy.BytesBeforeCompression", compressed_B);
900 COMPRESSION_HISTOGRAM("NoProxy.BytesAfterCompression", decompressed_B); 928 COMPRESSION_HISTOGRAM("NoProxy.BytesAfterCompression", decompressed_B);
901 } else { 929 } else {
902 COMPRESSION_HISTOGRAM("NoProxy.ShouldHaveBeenCompressed", decompressed_B); 930 COMPRESSION_HISTOGRAM("NoProxy.ShouldHaveBeenCompressed", decompressed_B);
903 } 931 }
904 } 932 }
OLDNEW
« no previous file with comments | « net/url_request/url_request_job.h ('k') | net/url_request/url_request_job_tracker.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698