OLD | NEW |
---|---|
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/browser/download/download_file_impl.h" | 5 #include "content/browser/download/download_file_impl.h" |
6 | 6 |
7 #include <string> | 7 #include <string> |
8 #include <utility> | 8 #include <utility> |
9 | 9 |
10 #include "base/bind.h" | 10 #include "base/bind.h" |
11 #include "base/files/file_util.h" | 11 #include "base/files/file_util.h" |
12 #include "base/memory/ptr_util.h" | 12 #include "base/memory/ptr_util.h" |
13 #include "base/strings/stringprintf.h" | 13 #include "base/strings/stringprintf.h" |
14 #include "base/time/time.h" | 14 #include "base/time/time.h" |
15 #include "base/values.h" | 15 #include "base/values.h" |
16 #include "content/browser/byte_stream.h" | 16 #include "content/browser/byte_stream.h" |
17 #include "content/browser/download/download_create_info.h" | 17 #include "content/browser/download/download_create_info.h" |
18 #include "content/browser/download/download_destination_observer.h" | 18 #include "content/browser/download/download_destination_observer.h" |
19 #include "content/browser/download/download_interrupt_reasons_impl.h" | 19 #include "content/browser/download/download_interrupt_reasons_impl.h" |
20 #include "content/browser/download/download_net_log_parameters.h" | 20 #include "content/browser/download/download_net_log_parameters.h" |
21 #include "content/browser/download/download_stats.h" | 21 #include "content/browser/download/download_stats.h" |
22 #include "content/browser/download/parallel_download_utils.h" | |
22 #include "content/public/browser/browser_thread.h" | 23 #include "content/public/browser/browser_thread.h" |
23 #include "crypto/secure_hash.h" | 24 #include "crypto/secure_hash.h" |
24 #include "crypto/sha2.h" | 25 #include "crypto/sha2.h" |
25 #include "net/base/io_buffer.h" | 26 #include "net/base/io_buffer.h" |
26 #include "net/log/net_log.h" | 27 #include "net/log/net_log.h" |
27 #include "net/log/net_log_event_type.h" | 28 #include "net/log/net_log_event_type.h" |
28 #include "net/log/net_log_source.h" | 29 #include "net/log/net_log_source.h" |
29 #include "net/log/net_log_source_type.h" | 30 #include "net/log/net_log_source_type.h" |
30 | 31 |
31 namespace content { | 32 namespace content { |
32 | 33 |
33 const int kUpdatePeriodMs = 500; | 34 const int kUpdatePeriodMs = 500; |
34 const int kMaxTimeBlockingFileThreadMs = 1000; | 35 const int kMaxTimeBlockingFileThreadMs = 1000; |
35 | 36 |
36 // These constants control the default retry behavior for failing renames. Each | 37 // These constants control the default retry behavior for failing renames. Each |
37 // retry is performed after a delay that is twice the previous delay. The | 38 // retry is performed after a delay that is twice the previous delay. The |
38 // initial delay is specified by kInitialRenameRetryDelayMs. | 39 // initial delay is specified by kInitialRenameRetryDelayMs. |
39 const int kInitialRenameRetryDelayMs = 200; | 40 const int kInitialRenameRetryDelayMs = 200; |
40 | 41 |
41 // Number of times a failing rename is retried before giving up. | 42 // Number of times a failing rename is retried before giving up. |
42 const int kMaxRenameRetries = 3; | 43 const int kMaxRenameRetries = 3; |
43 | 44 |
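For quick reference, the two constants above define an exponential backoff: the first retry fires after kInitialRenameRetryDelayMs (200 ms), each later retry doubles the delay, and the rename gives up after kMaxRenameRetries attempts. A minimal standalone sketch of that schedule, using std::chrono in place of base::TimeDelta (the helper name GetRetryDelay is illustrative and not part of this file):

```cpp
#include <chrono>
#include <iostream>

constexpr int kInitialRenameRetryDelayMs = 200;
constexpr int kMaxRenameRetries = 3;

// Delay before the Nth retry (0-based): 200 ms, 400 ms, 800 ms, ...
std::chrono::milliseconds GetRetryDelay(int retries_already_attempted) {
  return std::chrono::milliseconds(kInitialRenameRetryDelayMs) *
         (1 << retries_already_attempted);
}

int main() {
  for (int attempt = 0; attempt < kMaxRenameRetries; ++attempt) {
    std::cout << "retry " << attempt + 1 << " after "
              << GetRetryDelay(attempt).count() << " ms\n";
  }
  return 0;
}
```

With kMaxRenameRetries == 3, a rename that keeps failing is retried after 200 ms, 400 ms, and 800 ms before giving up.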
44 DownloadFileImpl::SourceStream::SourceStream(int64_t offset, int64_t length) | 45 DownloadFileImpl::SourceStream::SourceStream(int64_t offset, int64_t length) |
45 : offset_(offset), length_(length), bytes_written_(0), finished_(false) {} | 46 : offset_(offset), |
| 47 length_(length), |
| 48 bytes_written_(0), |
| 49 finished_(false), |
| 50 index_(0u) {} |
46 | 51 |
47 DownloadFileImpl::SourceStream::~SourceStream() = default; | 52 DownloadFileImpl::SourceStream::~SourceStream() = default; |
48 | 53 |
49 void DownloadFileImpl::SourceStream::SetByteStream( | 54 void DownloadFileImpl::SourceStream::SetByteStream( |
50 std::unique_ptr<ByteStreamReader> stream_reader) { | 55 std::unique_ptr<ByteStreamReader> stream_reader) { |
51 stream_reader_ = std::move(stream_reader); | 56 stream_reader_ = std::move(stream_reader); |
52 } | 57 } |
53 | 58 |
54 void DownloadFileImpl::SourceStream::OnWriteBytesToDisk(int64_t bytes_write) { | 59 void DownloadFileImpl::SourceStream::OnWriteBytesToDisk(int64_t bytes_write) { |
55 bytes_written_ += bytes_write; | 60 bytes_written_ += bytes_write; |
(...skipping 249 matching lines...)
305 should_terminate = true; | 310 should_terminate = true; |
306 incoming_data_size = | 311 incoming_data_size = |
307 source_stream->length() - source_stream->bytes_written(); | 312 source_stream->length() - source_stream->bytes_written(); |
308 } | 313 } |
309 reason = WriteDataToFile( | 314 reason = WriteDataToFile( |
310 source_stream->offset() + source_stream->bytes_written(), | 315 source_stream->offset() + source_stream->bytes_written(), |
311 incoming_data.get()->data(), incoming_data_size); | 316 incoming_data.get()->data(), incoming_data_size); |
312 disk_writes_time_ += (base::TimeTicks::Now() - write_start); | 317 disk_writes_time_ += (base::TimeTicks::Now() - write_start); |
313 bytes_seen_ += incoming_data_size; | 318 bytes_seen_ += incoming_data_size; |
314 total_incoming_data_size += incoming_data_size; | 319 total_incoming_data_size += incoming_data_size; |
315 if (reason == DOWNLOAD_INTERRUPT_REASON_NONE) | 320 if (reason == DOWNLOAD_INTERRUPT_REASON_NONE) { |
| 321 int64_t prev_bytes_written = source_stream->bytes_written(); |
316 source_stream->OnWriteBytesToDisk(incoming_data_size); | 322 source_stream->OnWriteBytesToDisk(incoming_data_size); |
| 323 // If the write operation creates a new slice, add it to the |
David Trainor (moved to gerrit), 2017/03/09 17:44:02: Move this comment after the if (!is_sparse_file_)?
qinmin, 2017/03/09 21:44:58: Done.
| 324 // |received_slices_| and update all the entries in |
| 325 // |source_streams_|. |
| 326 if (!is_sparse_file_) |
David Trainor (moved to gerrit), 2017/03/09 17:44:02: Starting to wonder if in the future we should cons…
qinmin, 2017/03/09 21:44:58: updated the TODO in the header file. There could s…
| 327 break; |
| 328 if (incoming_data_size > 0 && prev_bytes_written == 0) { |
| 329 AddNewSlice(source_stream->offset(), incoming_data_size); |
| 330 } else { |
| 331 received_slices_[source_stream->index()].received_bytes += |
David Trainor (moved to gerrit), 2017/03/09 17:44:02: Should the stream just hold a reference to the ass…
qinmin, 2017/03/09 21:44:58: Hold a pointer/reference is not safe when the |rec…
| 332 incoming_data_size; |
| 333 } |
| 334 } |
317 } | 335 } |
318 break; | 336 break; |
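The new branch above is the bookkeeping that parallel downloads need: the first successful write from a stream registers a new slice via AddNewSlice(), while every later write simply grows the slice that stream is already feeding, located by source_stream->index(). As qinmin notes in the thread above, the stream keeps an index rather than a pointer into |received_slices_|, since inserting into that vector can reallocate it. A simplified standalone model of the branch; ReceivedSlice and SourceStream here are small stand-ins, and RecordWrite is an illustrative name, not part of the patch:

```cpp
#include <cstddef>
#include <cstdint>
#include <vector>

// Minimal stand-ins for DownloadItem::ReceivedSlice and SourceStream.
struct ReceivedSlice {
  int64_t offset;
  int64_t received_bytes;
};

struct SourceStream {
  int64_t offset = 0;
  int64_t bytes_written = 0;
  // Index into the slice vector. A pointer or reference would be unsafe:
  // inserting another slice may reallocate the vector and invalidate it.
  std::size_t index = 0;
};

// Mirrors the STREAM_HAS_DATA branch above: the first write from a stream
// creates a slice, later writes extend the slice it already points at.
void RecordWrite(SourceStream& stream,
                 int64_t bytes,
                 std::vector<ReceivedSlice>& received_slices) {
  const int64_t prev_bytes_written = stream.bytes_written;
  stream.bytes_written += bytes;
  if (bytes > 0 && prev_bytes_written == 0) {
    // The real AddNewSlice() inserts in sorted order and re-indexes the
    // other streams; appending is enough for this sketch.
    received_slices.push_back({stream.offset, bytes});
    stream.index = received_slices.size() - 1;
  } else {
    received_slices[stream.index].received_bytes += bytes;
  }
}
```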
319 case ByteStreamReader::STREAM_COMPLETE: | 337 case ByteStreamReader::STREAM_COMPLETE: |
320 { | 338 { |
321 reason = static_cast<DownloadInterruptReason>( | 339 reason = static_cast<DownloadInterruptReason>( |
322 source_stream->stream_reader()->GetStatus()); | 340 source_stream->stream_reader()->GetStatus()); |
323 SendUpdate(); | 341 SendUpdate(); |
324 } | 342 } |
325 break; | 343 break; |
326 default: | 344 default: |
(...skipping 99 matching lines...)
426 | 444 |
427 void DownloadFileImpl::WillWriteToDisk(size_t data_len) { | 445 void DownloadFileImpl::WillWriteToDisk(size_t data_len) { |
428 if (!update_timer_->IsRunning()) { | 446 if (!update_timer_->IsRunning()) { |
429 update_timer_->Start(FROM_HERE, | 447 update_timer_->Start(FROM_HERE, |
430 base::TimeDelta::FromMilliseconds(kUpdatePeriodMs), | 448 base::TimeDelta::FromMilliseconds(kUpdatePeriodMs), |
431 this, &DownloadFileImpl::SendUpdate); | 449 this, &DownloadFileImpl::SendUpdate); |
432 } | 450 } |
433 rate_estimator_.Increment(data_len); | 451 rate_estimator_.Increment(data_len); |
434 } | 452 } |
435 | 453 |
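WillWriteToDisk() above throttles progress reporting: the repeating timer is started lazily on the first write and then fires SendUpdate() every kUpdatePeriodMs (500 ms), no matter how many buffers land in between. The same rate-limiting idea, modeled without base::Timer in a standalone sketch (ProgressThrottle and its members are illustrative names only):

```cpp
#include <chrono>
#include <functional>
#include <utility>

// Rate-limits progress callbacks: at most one update per |period|, even if
// OnWrite() is called for every buffer flushed to disk. The real code drives
// this with a repeating base::Timer instead of checking a clock inline.
class ProgressThrottle {
 public:
  ProgressThrottle(std::chrono::milliseconds period,
                   std::function<void()> send_update)
      : period_(period), send_update_(std::move(send_update)) {}

  // Analogous to WillWriteToDisk(): called once per write. Sends an update
  // only if at least |period_| has elapsed since the previous one.
  void OnWrite() {
    const auto now = std::chrono::steady_clock::now();
    if (now - last_update_ >= period_) {
      send_update_();
      last_update_ = now;
    }
  }

 private:
  const std::chrono::milliseconds period_;
  const std::function<void()> send_update_;
  std::chrono::steady_clock::time_point last_update_{};
};
```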
| 454 void DownloadFileImpl::AddNewSlice(int64_t offset, int64_t length) { |
| 455 if (!is_sparse_file_) |
| 456 return; |
| 457 size_t index = AddOrMergeReceivedSliceIntoSortedArray( |
| 458 DownloadItem::ReceivedSlice(offset, length), received_slices_); |
| 459 // Check if the slice is added as a new slice, or merged with an existing one. |
| 460 bool slice_added = (offset == received_slices_[index].offset); |
| 461 // Update the index of existing SourceStreams. |
| 462 for (auto& stream : source_streams_) { |
| 463 SourceStream* source_stream = stream.second.get(); |
| 464 if (source_stream->offset() > offset) { |
| 465 if (slice_added && source_stream->bytes_written() > 0) |
| 466 source_stream->set_index(source_stream->index() + 1); |
| 467 } else if (source_stream->offset() == offset) { |
| 468 source_stream->set_index(index); |
| 469 } else if (source_stream->length() == |
| 470 DownloadSaveInfo::kLengthFullContent || |
| 471 source_stream->length() > offset - source_stream->offset()) { |
| 472 // The newly introduced slice will impact the length of the SourceStreams |
| 473 // preceding it. |
| 474 source_stream->set_length(offset - source_stream->offset()); |
| 475 } |
| 476 } |
| 477 } |
| 478 |
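To make the re-indexing in AddNewSlice() concrete: AddOrMergeReceivedSliceIntoSortedArray() (from parallel_download_utils.h, newly included above) keeps |received_slices_| sorted by offset, so inserting a slice in the middle shifts the indices of every slice after it, and any stream already writing past the new offset must have its cached index bumped. A reduced sketch with a worked example; AddSliceSortedByOffset is a simplified stand-in that only shows the sorted-insert part, not the merge-with-neighbor handling of the real helper:

```cpp
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

struct ReceivedSlice {
  int64_t offset;
  int64_t received_bytes;
};

// Insert |slice| keeping the vector sorted by offset and return its index.
// (The real AddOrMergeReceivedSliceIntoSortedArray() can also merge the slice
// into a contiguous predecessor; that case is omitted here.)
std::size_t AddSliceSortedByOffset(const ReceivedSlice& slice,
                                   std::vector<ReceivedSlice>& slices) {
  auto it = std::upper_bound(slices.begin(), slices.end(), slice,
                             [](const ReceivedSlice& a, const ReceivedSlice& b) {
                               return a.offset < b.offset;
                             });
  return static_cast<std::size_t>(slices.insert(it, slice) - slices.begin());
}

int main() {
  // Two slices already received; a stream now starts writing at offset 500.
  std::vector<ReceivedSlice> slices = {{0, 100}, {1000, 50}};
  std::size_t index = AddSliceSortedByOffset({500, 10}, slices);

  // The new slice lands at index 1, so a stream whose slice used to be at
  // index 1 (offset 1000) must now use index 2, which is the adjustment the
  // re-indexing loop in AddNewSlice() performs. A stream still covering
  // [0, unbounded) would also get its length capped at 500.
  assert(index == 1);
  assert(slices[2].offset == 1000);
  return 0;
}
```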
436 DownloadFileImpl::RenameParameters::RenameParameters( | 479 DownloadFileImpl::RenameParameters::RenameParameters( |
437 RenameOption option, | 480 RenameOption option, |
438 const base::FilePath& new_path, | 481 const base::FilePath& new_path, |
439 const RenameCompletionCallback& completion_callback) | 482 const RenameCompletionCallback& completion_callback) |
440 : option(option), | 483 : option(option), |
441 new_path(new_path), | 484 new_path(new_path), |
442 retries_left(kMaxRenameRetries), | 485 retries_left(kMaxRenameRetries), |
443 completion_callback(completion_callback) {} | 486 completion_callback(completion_callback) {} |
444 | 487 |
445 DownloadFileImpl::RenameParameters::~RenameParameters() {} | 488 DownloadFileImpl::RenameParameters::~RenameParameters() {} |
446 | 489 |
447 } // namespace content | 490 } // namespace content |