Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright 2017 The Chromium Authors. All rights reserved. | 1 // Copyright 2017 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef CONTENT_BROWSER_DOWNLOAD_PARALLEL_DOWNLOAD_JOB_H_ | 5 #ifndef CONTENT_BROWSER_DOWNLOAD_PARALLEL_DOWNLOAD_JOB_H_ |
| 6 #define CONTENT_BROWSER_DOWNLOAD_PARALLEL_DOWNLOAD_JOB_H_ | 6 #define CONTENT_BROWSER_DOWNLOAD_PARALLEL_DOWNLOAD_JOB_H_ |
| 7 | 7 |
| 8 #include <memory> | 8 #include <memory> |
| 9 #include <unordered_map> | |
| 9 #include <vector> | 10 #include <vector> |
| 10 | 11 |
| 11 #include "base/macros.h" | 12 #include "base/macros.h" |
| 12 #include "content/browser/download/download_job_impl.h" | 13 #include "content/browser/download/download_job_impl.h" |
| 13 #include "content/browser/download/download_worker.h" | 14 #include "content/browser/download/download_worker.h" |
| 14 #include "content/common/content_export.h" | 15 #include "content/common/content_export.h" |
| 15 | 16 |
| 16 namespace content { | 17 namespace content { |
| 17 | 18 |
| 18 // DownloadJob that can create concurrent range requests to fetch different | 19 // DownloadJob that can create concurrent range requests to fetch different |
| 19 // parts of the file. | 20 // parts of the file. |
| 20 // The original request is held in the base class DownloadUrlJob. | 21 // The original request is held in the base class. |
| 21 class CONTENT_EXPORT ParallelDownloadJob : public DownloadJobImpl { | 22 class CONTENT_EXPORT ParallelDownloadJob : public DownloadJobImpl, |
| 23 public DownloadWorker::Delegate { | |
| 22 public: | 24 public: |
| 23 ParallelDownloadJob( | 25 ParallelDownloadJob( |
| 24 DownloadItemImpl* download_item, | 26 DownloadItemImpl* download_item, |
| 25 std::unique_ptr<DownloadRequestHandleInterface> request_handle, | 27 std::unique_ptr<DownloadRequestHandleInterface> request_handle, |
| 26 const DownloadCreateInfo& create_info); | 28 const DownloadCreateInfo& create_info); |
| 27 ~ParallelDownloadJob() override; | 29 ~ParallelDownloadJob() override; |
| 28 | 30 |
| 29 // DownloadUrlJob implementation. | 31 // DownloadJobImpl implementation. |
| 30 void Start() override; | 32 void Start() override; |
| 31 void Cancel(bool user_cancel) override; | 33 void Cancel(bool user_cancel) override; |
| 32 void Pause() override; | 34 void Pause() override; |
| 33 void Resume(bool resume_request) override; | 35 void Resume(bool resume_request) override; |
| 34 | 36 |
| 37 protected: | |
| 38 // Virtual for testing. | |
| 39 virtual int GetParallelRequestCount() const; | |
| 40 | |
| 35 private: | 41 private: |
| 36 friend class ParallelDownloadJobTest; | 42 friend class ParallelDownloadJobTest; |
| 37 | 43 |
| 38 typedef std::vector<std::unique_ptr<DownloadWorker>> WorkerList; | 44 typedef std::unordered_map<int64_t, std::unique_ptr<DownloadWorker>> |
|
qinmin
2017/03/11 06:08:22
nit: using WorkerMap = std::...
xingliu
2017/03/13 17:53:17
Done.
| |
| 45 WorkerMap; | |
| 39 | 46 |
| 40 // Build multiple http requests for a new download, | 47 // DownloadWorker::Delegate implementation. |
| 41 // the rest of the bytes starting from |bytes_received| will be equally | 48 void OnByteStreamReady( |
| 42 // distributed to each connection, including the original connection. | 49 DownloadWorker* worker, |
| 43 // the last connection may take additional bytes. | 50 std::unique_ptr<ByteStreamReader> stream_reader) override; |
| 44 void ForkRequestsForNewDownload(int64_t bytes_received, | |
| 45 int64_t total_bytes, | |
| 46 int request_count); | |
| 47 | 51 |
| 48 // Build parallel requests to download the remaining slices. | 52 // Build parallel requests to download. This function is the entry point for |
| 49 // TODO(qinmin): remove ForkRequestsForNewDownload() and move the logic into | 53 // all parallel downloads. |
| 50 // this function. | |
| 51 void BuildParallelRequests(); | 54 void BuildParallelRequests(); |
| 52 | 55 |
| 56 // Build one HTTP request for each slice, starting from the second slice. | |
| 57 // The first slice represents the original request. | |
| 58 void ForkSubRequests(const DownloadItem::ReceivedSlices& slices_to_download); | |
| 59 | |
| 53 // Create one range request, virtual for testing. | 60 // Create one range request, virtual for testing. |
| 54 virtual void CreateRequest(int64_t offset, int64_t length); | 61 virtual void CreateRequest(int64_t offset, int64_t length); |
| 55 | 62 |
| 56 // Information about the initial request when download is started. | 63 // Information about the initial request when download is started. |
| 57 int64_t initial_request_offset_; | 64 int64_t initial_request_offset_; |
| 58 int64_t initial_request_length_; | |
| 59 | 65 |
| 60 // Subsequent tasks to send range requests. | 66 // The content length of the target resource, read from the response header. |
| 61 WorkerList workers_; | 67 // The actual length of the target file may be different from the |
| 68 // content-length header. | |
| 69 int64_t content_length_; | |
| 70 | |
| 71 // Map from the offset position of the slice to the worker that downloads the | |
| 72 // slice. | |
| 73 WorkerMap workers_; | |
| 62 | 74 |
| 63 DISALLOW_COPY_AND_ASSIGN(ParallelDownloadJob); | 75 DISALLOW_COPY_AND_ASSIGN(ParallelDownloadJob); |
| 64 }; | 76 }; |
| 65 | 77 |
| 66 } // namespace content | 78 } // namespace content |
| 67 | 79 |
| 68 #endif // CONTENT_BROWSER_DOWNLOAD_PARALLEL_DOWNLOAD_JOB_H_ | 80 #endif // CONTENT_BROWSER_DOWNLOAD_PARALLEL_DOWNLOAD_JOB_H_ |
| OLD | NEW |