OLD | NEW |
1 // Copyright (c) 2011 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2011 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/browser/net/url_request_slow_download_job.h" | 5 #include "content/browser/net/url_request_slow_download_job.h" |
6 | 6 |
7 #include "base/compiler_specific.h" | 7 #include "base/compiler_specific.h" |
8 #include "base/message_loop.h" | 8 #include "base/message_loop.h" |
9 #include "base/stringprintf.h" | 9 #include "base/stringprintf.h" |
10 #include "base/string_util.h" | 10 #include "base/string_util.h" |
11 #include "googleurl/src/gurl.h" | 11 #include "googleurl/src/gurl.h" |
12 #include "net/base/io_buffer.h" | 12 #include "net/base/io_buffer.h" |
13 #include "net/base/net_errors.h" | |
14 #include "net/http/http_response_headers.h" | 13 #include "net/http/http_response_headers.h" |
15 #include "net/url_request/url_request.h" | 14 #include "net/url_request/url_request.h" |
16 #include "net/url_request/url_request_filter.h" | 15 #include "net/url_request/url_request_filter.h" |
17 #include "net/url_request/url_request_status.h" | |
18 | 16 |
19 const int kFirstDownloadSize = 1024 * 35; | 17 const int kFirstDownloadSize = 1024 * 35; |
20 const int kSecondDownloadSize = 1024 * 10; | 18 const int kSecondDownloadSize = 1024 * 10; |
21 | 19 |
22 const char URLRequestSlowDownloadJob::kUnknownSizeUrl[] = | 20 const char URLRequestSlowDownloadJob::kUnknownSizeUrl[] = |
23 "http://url.handled.by.slow.download/download-unknown-size"; | 21 "http://url.handled.by.slow.download/download-unknown-size"; |
24 const char URLRequestSlowDownloadJob::kKnownSizeUrl[] = | 22 const char URLRequestSlowDownloadJob::kKnownSizeUrl[] = |
25 "http://url.handled.by.slow.download/download-known-size"; | 23 "http://url.handled.by.slow.download/download-known-size"; |
26 const char URLRequestSlowDownloadJob::kFinishDownloadUrl[] = | 24 const char URLRequestSlowDownloadJob::kFinishDownloadUrl[] = |
27 "http://url.handled.by.slow.download/download-finish"; | 25 "http://url.handled.by.slow.download/download-finish"; |
28 const char URLRequestSlowDownloadJob::kErrorFinishDownloadUrl[] = | |
29 "http://url.handled.by.slow.download/download-error"; | |
30 | 26 |
31 std::vector<URLRequestSlowDownloadJob*> | 27 std::vector<URLRequestSlowDownloadJob*> |
32 URLRequestSlowDownloadJob::pending_requests_; | 28 URLRequestSlowDownloadJob::kPendingRequests; |
33 | |
34 // Return whether this is the finish or error URL. | |
35 static bool IsCompletionUrl(const GURL& url) { | |
36 if (url.spec() == URLRequestSlowDownloadJob::kFinishDownloadUrl) | |
37 return true; | |
38 return (url.spec() == URLRequestSlowDownloadJob::kErrorFinishDownloadUrl); | |
39 } | |
40 | 29 |
41 void URLRequestSlowDownloadJob::Start() { | 30 void URLRequestSlowDownloadJob::Start() { |
42 MessageLoop::current()->PostTask( | 31 MessageLoop::current()->PostTask( |
43 FROM_HERE, | 32 FROM_HERE, |
44 method_factory_.NewRunnableMethod( | 33 method_factory_.NewRunnableMethod( |
45 &URLRequestSlowDownloadJob::StartAsync)); | 34 &URLRequestSlowDownloadJob::StartAsync)); |
46 } | 35 } |
47 | 36 |
48 // static | 37 // static |
49 void URLRequestSlowDownloadJob::AddUrlHandler() { | 38 void URLRequestSlowDownloadJob::AddUrlHandler() { |
50 net::URLRequestFilter* filter = net::URLRequestFilter::GetInstance(); | 39 net::URLRequestFilter* filter = net::URLRequestFilter::GetInstance(); |
51 filter->AddUrlHandler(GURL(kUnknownSizeUrl), | 40 filter->AddUrlHandler(GURL(kUnknownSizeUrl), |
52 &URLRequestSlowDownloadJob::Factory); | 41 &URLRequestSlowDownloadJob::Factory); |
53 filter->AddUrlHandler(GURL(kKnownSizeUrl), | 42 filter->AddUrlHandler(GURL(kKnownSizeUrl), |
54 &URLRequestSlowDownloadJob::Factory); | 43 &URLRequestSlowDownloadJob::Factory); |
55 filter->AddUrlHandler(GURL(kFinishDownloadUrl), | 44 filter->AddUrlHandler(GURL(kFinishDownloadUrl), |
56 &URLRequestSlowDownloadJob::Factory); | 45 &URLRequestSlowDownloadJob::Factory); |
57 filter->AddUrlHandler(GURL(kErrorFinishDownloadUrl), | |
58 &URLRequestSlowDownloadJob::Factory); | |
59 } | 46 } |
60 | 47 |
61 /*static */ | 48 /*static */ |
62 net::URLRequestJob* URLRequestSlowDownloadJob::Factory( | 49 net::URLRequestJob* URLRequestSlowDownloadJob::Factory( |
63 net::URLRequest* request, | 50 net::URLRequest* request, |
64 const std::string& scheme) { | 51 const std::string& scheme) { |
65 URLRequestSlowDownloadJob* job = new URLRequestSlowDownloadJob(request); | 52 URLRequestSlowDownloadJob* job = new URLRequestSlowDownloadJob(request); |
66 if (!IsCompletionUrl(request->url())) | 53 if (request->url().spec() != kFinishDownloadUrl) |
67 URLRequestSlowDownloadJob::pending_requests_.push_back(job); | 54 URLRequestSlowDownloadJob::kPendingRequests.push_back(job); |
68 return job; | 55 return job; |
69 } | 56 } |
70 | 57 |
71 /* static */ | 58 /* static */ |
72 void URLRequestSlowDownloadJob::FinishPendingRequests(bool error) { | 59 void URLRequestSlowDownloadJob::FinishPendingRequests() { |
73 typedef std::vector<URLRequestSlowDownloadJob*> JobList; | 60 typedef std::vector<URLRequestSlowDownloadJob*> JobList; |
74 for (JobList::iterator it = pending_requests_.begin(); it != | 61 for (JobList::iterator it = kPendingRequests.begin(); it != |
75 pending_requests_.end(); ++it) { | 62 kPendingRequests.end(); ++it) { |
76 if (error) { | 63 (*it)->set_should_finish_download(); |
77 (*it)->set_should_error_download(); | |
78 } else { | |
79 (*it)->set_should_finish_download(); | |
80 } | |
81 } | 64 } |
82 pending_requests_.clear(); | 65 kPendingRequests.clear(); |
83 } | 66 } |
84 | 67 |
85 URLRequestSlowDownloadJob::URLRequestSlowDownloadJob(net::URLRequest* request) | 68 URLRequestSlowDownloadJob::URLRequestSlowDownloadJob(net::URLRequest* request) |
86 : net::URLRequestJob(request), | 69 : net::URLRequestJob(request), |
87 first_download_size_remaining_(kFirstDownloadSize), | 70 first_download_size_remaining_(kFirstDownloadSize), |
88 should_finish_download_(false), | 71 should_finish_download_(false), |
89 should_send_second_chunk_(false), | 72 should_send_second_chunk_(false), |
90 should_error_download_(false), | |
91 ALLOW_THIS_IN_INITIALIZER_LIST(method_factory_(this)) {} | 73 ALLOW_THIS_IN_INITIALIZER_LIST(method_factory_(this)) {} |
92 | 74 |
93 void URLRequestSlowDownloadJob::StartAsync() { | 75 void URLRequestSlowDownloadJob::StartAsync() { |
94 if (IsCompletionUrl(request_->url())) { | 76 if (LowerCaseEqualsASCII(kFinishDownloadUrl, request_->url().spec().c_str())) |
95 URLRequestSlowDownloadJob::FinishPendingRequests( | 77 URLRequestSlowDownloadJob::FinishPendingRequests(); |
96 request_->url().spec() == kErrorFinishDownloadUrl); | |
97 } | |
98 | 78 |
99 NotifyHeadersComplete(); | 79 NotifyHeadersComplete(); |
100 } | 80 } |
101 | 81 |
102 bool URLRequestSlowDownloadJob::ReadRawData(net::IOBuffer* buf, int buf_size, | 82 bool URLRequestSlowDownloadJob::ReadRawData(net::IOBuffer* buf, int buf_size, |
103 int *bytes_read) { | 83 int *bytes_read) { |
104 if (IsCompletionUrl(request_->url())) { | 84 if (LowerCaseEqualsASCII(kFinishDownloadUrl, |
| 85 request_->url().spec().c_str())) { |
105 *bytes_read = 0; | 86 *bytes_read = 0; |
106 return true; | 87 return true; |
107 } | 88 } |
108 | 89 |
109 if (should_send_second_chunk_) { | 90 if (should_send_second_chunk_) { |
110 DCHECK(buf_size > kSecondDownloadSize); | 91 DCHECK(buf_size > kSecondDownloadSize); |
111 for (int i = 0; i < kSecondDownloadSize; ++i) { | 92 for (int i = 0; i < kSecondDownloadSize; ++i) { |
112 buf->data()[i] = '*'; | 93 buf->data()[i] = '*'; |
113 } | 94 } |
114 *bytes_read = kSecondDownloadSize; | 95 *bytes_read = kSecondDownloadSize; |
(...skipping 29 matching lines...) |
144 | 125 |
145 // Return false to signal there is pending data. | 126 // Return false to signal there is pending data. |
146 return false; | 127 return false; |
147 } | 128 } |
148 | 129 |
149 void URLRequestSlowDownloadJob::CheckDoneStatus() { | 130 void URLRequestSlowDownloadJob::CheckDoneStatus() { |
150 if (should_finish_download_) { | 131 if (should_finish_download_) { |
151 should_send_second_chunk_ = true; | 132 should_send_second_chunk_ = true; |
152 SetStatus(net::URLRequestStatus()); | 133 SetStatus(net::URLRequestStatus()); |
153 NotifyReadComplete(kSecondDownloadSize); | 134 NotifyReadComplete(kSecondDownloadSize); |
154 } else if (should_error_download_) { | |
155 NotifyDone( | |
156 net::URLRequestStatus(net::URLRequestStatus::FAILED, net::ERR_FAILED)); | |
157 } else { | 135 } else { |
158 MessageLoop::current()->PostDelayedTask( | 136 MessageLoop::current()->PostDelayedTask( |
159 FROM_HERE, | 137 FROM_HERE, |
160 method_factory_.NewRunnableMethod( | 138 method_factory_.NewRunnableMethod( |
161 &URLRequestSlowDownloadJob::CheckDoneStatus), | 139 &URLRequestSlowDownloadJob::CheckDoneStatus), |
162 100); | 140 100); |
163 } | 141 } |
164 } | 142 } |
165 | 143 |
166 // Public virtual version. | 144 // Public virtual version. |
167 void URLRequestSlowDownloadJob::GetResponseInfo(net::HttpResponseInfo* info) { | 145 void URLRequestSlowDownloadJob::GetResponseInfo(net::HttpResponseInfo* info) { |
168 // Forward to private const version. | 146 // Forward to private const version. |
169 GetResponseInfoConst(info); | 147 GetResponseInfoConst(info); |
170 } | 148 } |
171 | 149 |
172 URLRequestSlowDownloadJob::~URLRequestSlowDownloadJob() {} | 150 URLRequestSlowDownloadJob::~URLRequestSlowDownloadJob() {} |
173 | 151 |
174 // Private const version. | 152 // Private const version. |
175 void URLRequestSlowDownloadJob::GetResponseInfoConst( | 153 void URLRequestSlowDownloadJob::GetResponseInfoConst( |
176 net::HttpResponseInfo* info) const { | 154 net::HttpResponseInfo* info) const { |
177 // Send back mock headers. | 155 // Send back mock headers. |
178 std::string raw_headers; | 156 std::string raw_headers; |
179 if (IsCompletionUrl(request_->url())) { | 157 if (LowerCaseEqualsASCII(kFinishDownloadUrl, |
| 158 request_->url().spec().c_str())) { |
180 raw_headers.append( | 159 raw_headers.append( |
181 "HTTP/1.1 200 OK\n" | 160 "HTTP/1.1 200 OK\n" |
182 "Content-type: text/plain\n"); | 161 "Content-type: text/plain\n"); |
183 } else { | 162 } else { |
184 raw_headers.append( | 163 raw_headers.append( |
185 "HTTP/1.1 200 OK\n" | 164 "HTTP/1.1 200 OK\n" |
186 "Content-type: application/octet-stream\n" | 165 "Content-type: application/octet-stream\n" |
187 "Cache-Control: max-age=0\n"); | 166 "Cache-Control: max-age=0\n"); |
188 | 167 |
189 if (request_->url().spec() == kKnownSizeUrl) { | 168 if (LowerCaseEqualsASCII(kKnownSizeUrl, request_->url().spec().c_str())) { |
190 raw_headers.append(base::StringPrintf( | 169 raw_headers.append(base::StringPrintf( |
191 "Content-Length: %d\n", | 170 "Content-Length: %d\n", |
192 kFirstDownloadSize + kSecondDownloadSize)); | 171 kFirstDownloadSize + kSecondDownloadSize)); |
193 } | 172 } |
194 } | 173 } |
195 | 174 |
196 // ParseRawHeaders expects \0 to end each header line. | 175 // ParseRawHeaders expects \0 to end each header line. |
197 ReplaceSubstringsAfterOffset(&raw_headers, 0, "\n", std::string("\0", 1)); | 176 ReplaceSubstringsAfterOffset(&raw_headers, 0, "\n", std::string("\0", 1)); |
198 info->headers = new net::HttpResponseHeaders(raw_headers); | 177 info->headers = new net::HttpResponseHeaders(raw_headers); |
199 } | 178 } |
200 | 179 |
201 bool URLRequestSlowDownloadJob::GetMimeType(std::string* mime_type) const { | 180 bool URLRequestSlowDownloadJob::GetMimeType(std::string* mime_type) const { |
202 net::HttpResponseInfo info; | 181 net::HttpResponseInfo info; |
203 GetResponseInfoConst(&info); | 182 GetResponseInfoConst(&info); |
204 return info.headers && info.headers->GetMimeType(mime_type); | 183 return info.headers && info.headers->GetMimeType(mime_type); |
205 } | 184 } |
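
For readers skimming the diff, the pattern both versions share is a static registry of in-flight jobs plus a "magic" completion URL: every job started on a download URL parks itself in the static vector, and a later request to kFinishDownloadUrl walks that vector and flags each stalled job to emit its final chunk (the OLD side additionally supports kErrorFinishDownloadUrl, which flags the jobs to fail instead). The standalone sketch below illustrates only that registry-and-trigger bookkeeping with plain C++ stand-ins; FakeJob and the bare std::vector are hypothetical simplifications, not the Chromium net:: types used in the file above.

// Simplified, self-contained illustration of the pending-request registry
// used by URLRequestSlowDownloadJob. FakeJob is a hypothetical stand-in for
// the real URLRequestJob subclass; only the completion bookkeeping is shown.
#include <iostream>
#include <string>
#include <vector>

class FakeJob {
 public:
  explicit FakeJob(const std::string& url) : url_(url) {}

  // Mirrors the set_should_finish_download() / set_should_error_download()
  // flags flipped by FinishPendingRequests() in the diff above.
  void set_should_finish_download() { should_finish_ = true; }
  void set_should_error_download() { should_error_ = true; }

  const std::string& url() const { return url_; }
  bool should_finish() const { return should_finish_; }
  bool should_error() const { return should_error_; }

 private:
  std::string url_;
  bool should_finish_ = false;
  bool should_error_ = false;
};

// Static registry of jobs that have started streaming but not yet finished,
// analogous to URLRequestSlowDownloadJob::pending_requests_.
static std::vector<FakeJob*> pending_requests;

// Analogous to FinishPendingRequests(bool error) on the OLD side: a request
// to the finish (or error-finish) URL sweeps the registry and tells every
// stalled job how to complete, then empties the registry.
void FinishPendingRequests(bool error) {
  for (std::vector<FakeJob*>::iterator it = pending_requests.begin();
       it != pending_requests.end(); ++it) {
    if (error)
      (*it)->set_should_error_download();
    else
      (*it)->set_should_finish_download();
  }
  pending_requests.clear();
}

int main() {
  // Two downloads start and stall after their first chunk.
  FakeJob known("download-known-size");
  FakeJob unknown("download-unknown-size");
  pending_requests.push_back(&known);
  pending_requests.push_back(&unknown);

  // A later request to the completion URL releases both of them.
  FinishPendingRequests(false /* error */);

  std::cout << known.url() << " finished: " << known.should_finish() << "\n";
  std::cout << unknown.url() << " finished: " << unknown.should_finish() << "\n";
  return 0;
}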