OLD | NEW |
---|---|
1 // Copyright (c) 2011 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2011 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/browser/net/url_request_slow_download_job.h" | 5 #include "content/browser/net/url_request_slow_download_job.h" |
6 | 6 |
7 #include "base/compiler_specific.h" | 7 #include "base/compiler_specific.h" |
8 #include "base/message_loop.h" | 8 #include "base/message_loop.h" |
9 #include "base/stringprintf.h" | 9 #include "base/stringprintf.h" |
10 #include "base/string_util.h" | 10 #include "base/string_util.h" |
11 #include "googleurl/src/gurl.h" | 11 #include "googleurl/src/gurl.h" |
12 #include "net/base/io_buffer.h" | 12 #include "net/base/io_buffer.h" |
13 #include "net/base/net_errors.h" | |
13 #include "net/http/http_response_headers.h" | 14 #include "net/http/http_response_headers.h" |
14 #include "net/url_request/url_request.h" | 15 #include "net/url_request/url_request.h" |
15 #include "net/url_request/url_request_filter.h" | 16 #include "net/url_request/url_request_filter.h" |
17 #include "net/url_request/url_request_status.h" | |
16 | 18 |
17 const int kFirstDownloadSize = 1024 * 35; | 19 const int kFirstDownloadSize = 1024 * 35; |
18 const int kSecondDownloadSize = 1024 * 10; | 20 const int kSecondDownloadSize = 1024 * 10; |
19 | 21 |
20 const char URLRequestSlowDownloadJob::kUnknownSizeUrl[] = | 22 const char URLRequestSlowDownloadJob::kUnknownSizeUrl[] = |
21 "http://url.handled.by.slow.download/download-unknown-size"; | 23 "http://url.handled.by.slow.download/download-unknown-size"; |
22 const char URLRequestSlowDownloadJob::kKnownSizeUrl[] = | 24 const char URLRequestSlowDownloadJob::kKnownSizeUrl[] = |
23 "http://url.handled.by.slow.download/download-known-size"; | 25 "http://url.handled.by.slow.download/download-known-size"; |
24 const char URLRequestSlowDownloadJob::kFinishDownloadUrl[] = | 26 const char URLRequestSlowDownloadJob::kFinishDownloadUrl[] = |
25 "http://url.handled.by.slow.download/download-finish"; | 27 "http://url.handled.by.slow.download/download-finish"; |
| 28 const char URLRequestSlowDownloadJob::kErrorFinishDownloadUrl[] = |
| 29 "http://url.handled.by.slow.download/download-error"; |
26 | 30 |
27 std::vector<URLRequestSlowDownloadJob*> | 31 std::vector<URLRequestSlowDownloadJob*> |
28 URLRequestSlowDownloadJob::kPendingRequests; | 32 URLRequestSlowDownloadJob::pending_requests_; |
| 33 |
| 34 // Return whether this is the finish or error URL. |
| 35 static bool IsCompletionUrl(GURL url) { |
brettw
2011/07/08 19:48:22
This should probably take a const ref to avoid a copy.
Randy Smith (Not in Mondays)
2011/07/13 21:11:38
Done.
| 36 if (url.spec() == URLRequestSlowDownloadJob::kFinishDownloadUrl) |
| 37 return true; |
| 38 return (url.spec() == URLRequestSlowDownloadJob::kErrorFinishDownloadUrl); |
| 39 } |
29 | 40 |
30 void URLRequestSlowDownloadJob::Start() { | 41 void URLRequestSlowDownloadJob::Start() { |
31 MessageLoop::current()->PostTask( | 42 MessageLoop::current()->PostTask( |
32 FROM_HERE, | 43 FROM_HERE, |
33 method_factory_.NewRunnableMethod( | 44 method_factory_.NewRunnableMethod( |
34 &URLRequestSlowDownloadJob::StartAsync)); | 45 &URLRequestSlowDownloadJob::StartAsync)); |
35 } | 46 } |
36 | 47 |
37 // static | 48 // static |
38 void URLRequestSlowDownloadJob::AddUrlHandler() { | 49 void URLRequestSlowDownloadJob::AddUrlHandler() { |
39 net::URLRequestFilter* filter = net::URLRequestFilter::GetInstance(); | 50 net::URLRequestFilter* filter = net::URLRequestFilter::GetInstance(); |
40 filter->AddUrlHandler(GURL(kUnknownSizeUrl), | 51 filter->AddUrlHandler(GURL(kUnknownSizeUrl), |
41 &URLRequestSlowDownloadJob::Factory); | 52 &URLRequestSlowDownloadJob::Factory); |
42 filter->AddUrlHandler(GURL(kKnownSizeUrl), | 53 filter->AddUrlHandler(GURL(kKnownSizeUrl), |
43 &URLRequestSlowDownloadJob::Factory); | 54 &URLRequestSlowDownloadJob::Factory); |
44 filter->AddUrlHandler(GURL(kFinishDownloadUrl), | 55 filter->AddUrlHandler(GURL(kFinishDownloadUrl), |
45 &URLRequestSlowDownloadJob::Factory); | 56 &URLRequestSlowDownloadJob::Factory); |
57 filter->AddUrlHandler(GURL(kErrorFinishDownloadUrl), | |
58 &URLRequestSlowDownloadJob::Factory); | |
46 } | 59 } |
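For orientation, a rough sketch of how a test might install these handlers; this file only provides AddUrlHandler(), so the IO-thread posting shown here (BrowserThread::PostTask with NewRunnableFunction) is an assumption about the caller, not part of this change:

// Hypothetical test-setup fragment: the URL filter lives on the IO thread,
// so registration is posted there before any slow-download URL is requested.
BrowserThread::PostTask(
    BrowserThread::IO, FROM_HERE,
    NewRunnableFunction(&URLRequestSlowDownloadJob::AddUrlHandler));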
47 | 60 |
48 /*static */ | 61 /*static */ |
49 net::URLRequestJob* URLRequestSlowDownloadJob::Factory( | 62 net::URLRequestJob* URLRequestSlowDownloadJob::Factory( |
50 net::URLRequest* request, | 63 net::URLRequest* request, |
51 const std::string& scheme) { | 64 const std::string& scheme) { |
52 URLRequestSlowDownloadJob* job = new URLRequestSlowDownloadJob(request); | 65 URLRequestSlowDownloadJob* job = new URLRequestSlowDownloadJob(request); |
53 if (request->url().spec() != kFinishDownloadUrl) | 66 if (!IsCompletionUrl(request->url())) |
54 URLRequestSlowDownloadJob::kPendingRequests.push_back(job); | 67 URLRequestSlowDownloadJob::pending_requests_.push_back(job); |
55 return job; | 68 return job; |
56 } | 69 } |
57 | 70 |
58 /* static */ | 71 /* static */ |
59 void URLRequestSlowDownloadJob::FinishPendingRequests() { | 72 void URLRequestSlowDownloadJob::FinishPendingRequests(bool error) { |
60 typedef std::vector<URLRequestSlowDownloadJob*> JobList; | 73 typedef std::vector<URLRequestSlowDownloadJob*> JobList; |
61 for (JobList::iterator it = kPendingRequests.begin(); it != | 74 for (JobList::iterator it = pending_requests_.begin(); it != |
62 kPendingRequests.end(); ++it) { | 75 pending_requests_.end(); ++it) { |
63 (*it)->set_should_finish_download(); | 76 if (error) { |
77 (*it)->set_should_error_download(); | |
78 } else { | |
79 (*it)->set_should_finish_download(); | |
80 } | |
64 } | 81 } |
65 kPendingRequests.clear(); | 82 pending_requests_.clear(); |
66 } | 83 } |
67 | 84 |
68 URLRequestSlowDownloadJob::URLRequestSlowDownloadJob(net::URLRequest* request) | 85 URLRequestSlowDownloadJob::URLRequestSlowDownloadJob(net::URLRequest* request) |
69 : net::URLRequestJob(request), | 86 : net::URLRequestJob(request), |
70 first_download_size_remaining_(kFirstDownloadSize), | 87 first_download_size_remaining_(kFirstDownloadSize), |
71 should_finish_download_(false), | 88 should_finish_download_(false), |
72 should_send_second_chunk_(false), | 89 should_send_second_chunk_(false), |
| 90 should_error_download_(false), |
73 ALLOW_THIS_IN_INITIALIZER_LIST(method_factory_(this)) {} | 91 ALLOW_THIS_IN_INITIALIZER_LIST(method_factory_(this)) {} |
74 | 92 |
75 void URLRequestSlowDownloadJob::StartAsync() { | 93 void URLRequestSlowDownloadJob::StartAsync() { |
76 if (LowerCaseEqualsASCII(kFinishDownloadUrl, request_->url().spec().c_str())) | 94 if (IsCompletionUrl(request_->url())) { |
77 URLRequestSlowDownloadJob::FinishPendingRequests(); | 95 URLRequestSlowDownloadJob::FinishPendingRequests( |
96 request_->url().spec() == kErrorFinishDownloadUrl); | |
97 } | |
78 | 98 |
79 NotifyHeadersComplete(); | 99 NotifyHeadersComplete(); |
80 } | 100 } |
81 | 101 |
82 bool URLRequestSlowDownloadJob::ReadRawData(net::IOBuffer* buf, int buf_size, | 102 bool URLRequestSlowDownloadJob::ReadRawData(net::IOBuffer* buf, int buf_size, |
83 int *bytes_read) { | 103 int *bytes_read) { |
84 if (LowerCaseEqualsASCII(kFinishDownloadUrl, | 104 if (IsCompletionUrl(request_->url())) { |
85 request_->url().spec().c_str())) { | |
86 *bytes_read = 0; | 105 *bytes_read = 0; |
87 return true; | 106 return true; |
88 } | 107 } |
89 | 108 |
90 if (should_send_second_chunk_) { | 109 if (should_send_second_chunk_) { |
91 DCHECK(buf_size > kSecondDownloadSize); | 110 DCHECK(buf_size > kSecondDownloadSize); |
92 for (int i = 0; i < kSecondDownloadSize; ++i) { | 111 for (int i = 0; i < kSecondDownloadSize; ++i) { |
93 buf->data()[i] = '*'; | 112 buf->data()[i] = '*'; |
94 } | 113 } |
95 *bytes_read = kSecondDownloadSize; | 114 *bytes_read = kSecondDownloadSize; |
(...skipping 29 matching lines...) | |
125 | 144 |
126 // Return false to signal there is pending data. | 145 // Return false to signal there is pending data. |
127 return false; | 146 return false; |
128 } | 147 } |
129 | 148 |
130 void URLRequestSlowDownloadJob::CheckDoneStatus() { | 149 void URLRequestSlowDownloadJob::CheckDoneStatus() { |
131 if (should_finish_download_) { | 150 if (should_finish_download_) { |
132 should_send_second_chunk_ = true; | 151 should_send_second_chunk_ = true; |
133 SetStatus(net::URLRequestStatus()); | 152 SetStatus(net::URLRequestStatus()); |
134 NotifyReadComplete(kSecondDownloadSize); | 153 NotifyReadComplete(kSecondDownloadSize); |
| 154 } else if (should_error_download_) { |
| 155 NotifyDone( |
| 156 net::URLRequestStatus(net::URLRequestStatus::FAILED, net::ERR_FAILED)); |
135 } else { | 157 } else { |
136 MessageLoop::current()->PostDelayedTask( | 158 MessageLoop::current()->PostDelayedTask( |
137 FROM_HERE, | 159 FROM_HERE, |
138 method_factory_.NewRunnableMethod( | 160 method_factory_.NewRunnableMethod( |
139 &URLRequestSlowDownloadJob::CheckDoneStatus), | 161 &URLRequestSlowDownloadJob::CheckDoneStatus), |
140 100); | 162 100); |
141 } | 163 } |
142 } | 164 } |
143 | 165 |
144 // Public virtual version. | 166 // Public virtual version. |
145 void URLRequestSlowDownloadJob::GetResponseInfo(net::HttpResponseInfo* info) { | 167 void URLRequestSlowDownloadJob::GetResponseInfo(net::HttpResponseInfo* info) { |
146 // Forward to private const version. | 168 // Forward to private const version. |
147 GetResponseInfoConst(info); | 169 GetResponseInfoConst(info); |
148 } | 170 } |
149 | 171 |
150 URLRequestSlowDownloadJob::~URLRequestSlowDownloadJob() {} | 172 URLRequestSlowDownloadJob::~URLRequestSlowDownloadJob() {} |
151 | 173 |
152 // Private const version. | 174 // Private const version. |
153 void URLRequestSlowDownloadJob::GetResponseInfoConst( | 175 void URLRequestSlowDownloadJob::GetResponseInfoConst( |
154 net::HttpResponseInfo* info) const { | 176 net::HttpResponseInfo* info) const { |
155 // Send back mock headers. | 177 // Send back mock headers. |
156 std::string raw_headers; | 178 std::string raw_headers; |
157 if (LowerCaseEqualsASCII(kFinishDownloadUrl, | 179 if (IsCompletionUrl(request_->url())) { |
158 request_->url().spec().c_str())) { | |
159 raw_headers.append( | 180 raw_headers.append( |
160 "HTTP/1.1 200 OK\n" | 181 "HTTP/1.1 200 OK\n" |
161 "Content-type: text/plain\n"); | 182 "Content-type: text/plain\n"); |
162 } else { | 183 } else { |
163 raw_headers.append( | 184 raw_headers.append( |
164 "HTTP/1.1 200 OK\n" | 185 "HTTP/1.1 200 OK\n" |
165 "Content-type: application/octet-stream\n" | 186 "Content-type: application/octet-stream\n" |
166 "Cache-Control: max-age=0\n"); | 187 "Cache-Control: max-age=0\n"); |
167 | 188 |
168 if (LowerCaseEqualsASCII(kKnownSizeUrl, request_->url().spec().c_str())) { | 189 if (request_->url().spec() == kKnownSizeUrl) { |
169 raw_headers.append(base::StringPrintf( | 190 raw_headers.append(base::StringPrintf( |
170 "Content-Length: %d\n", | 191 "Content-Length: %d\n", |
171 kFirstDownloadSize + kSecondDownloadSize)); | 192 kFirstDownloadSize + kSecondDownloadSize)); |
172 } | 193 } |
173 } | 194 } |
174 | 195 |
175 // ParseRawHeaders expects \0 to end each header line. | 196 // ParseRawHeaders expects \0 to end each header line. |
176 ReplaceSubstringsAfterOffset(&raw_headers, 0, "\n", std::string("\0", 1)); | 197 ReplaceSubstringsAfterOffset(&raw_headers, 0, "\n", std::string("\0", 1)); |
177 info->headers = new net::HttpResponseHeaders(raw_headers); | 198 info->headers = new net::HttpResponseHeaders(raw_headers); |
178 } | 199 } |
179 | 200 |
180 bool URLRequestSlowDownloadJob::GetMimeType(std::string* mime_type) const { | 201 bool URLRequestSlowDownloadJob::GetMimeType(std::string* mime_type) const { |
181 net::HttpResponseInfo info; | 202 net::HttpResponseInfo info; |
182 GetResponseInfoConst(&info); | 203 GetResponseInfoConst(&info); |
183 return info.headers && info.headers->GetMimeType(mime_type); | 204 return info.headers && info.headers->GetMimeType(mime_type); |
184 } | 205 } |