Index: content/browser/net/url_request_slow_download_job.cc
diff --git a/content/browser/net/url_request_slow_download_job.cc b/content/browser/net/url_request_slow_download_job.cc
index ef1370ff32329b41538af42a002002efbe73ef68..4cb152c67e3e7afccbac59df9afecec3678e57d6 100644
--- a/content/browser/net/url_request_slow_download_job.cc
+++ b/content/browser/net/url_request_slow_download_job.cc
@@ -10,9 +10,11 @@
#include "base/string_util.h"
#include "googleurl/src/gurl.h"
#include "net/base/io_buffer.h"
+#include "net/base/net_errors.h"
#include "net/http/http_response_headers.h"
#include "net/url_request/url_request.h"
#include "net/url_request/url_request_filter.h"
+#include "net/url_request/url_request_status.h"

const int kFirstDownloadSize = 1024 * 35;
const int kSecondDownloadSize = 1024 * 10;
@@ -23,9 +25,18 @@ const char URLRequestSlowDownloadJob::kKnownSizeUrl[] =
"http://url.handled.by.slow.download/download-known-size";
const char URLRequestSlowDownloadJob::kFinishDownloadUrl[] =
"http://url.handled.by.slow.download/download-finish";
+const char URLRequestSlowDownloadJob::kErrorFinishDownloadUrl[] =
+ "http://url.handled.by.slow.download/download-error";

std::vector<URLRequestSlowDownloadJob*>
- URLRequestSlowDownloadJob::kPendingRequests;
+ URLRequestSlowDownloadJob::pending_requests_;
+
+// Return whether this is the finish or error URL.
+static bool IsCompletionUrl(const GURL& url) {
+ if (url.spec() == URLRequestSlowDownloadJob::kFinishDownloadUrl)
[Review comment, ahendrickson, 2011/09/11 15:57:02]
Here and in other parts of the file: Why did you ...
[Reply, Randy Smith (Not in Mondays), 2011/09/12 18:00:27]
In response to a comment of Eric's on the original ...
+ return true;
+ return (url.spec() == URLRequestSlowDownloadJob::kErrorFinishDownloadUrl);
+}

void URLRequestSlowDownloadJob::Start() {
MessageLoop::current()->PostTask(
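Note (not part of the patch): the new IsCompletionUrl() helper simply folds the finish and error control URLs into a single check. A standalone C++ sketch of that dispatch, using only the URL strings taken from this patch (the main() harness is purely illustrative), looks like:

#include <iostream>
#include <string>

// Control URLs copied from the patch; everything else here is hypothetical.
static const char kFinishDownloadUrl[] =
    "http://url.handled.by.slow.download/download-finish";
static const char kErrorFinishDownloadUrl[] =
    "http://url.handled.by.slow.download/download-error";

// Mirrors IsCompletionUrl(): true for either the finish or the error URL.
static bool IsCompletionUrl(const std::string& spec) {
  return spec == kFinishDownloadUrl || spec == kErrorFinishDownloadUrl;
}

int main() {
  // Requesting either URL completes all pending slow-download jobs; only the
  // error URL makes them complete with a failure.
  const char* specs[] = {kFinishDownloadUrl, kErrorFinishDownloadUrl,
                         "http://example.com/unrelated"};
  for (const char* spec : specs) {
    std::cout << spec << " -> completion URL: "
              << (IsCompletionUrl(spec) ? "yes" : "no") << "\n";
  }
  return 0;
}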
@@ -43,6 +54,8 @@ void URLRequestSlowDownloadJob::AddUrlHandler() {
&URLRequestSlowDownloadJob::Factory);
filter->AddUrlHandler(GURL(kFinishDownloadUrl),
&URLRequestSlowDownloadJob::Factory);
+ filter->AddUrlHandler(GURL(kErrorFinishDownloadUrl),
+ &URLRequestSlowDownloadJob::Factory);
}

/*static */
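Note (illustration only): the hunk above registers the new error URL with the same job factory as the existing control URLs. A minimal standalone model of that registration, with a plain std::map standing in for net::URLRequestFilter and a string tag standing in for the factory callback, might be:

#include <cassert>
#include <map>
#include <string>

int main() {
  // Hypothetical stand-in for the URL filter: handled URL -> factory tag.
  std::map<std::string, std::string> handlers;
  const std::string slow_download_factory = "URLRequestSlowDownloadJob::Factory";

  // Mirrors the AddUrlHandler() calls: the error URL is routed to the same
  // factory as the finish URL.
  handlers["http://url.handled.by.slow.download/download-finish"] =
      slow_download_factory;
  handlers["http://url.handled.by.slow.download/download-error"] =
      slow_download_factory;

  assert(handlers.count("http://url.handled.by.slow.download/download-error") == 1);
  return 0;
}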
@@ -50,19 +63,23 @@ net::URLRequestJob* URLRequestSlowDownloadJob::Factory(
net::URLRequest* request,
const std::string& scheme) {
URLRequestSlowDownloadJob* job = new URLRequestSlowDownloadJob(request);
- if (request->url().spec() != kFinishDownloadUrl)
- URLRequestSlowDownloadJob::kPendingRequests.push_back(job);
+ if (!IsCompletionUrl(request->url()))
+ URLRequestSlowDownloadJob::pending_requests_.push_back(job);
return job;
}

/* static */
-void URLRequestSlowDownloadJob::FinishPendingRequests() {
+void URLRequestSlowDownloadJob::FinishPendingRequests(bool error) {
typedef std::vector<URLRequestSlowDownloadJob*> JobList;
- for (JobList::iterator it = kPendingRequests.begin(); it !=
- kPendingRequests.end(); ++it) {
- (*it)->set_should_finish_download();
+ for (JobList::iterator it = pending_requests_.begin(); it !=
+ pending_requests_.end(); ++it) {
+ if (error) {
+ (*it)->set_should_error_download();
+ } else {
+ (*it)->set_should_finish_download();
+ }
}
- kPendingRequests.clear();
+ pending_requests_.clear();
}

URLRequestSlowDownloadJob::URLRequestSlowDownloadJob(net::URLRequest* request)
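Note (illustration only): FinishPendingRequests() now takes a bool and fans it out to one of two per-job flags, which CheckDoneStatus() consumes later. A minimal standalone model of that fan-out (FakeJob and FinishAll are hypothetical stand-ins for URLRequestSlowDownloadJob and FinishPendingRequests) is:

#include <cassert>
#include <vector>

// Hypothetical stand-in for URLRequestSlowDownloadJob: only the two
// completion flags that CheckDoneStatus() later consumes are modeled.
struct FakeJob {
  bool should_finish_download;
  bool should_error_download;
  FakeJob() : should_finish_download(false), should_error_download(false) {}
};

// Mirrors FinishPendingRequests(bool error): mark every pending job and then
// forget about it; the jobs themselves complete asynchronously afterwards.
void FinishAll(std::vector<FakeJob*>* pending, bool error) {
  for (size_t i = 0; i < pending->size(); ++i) {
    if (error)
      (*pending)[i]->should_error_download = true;   // will report a failure
    else
      (*pending)[i]->should_finish_download = true;  // will complete normally
  }
  pending->clear();
}

int main() {
  FakeJob a, b;
  std::vector<FakeJob*> pending;
  pending.push_back(&a);
  pending.push_back(&b);

  FinishAll(&pending, true /* error */);
  assert(a.should_error_download && b.should_error_download);
  assert(pending.empty());
  return 0;
}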
@@ -70,19 +87,21 @@ URLRequestSlowDownloadJob::URLRequestSlowDownloadJob(net::URLRequest* request)
first_download_size_remaining_(kFirstDownloadSize),
should_finish_download_(false),
should_send_second_chunk_(false),
+ should_error_download_(false),
ALLOW_THIS_IN_INITIALIZER_LIST(method_factory_(this)) {}

void URLRequestSlowDownloadJob::StartAsync() {
- if (LowerCaseEqualsASCII(kFinishDownloadUrl, request_->url().spec().c_str()))
- URLRequestSlowDownloadJob::FinishPendingRequests();
+ if (IsCompletionUrl(request_->url())) {
+ URLRequestSlowDownloadJob::FinishPendingRequests(
+ request_->url().spec() == kErrorFinishDownloadUrl);
+ }

NotifyHeadersComplete();
}

bool URLRequestSlowDownloadJob::ReadRawData(net::IOBuffer* buf, int buf_size,
int *bytes_read) {
- if (LowerCaseEqualsASCII(kFinishDownloadUrl,
- request_->url().spec().c_str())) {
+ if (IsCompletionUrl(request_->url())) {
*bytes_read = 0;
return true;
}
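Note (illustration only): for the finish and error URLs, ReadRawData() above reports success with zero bytes, i.e. an empty but well-formed response body; the interesting work happens through the flags set in StartAsync(). A hypothetical model of that read contract, written outside the real URLRequestJob interface:

#include <cassert>

// Hypothetical model of the ReadRawData() behavior for the completion URLs:
// returning true with *bytes_read == 0 is treated as "successful read, end of
// data", so the control request itself serves an empty body.
static bool ReadCompletionBody(char* /* buf */, int /* buf_size */,
                               int* bytes_read) {
  *bytes_read = 0;  // nothing to serve for the finish/error control URLs
  return true;      // ...and that empty read ends the response
}

int main() {
  char buf[16];
  int bytes_read = -1;
  assert(ReadCompletionBody(buf, sizeof(buf), &bytes_read));
  assert(bytes_read == 0);
  return 0;
}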
@@ -132,6 +151,9 @@ void URLRequestSlowDownloadJob::CheckDoneStatus() {
should_send_second_chunk_ = true;
SetStatus(net::URLRequestStatus());
NotifyReadComplete(kSecondDownloadSize);
+ } else if (should_error_download_) {
+ NotifyDone(
+ net::URLRequestStatus(net::URLRequestStatus::FAILED, net::ERR_FAILED));
} else {
MessageLoop::current()->PostDelayedTask(
FROM_HERE,
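Note (illustration only): the new branch in CheckDoneStatus() reports the job as done with net::URLRequestStatus::FAILED and net::ERR_FAILED, which is why the patch adds the net_errors.h and url_request_status.h includes. A standalone sketch of that status pair, with hypothetical stand-ins for the real net types:

#include <cassert>

// Hypothetical stand-ins for net::URLRequestStatus::Status and net::ERR_FAILED;
// the real definitions live in url_request_status.h and net_errors.h.
enum Status { SUCCESS, IO_PENDING, FAILED };
const int kErrFailed = -2;  // assumed to match net::ERR_FAILED

struct RequestStatus {
  Status status;
  int error;
};

// Mirrors the new else-if branch: an errored job completes with FAILED rather
// than with a normal (SUCCESS) status.
RequestStatus MakeErrorCompletionStatus() {
  RequestStatus s = {FAILED, kErrFailed};
  return s;
}

int main() {
  RequestStatus s = MakeErrorCompletionStatus();
  assert(s.status == FAILED && s.error == kErrFailed);
  return 0;
}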
@@ -154,8 +176,7 @@ void URLRequestSlowDownloadJob::GetResponseInfoConst(
net::HttpResponseInfo* info) const {
// Send back mock headers.
std::string raw_headers;
- if (LowerCaseEqualsASCII(kFinishDownloadUrl,
- request_->url().spec().c_str())) {
+ if (IsCompletionUrl(request_->url())) {
raw_headers.append(
"HTTP/1.1 200 OK\n"
"Content-type: text/plain\n");
@@ -165,7 +186,7 @@ void URLRequestSlowDownloadJob::GetResponseInfoConst(
"Content-type: application/octet-stream\n"
"Cache-Control: max-age=0\n");

- if (LowerCaseEqualsASCII(kKnownSizeUrl, request_->url().spec().c_str())) {
+ if (request_->url().spec() == kKnownSizeUrl) {
raw_headers.append(base::StringPrintf(
"Content-Length: %d\n",
kFirstDownloadSize + kSecondDownloadSize));
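Note (illustration only): for the known-size URL the advertised Content-Length is simply the sum of the two chunk sizes defined at the top of the file, i.e. 35 KiB + 10 KiB = 46080 bytes:

#include <cstdio>

// Chunk sizes copied from the top of url_request_slow_download_job.cc.
const int kFirstDownloadSize = 1024 * 35;   // first chunk, sent immediately
const int kSecondDownloadSize = 1024 * 10;  // second chunk, sent on completion

int main() {
  // 35 * 1024 + 10 * 1024 = 46080 bytes.
  std::printf("Content-Length: %d\n", kFirstDownloadSize + kSecondDownloadSize);
  return 0;
}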