Chromium Code Reviews

Unified Diff: content/test/net/url_request_slow_download_job.cc

Issue 11363222: Persist download interrupt reason, both target and current paths, and url_chain. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: Merged to r180302 Created 7 years, 11 months ago
Index: content/test/net/url_request_slow_download_job.cc
diff --git a/content/test/net/url_request_slow_download_job.cc b/content/test/net/url_request_slow_download_job.cc
index ca1ccc039337c305e53f47837f8b845cc579b373..62e382903d52e6d4ac5433da5805261ece4bb7b4 100644
--- a/content/test/net/url_request_slow_download_job.cc
+++ b/content/test/net/url_request_slow_download_job.cc
@@ -13,6 +13,7 @@
 #include "content/public/browser/browser_thread.h"
 #include "googleurl/src/gurl.h"
 #include "net/base/io_buffer.h"
+#include "net/base/net_errors.h"
 #include "net/http/http_response_headers.h"
 #include "net/url_request/url_request.h"
 #include "net/url_request/url_request_filter.h"
@@ -25,6 +26,8 @@ const char URLRequestSlowDownloadJob::kKnownSizeUrl[] =
     "http://url.handled.by.slow.download/download-known-size";
 const char URLRequestSlowDownloadJob::kFinishDownloadUrl[] =
     "http://url.handled.by.slow.download/download-finish";
+const char URLRequestSlowDownloadJob::kErrorDownloadUrl[] =
+    "http://url.handled.by.slow.download/download-error";
 const int URLRequestSlowDownloadJob::kFirstDownloadSize = 1024 * 35;
 const int URLRequestSlowDownloadJob::kSecondDownloadSize = 1024 * 10;
@@ -49,6 +52,8 @@ void URLRequestSlowDownloadJob::AddUrlHandler() {
                         &URLRequestSlowDownloadJob::Factory);
   filter->AddUrlHandler(GURL(kFinishDownloadUrl),
                         &URLRequestSlowDownloadJob::Factory);
+  filter->AddUrlHandler(GURL(kErrorDownloadUrl),
+                        &URLRequestSlowDownloadJob::Factory);
 }
 // static
@@ -59,7 +64,8 @@ net::URLRequestJob* URLRequestSlowDownloadJob::Factory(
   DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
   URLRequestSlowDownloadJob* job = new URLRequestSlowDownloadJob(
       request, network_delegate);
-  if (request->url().spec() != kFinishDownloadUrl)
+  if (request->url().spec() != kFinishDownloadUrl &&
+      request->url().spec() != kErrorDownloadUrl)
     pending_requests_.Get().insert(job);
   return job;
 }
@@ -80,10 +86,20 @@ void URLRequestSlowDownloadJob::FinishPendingRequests() {
   }
 }
+void URLRequestSlowDownloadJob::ErrorPendingRequests() {
+  typedef std::set<URLRequestSlowDownloadJob*> JobList;
+  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+  for (JobList::iterator it = pending_requests_.Get().begin(); it !=
+       pending_requests_.Get().end(); ++it) {
+    (*it)->set_should_error_download();
+  }
+}
+
 URLRequestSlowDownloadJob::URLRequestSlowDownloadJob(
     net::URLRequest* request, net::NetworkDelegate* network_delegate)
     : net::URLRequestJob(request, network_delegate),
       bytes_already_sent_(0),
+      should_error_download_(false),
       should_finish_download_(false),
       buffer_size_(0),
       ALLOW_THIS_IN_INITIALIZER_LIST(weak_factory_(this)) {
@@ -92,6 +108,8 @@ URLRequestSlowDownloadJob::URLRequestSlowDownloadJob(
 void URLRequestSlowDownloadJob::StartAsync() {
   if (LowerCaseEqualsASCII(kFinishDownloadUrl, request_->url().spec().c_str()))
     URLRequestSlowDownloadJob::FinishPendingRequests();
+  if (LowerCaseEqualsASCII(kErrorDownloadUrl, request_->url().spec().c_str()))
+    URLRequestSlowDownloadJob::ErrorPendingRequests();
   NotifyHeadersComplete();
 }
@@ -150,8 +168,10 @@ URLRequestSlowDownloadJob::FillBufferHelper(
 bool URLRequestSlowDownloadJob::ReadRawData(net::IOBuffer* buf, int buf_size,
                                             int* bytes_read) {
   if (LowerCaseEqualsASCII(kFinishDownloadUrl,
+                           request_->url().spec().c_str()) ||
+      LowerCaseEqualsASCII(kErrorDownloadUrl,
                            request_->url().spec().c_str())) {
-    VLOG(10) << __FUNCTION__ << " called w/ kFinishDownloadUrl.";
+    VLOG(10) << __FUNCTION__ << " called w/ kFinish/ErrorDownloadUrl.";
     *bytes_read = 0;
     return true;
   }
@@ -190,6 +210,10 @@ void URLRequestSlowDownloadJob::CheckDoneStatus() {
     buffer_ = NULL;  // Release the reference.
     SetStatus(net::URLRequestStatus());
     NotifyReadComplete(bytes_written);
+  } else if (should_error_download_) {
+    VLOG(10) << __FUNCTION__ << " called w/ should_error_download_ set.";
+    NotifyDone(net::URLRequestStatus(
+        net::URLRequestStatus::FAILED, net::ERR_CONNECTION_RESET));
   } else {
     MessageLoop::current()->PostDelayedTask(
         FROM_HERE,
@@ -216,6 +240,8 @@ void URLRequestSlowDownloadJob::GetResponseInfoConst(
   // Send back mock headers.
   std::string raw_headers;
   if (LowerCaseEqualsASCII(kFinishDownloadUrl,
+                           request_->url().spec().c_str()) ||
+      LowerCaseEqualsASCII(kErrorDownloadUrl,
                            request_->url().spec().c_str())) {
     raw_headers.append(
         "HTTP/1.1 200 OK\n"
