Index: net/test/url_request/url_request_slow_download_job.cc
diff --git a/net/test/url_request/url_request_slow_download_job.cc b/net/test/url_request/url_request_slow_download_job.cc
index 718a9f05502cef03e616160b891d75e8da05ae22..d94ad39e7640e329b274bdb5e42a6ae0c12b50ed 100644
--- a/net/test/url_request/url_request_slow_download_job.cc
+++ b/net/test/url_request/url_request_slow_download_job.cc
@@ -101,10 +101,14 @@ URLRequestSlowDownloadJob::URLRequestSlowDownloadJob(
 }
 
 void URLRequestSlowDownloadJob::StartAsync() {
-  if (LowerCaseEqualsASCII(kFinishDownloadUrl, request_->url().spec().c_str()))
+  if (base::LowerCaseEqualsASCII(kFinishDownloadUrl,
+                                 request_->url().spec().c_str())) {
     URLRequestSlowDownloadJob::FinishPendingRequests();
-  if (LowerCaseEqualsASCII(kErrorDownloadUrl, request_->url().spec().c_str()))
+  }
+  if (base::LowerCaseEqualsASCII(kErrorDownloadUrl,
+                                 request_->url().spec().c_str())) {
     URLRequestSlowDownloadJob::ErrorPendingRequests();
+  }
 
   NotifyHeadersComplete();
 }
@@ -164,9 +168,10 @@ URLRequestSlowDownloadJob::FillBufferHelper(IOBuffer* buf,
 bool URLRequestSlowDownloadJob::ReadRawData(IOBuffer* buf,
                                             int buf_size,
                                             int* bytes_read) {
-  if (LowerCaseEqualsASCII(kFinishDownloadUrl,
-                           request_->url().spec().c_str()) ||
-      LowerCaseEqualsASCII(kErrorDownloadUrl, request_->url().spec().c_str())) {
+  if (base::LowerCaseEqualsASCII(kFinishDownloadUrl,
+                                 request_->url().spec().c_str()) ||
+      base::LowerCaseEqualsASCII(kErrorDownloadUrl,
+                                 request_->url().spec().c_str())) {
     VLOG(10) << __FUNCTION__ << " called w/ kFinish/ErrorDownloadUrl.";
     *bytes_read = 0;
     return true;
@@ -233,9 +238,10 @@ void URLRequestSlowDownloadJob::GetResponseInfoConst(
     HttpResponseInfo* info) const {
   // Send back mock headers.
   std::string raw_headers;
-  if (LowerCaseEqualsASCII(kFinishDownloadUrl,
-                           request_->url().spec().c_str()) ||
-      LowerCaseEqualsASCII(kErrorDownloadUrl, request_->url().spec().c_str())) {
+  if (base::LowerCaseEqualsASCII(kFinishDownloadUrl,
+                                 request_->url().spec().c_str()) ||
+      base::LowerCaseEqualsASCII(kErrorDownloadUrl,
+                                 request_->url().spec().c_str())) {
     raw_headers.append(
         "HTTP/1.1 200 OK\n"
         "Content-type: text/plain\n");
@@ -245,14 +251,16 @@ void URLRequestSlowDownloadJob::GetResponseInfoConst(
         "HTTP/1.1 200 OK\n"
         "Content-type: application/octet-stream\n"
         "Cache-Control: max-age=0\n");
-    if (LowerCaseEqualsASCII(kKnownSizeUrl, request_->url().spec().c_str())) {
+    if (base::LowerCaseEqualsASCII(kKnownSizeUrl,
+                                   request_->url().spec().c_str())) {
       raw_headers.append(base::StringPrintf(
           "Content-Length: %d\n", kFirstDownloadSize + kSecondDownloadSize));
     }
   }
 
   // ParseRawHeaders expects \0 to end each header line.
-  ReplaceSubstringsAfterOffset(&raw_headers, 0, "\n", std::string("\0", 1));
+  base::ReplaceSubstringsAfterOffset(&raw_headers, 0, "\n",
+                                     std::string("\0", 1));
   info->headers = new HttpResponseHeaders(raw_headers);
 }
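
Note (not part of the patch): a minimal sketch of the two base::-namespaced helpers this change switches to, shown outside URLRequestSlowDownloadJob. The wrapper names and the placeholder URL value below are illustrative assumptions; the base::LowerCaseEqualsASCII and base::ReplaceSubstringsAfterOffset calls mirror the ones added above, with the same argument order, and are assumed to be declared in base/strings/string_util.h.

#include <string>

#include "base/strings/string_util.h"

// Sketch: case-insensitively match a request URL spec against an expected
// all-lowercase constant, as StartAsync() and ReadRawData() do above.
bool IsFinishDownloadUrl(const std::string& spec) {
  // Placeholder value; the real kFinishDownloadUrl is defined in
  // url_request_slow_download_job.cc.
  static const char kFinishDownloadUrl[] = "http://example.test/download-finish";
  return base::LowerCaseEqualsASCII(kFinishDownloadUrl, spec.c_str());
}

// Sketch: the header post-processing done in GetResponseInfoConst(). Every
// '\n' is replaced with an embedded '\0', since the raw-headers string handed
// to HttpResponseHeaders uses '\0' to terminate each header line.
std::string ToRawHeaderBlock(std::string headers) {
  base::ReplaceSubstringsAfterOffset(&headers, 0, "\n", std::string("\0", 1));
  return headers;
}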