Index: net/url_request/url_request_job.cc
diff --git a/net/url_request/url_request_job.cc b/net/url_request/url_request_job.cc
index d2fa68577ea4e6452d34bdad65a7828d912d9b07..baa5fc83eb1b562ff991da1240e76a28eeb0327b 100644
--- a/net/url_request/url_request_job.cc
+++ b/net/url_request/url_request_job.cc
@@ -65,7 +65,7 @@ void URLRequestJob::DetachRequest() {
 // This function calls ReadData to get stream data. If a filter exists, passes
 // the data to the attached filter. Then returns the output from filter back to
 // the caller.
-bool URLRequestJob::Read(IOBuffer* buf, int buf_size, int *bytes_read) {
+bool URLRequestJob::Read(IOBuffer* buf, int buf_size, int* bytes_read) {
   bool rv = false;
 
   DCHECK_LT(buf_size, 1000000);  // Sanity check.
@@ -86,7 +86,7 @@ bool URLRequestJob::Read(IOBuffer* buf, int buf_size, int *bytes_read) {
     filtered_read_buffer_len_ = buf_size;
 
     if (ReadFilteredData(bytes_read)) {
-      rv = true; // We have data to return.
+      rv = true;  // We have data to return.
 
       // It is fine to call DoneReading even if ReadFilteredData receives 0
       // bytes from the net, but we avoid making that call if we know for
@@ -142,8 +142,7 @@ Filter* URLRequestJob::SetupFilter() const {
   return NULL;
 }
 
-bool URLRequestJob::IsRedirectResponse(GURL* location,
-                                       int* http_status_code) {
+bool URLRequestJob::IsRedirectResponse(GURL* location, int* http_status_code) {
   // For non-HTTP jobs, headers will be null.
   HttpResponseHeaders* headers = request_->response_headers();
   if (!headers)
@@ -189,8 +188,7 @@ void URLRequestJob::CancelAuth() {
   NOTREACHED();
 }
 
-void URLRequestJob::ContinueWithCertificate(
-    X509Certificate* client_cert) {
+void URLRequestJob::ContinueWithCertificate(X509Certificate* client_cert) {
   // The derived class should implement this!
   NOTREACHED();
 }
@@ -399,7 +397,7 @@ void URLRequestJob::NotifyReadComplete(int bytes_read) {
 
   // TODO(darin): Bug 1004233. Re-enable this test once all of the chrome
   // unit_tests have been fixed to not trip this.
-  //DCHECK(!request_->status().is_io_pending());
+  // DCHECK(!request_->status().is_io_pending());
 
   // The headers should be complete before reads complete
   DCHECK(has_handled_response_);
@@ -438,7 +436,7 @@ void URLRequestJob::NotifyReadComplete(int bytes_read) {
            << " post total = " << postfilter_bytes_read_;
 }
 
-void URLRequestJob::NotifyStartError(const URLRequestStatus &status) {
+void URLRequestJob::NotifyStartError(const URLRequestStatus& status) {
   DCHECK(!has_handled_response_);
   has_handled_response_ = true;
   if (request_) {
@@ -452,7 +450,7 @@ void URLRequestJob::NotifyStartError(const URLRequestStatus &status) {
   }
 }
 
-void URLRequestJob::NotifyDone(const URLRequestStatus &status) {
+void URLRequestJob::NotifyDone(const URLRequestStatus& status) {
   DCHECK(!done_) << "Job sending done notification twice";
   if (done_)
     return;
@@ -491,8 +489,7 @@ void URLRequestJob::NotifyDone(const URLRequestStatus &status) {
 
 void URLRequestJob::CompleteNotifyDone() {
   // Check if we should notify the delegate that we're done because of an error.
-  if (request_ &&
-      !request_->status().is_success() &&
+  if (request_ && !request_->status().is_success() &&
       request_->has_delegate()) {
     // We report the error differently depending on whether we've called
     // OnResponseStarted yet.
@@ -526,8 +523,7 @@ void URLRequestJob::OnCallToDelegateComplete() {
   request_->OnCallToDelegateComplete();
 }
 
-bool URLRequestJob::ReadRawData(IOBuffer* buf, int buf_size,
-                                int *bytes_read) {
+bool URLRequestJob::ReadRawData(IOBuffer* buf, int buf_size, int* bytes_read) {
   DCHECK(bytes_read);
   *bytes_read = 0;
   return true;
@@ -627,10 +623,11 @@ bool URLRequestJob::ReadFilteredData(int* bytes_read) {
     case Filter::FILTER_ERROR: {
       DVLOG(1) << __FUNCTION__ << "() "
                << "\"" << (request_ ? request_->url().spec() : "???")
-               << "\"" << " Filter Error";
+               << "\""
+               << " Filter Error";
       filter_needs_more_output_space_ = false;
       NotifyDone(URLRequestStatus(URLRequestStatus::FAILED,
-                 ERR_CONTENT_DECODING_FAILED));
+                                  ERR_CONTENT_DECODING_FAILED));
       rv = false;
       break;
     }
@@ -647,7 +644,8 @@ bool URLRequestJob::ReadFilteredData(int* bytes_read) {
           filtered_data_len > 0) {
         request()->net_log().AddByteTransferEvent(
             NetLog::TYPE_URL_REQUEST_JOB_FILTERED_BYTES_READ,
-            filtered_data_len, filtered_read_buffer_->data());
+            filtered_data_len,
+            filtered_read_buffer_->data());
       }
     } else {
       // we are done, or there is no data left.
@@ -673,11 +671,10 @@ const URLRequestStatus URLRequestJob::GetStatus() {
   if (request_)
     return request_->status();
   // If the request is gone, we must be cancelled.
-  return URLRequestStatus(URLRequestStatus::CANCELED,
-                          ERR_ABORTED);
+  return URLRequestStatus(URLRequestStatus::CANCELED, ERR_ABORTED);
 }
 
-void URLRequestJob::SetStatus(const URLRequestStatus &status) {
+void URLRequestJob::SetStatus(const URLRequestStatus& status) {
   if (request_)
     request_->set_status(status);
 }
@@ -701,7 +698,8 @@ bool URLRequestJob::ReadRawDataForFilter(int* bytes_read) {
   return rv;
 }
 
-bool URLRequestJob::ReadRawDataHelper(IOBuffer* buf, int buf_size,
+bool URLRequestJob::ReadRawDataHelper(IOBuffer* buf,
+                                      int buf_size,
                                       int* bytes_read) {
   DCHECK(!request_->status().is_io_pending());
   DCHECK(raw_read_buffer_.get() == NULL);
@@ -734,7 +732,8 @@ void URLRequestJob::OnRawReadComplete(int bytes_read) {
       bytes_read > 0) {
     request()->net_log().AddByteTransferEvent(
         NetLog::TYPE_URL_REQUEST_JOB_BYTES_READ,
-        bytes_read, raw_read_buffer_->data());
+        bytes_read,
+        raw_read_buffer_->data());
   }
 
   if (bytes_read > 0) {
@@ -759,7 +758,7 @@ void URLRequestJob::RecordBytesRead(int bytes_read) {
 }
 
 bool URLRequestJob::FilterHasData() {
-    return filter_.get() && filter_->stream_data_len();
+  return filter_.get() && filter_->stream_data_len();
 }
 
 void URLRequestJob::UpdatePacketReadTimes() {
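
Not part of the patch: the comment block in the first hunk describes the contract of Read() -- data is handed back synchronously through *bytes_read, zero bytes means end of stream, and false means an error or pending IO. Below is a minimal standalone sketch of that caller-facing contract; FakeJob and the simplified IOBuffer here are hypothetical stand-ins, not the real net/ classes.

// sketch.cc -- illustrative only; builds standalone with any C++11 compiler.
#include <algorithm>
#include <cstring>
#include <iostream>
#include <string>
#include <utility>

// Stand-in for net::IOBuffer: caller-owned storage the job writes into.
struct IOBuffer {
  char data[4096];
};

// Stand-in job that serves a canned payload synchronously.
class FakeJob {
 public:
  explicit FakeJob(std::string payload) : payload_(std::move(payload)) {}

  // Same shape as URLRequestJob::Read():
  //   returns true,  *bytes_read > 0   -> data is available now
  //   returns true,  *bytes_read == 0  -> end of stream
  //   returns false                    -> error or IO still pending
  bool Read(IOBuffer* buf, int buf_size, int* bytes_read) {
    const int remaining = static_cast<int>(payload_.size()) - offset_;
    const int n = std::min(remaining, buf_size);
    std::memcpy(buf->data, payload_.data() + offset_, n);
    offset_ += n;
    *bytes_read = n;  // Becomes 0 once the payload is exhausted.
    return true;      // This fake never fails or goes asynchronous.
  }

 private:
  std::string payload_;
  int offset_ = 0;
};

int main() {
  FakeJob job("hello, world");
  IOBuffer buf;
  int bytes_read = 0;
  // Typical caller loop: keep reading until end of stream or failure.
  while (job.Read(&buf, 5, &bytes_read) && bytes_read > 0)
    std::cout.write(buf.data, bytes_read);
  std::cout << "\n";
}

When a filter is attached, the real Read() routes the raw bytes through ReadFilteredData() before returning them, as the patched comment says, but the tri-state result sketched above is what callers observe either way.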