Index: net/url_request/url_request_http_job.cc |
diff --git a/net/url_request/url_request_http_job.cc b/net/url_request/url_request_http_job.cc |
index b7f8d4c6d39fc9b29394320f53efedfa7c6b46df..8d164cada44f6c354411b312de12ef09901b568c 100644 |
--- a/net/url_request/url_request_http_job.cc |
+++ b/net/url_request/url_request_http_job.cc |
@@ -283,7 +283,29 @@ void URLRequestHttpJob::NotifyHeadersComplete() { |
URLRequestJob::NotifyHeadersComplete(); |
} |
-void URLRequestHttpJob::NotifyDone(const URLRequestStatus& status) { |
+void URLRequestHttpJob::NotifyDone(const URLRequestStatus& original_status) { |
+ URLRequestStatus status(original_status); |
+ // Some servers send the body compressed, but specify the content length as |
+  // the uncompressed size.  Although this violates the HTTP spec, we want |
+  // to support it (as IE and Firefox do), but *only* for an exact match. |
+ // See http://crbug.com/79694. |
+ if (status.os_error() == net::ERR_CONNECTION_CLOSED) { |
+ if (request_ && request_->response_headers()) { |
+ int64 expected_length = request_->response_headers()->GetContentLength(); |
+ VLOG(21) << __FUNCTION__ << "() " |
+ << "\"" << request_->url().spec() << "\"" |
+ << " content-length = " << expected_length |
+ << " pre total = " << prefilter_bytes_read() |
+ << " post total = " << postfilter_bytes_read(); |
+ if (0 == expected_length || |
+ postfilter_bytes_read() == expected_length || |
+ prefilter_bytes_read() == expected_length) { |
[Code-review exchange (Rietveld inline comments — not part of the patch itself):
 rvargas (doing something else), 2011/05/19 19:30:36:
   "if expected_length is 0 or prefilter_bytes_read we" — remainder of this comment is truncated in the archive.
 ahendrickson, 2011/05/19 22:29:34: "Done."]
|
+ // Clear the error. |
+ status = URLRequestStatus(); |
+ } |
+ } |
+ } |
+ |
RecordCompressionHistograms(); |
URLRequestJob::NotifyDone(status); |
} |