Index: net/http/http_response_headers.cc
diff --git a/net/http/http_response_headers.cc b/net/http/http_response_headers.cc
index 940c0484fad87939cec6a948ea7ae0421d4319b6..94d9374f38d7fc59308a70f9a3a4d628141cc9b2 100644
--- a/net/http/http_response_headers.cc
+++ b/net/http/http_response_headers.cc
@@ -44,61 +44,39 @@ namespace {
// These headers are RFC 2616 hop-by-hop headers;
// not to be stored by caches.
const char* const kHopByHopResponseHeaders[] = {
- "connection",
- "proxy-connection",
- "keep-alive",
- "trailer",
- "transfer-encoding",
- "upgrade"
-};
+ "connection", "proxy-connection", "keep-alive",
+ "trailer", "transfer-encoding", "upgrade"};
// These headers are challenge response headers;
// not to be stored by caches.
-const char* const kChallengeResponseHeaders[] = {
- "www-authenticate",
- "proxy-authenticate"
-};
+const char* const kChallengeResponseHeaders[] = {"www-authenticate",
+ "proxy-authenticate"};
// These headers are cookie setting headers;
// not to be stored by caches or disclosed otherwise.
-const char* const kCookieResponseHeaders[] = {
- "set-cookie",
- "set-cookie2"
-};
+const char* const kCookieResponseHeaders[] = {"set-cookie", "set-cookie2"};
// By default, do not cache Strict-Transport-Security or Public-Key-Pins.
// This avoids erroneously re-processing them on page loads from cache ---
// they are defined to be valid only on live and error-free HTTPS
// connections.
-const char* const kSecurityStateHeaders[] = {
- "strict-transport-security",
- "public-key-pins"
-};
+const char* const kSecurityStateHeaders[] = {"strict-transport-security",
+ "public-key-pins"};
// These response headers are not copied from a 304/206 response to the cached
// response headers. This list is based on Mozilla's nsHttpResponseHead.cpp.
const char* const kNonUpdatedHeaders[] = {
- "connection",
- "proxy-connection",
- "keep-alive",
- "www-authenticate",
- "proxy-authenticate",
- "trailer",
- "transfer-encoding",
- "upgrade",
- "etag",
- "x-frame-options",
- "x-xss-protection",
+ "connection", "proxy-connection", "keep-alive",
+ "www-authenticate", "proxy-authenticate", "trailer",
+ "transfer-encoding", "upgrade", "etag",
+ "x-frame-options", "x-xss-protection",
};
// Some header prefixes mean "Don't copy this header from a 304 response.".
// Rather than listing all the relevant headers, we can consolidate them into
// this list:
-const char* const kNonUpdatedHeaderPrefixes[] = {
- "content-",
- "x-content-",
- "x-webkit-"
-};
+const char* const kNonUpdatedHeaderPrefixes[] = {"content-", "x-content-",
+ "x-webkit-"};
bool ShouldUpdateHeader(const std::string::const_iterator& name_begin,
const std::string::const_iterator& name_end) {
@@ -108,7 +86,8 @@ bool ShouldUpdateHeader(const std::string::const_iterator& name_begin,
}
for (size_t i = 0; i < arraysize(kNonUpdatedHeaderPrefixes); ++i) {
if (StartsWithASCII(std::string(name_begin, name_end),
- kNonUpdatedHeaderPrefixes[i], false))
+ kNonUpdatedHeaderPrefixes[i],
+ false))
return false;
}
return true;
@@ -152,13 +131,13 @@ HttpResponseHeaders::HttpResponseHeaders(const std::string& raw_input)
// that would actually create a double call between the original
// HttpResponseHeader that was serialized, and initialization of the
// new object from that pickle.
- UMA_HISTOGRAM_CUSTOM_ENUMERATION("Net.HttpResponseCode",
- HttpUtil::MapStatusCodeForHistogram(
- response_code_),
- // Note the third argument is only
- // evaluated once, see macro
- // definition for details.
- HttpUtil::GetStatusCodesForHistogram());
+ UMA_HISTOGRAM_CUSTOM_ENUMERATION(
+ "Net.HttpResponseCode",
+ HttpUtil::MapStatusCodeForHistogram(response_code_),
+ // Note the third argument is only
+ // evaluated once, see macro
+ // definition for details.
+ HttpUtil::GetStatusCodesForHistogram());
}
HttpResponseHeaders::HttpResponseHeaders(const Pickle& pickle,
@@ -209,7 +188,8 @@ void HttpResponseHeaders::Persist(Pickle* pickle, PersistOptions options) {
// Locate the start of the next header.
size_t k = i;
- while (++k < parsed_.size() && parsed_[k].is_continuation()) {}
+ while (++k < parsed_.size() && parsed_[k].is_continuation()) {
+ }
--k;
std::string header_name(parsed_[i].name_begin, parsed_[i].name_end);
@@ -249,7 +229,8 @@ void HttpResponseHeaders::Update(const HttpResponseHeaders& new_headers) {
// Locate the start of the next header.
size_t k = i;
- while (++k < new_parsed.size() && new_parsed[k].is_continuation()) {}
+ while (++k < new_parsed.size() && new_parsed[k].is_continuation()) {
+ }
--k;
const std::string::const_iterator& name_begin = new_parsed[i].name_begin;
@@ -280,7 +261,8 @@ void HttpResponseHeaders::MergeWithHeaders(const std::string& raw_headers,
// Locate the start of the next header.
size_t k = i;
- while (++k < parsed_.size() && parsed_[k].is_continuation()) {}
+ while (++k < parsed_.size() && parsed_[k].is_continuation()) {
+ }
--k;
std::string name(parsed_[i].name_begin, parsed_[i].name_end);
@@ -374,10 +356,9 @@ void HttpResponseHeaders::ReplaceStatusLine(const std::string& new_status) {
MergeWithHeaders(new_raw_headers, empty_to_remove);
}
-void HttpResponseHeaders::UpdateWithNewRange(
- const HttpByteRange& byte_range,
- int64 resource_size,
- bool replace_status_line) {
+void HttpResponseHeaders::UpdateWithNewRange(const HttpByteRange& byte_range,
+ int64 resource_size,
+ bool replace_status_line) {
DCHECK(byte_range.IsValid());
DCHECK(byte_range.HasFirstBytePosition());
DCHECK(byte_range.HasLastBytePosition());
@@ -396,7 +377,10 @@ void HttpResponseHeaders::UpdateWithNewRange(
ReplaceStatusLine("HTTP/1.1 206 Partial Content");
AddHeader(base::StringPrintf("%s: bytes %" PRId64 "-%" PRId64 "/%" PRId64,
- kRangeHeader, start, end, resource_size));
+ kRangeHeader,
+ start,
+ end,
+ resource_size));
AddHeader(base::StringPrintf("%s: %" PRId64, kLengthHeader, range_len));
}
@@ -409,9 +393,9 @@ void HttpResponseHeaders::Parse(const std::string& raw_input) {
std::find(line_begin, raw_input.end(), '\0');
// has_headers = true, if there is any data following the status line.
// Used by ParseStatusLine() to decide if a HTTP/0.9 is really a HTTP/1.0.
- bool has_headers = (line_end != raw_input.end() &&
- (line_end + 1) != raw_input.end() &&
- *(line_end + 1) != '\0');
+ bool has_headers =
+ (line_end != raw_input.end() && (line_end + 1) != raw_input.end() &&
+ *(line_end + 1) != '\0');
ParseStatusLine(line_begin, line_end, has_headers);
raw_headers_.push_back('\0'); // Terminate status line with a null.
@@ -440,8 +424,8 @@ void HttpResponseHeaders::Parse(const std::string& raw_input) {
// Adjust to point at the null byte following the status line
line_end = raw_headers_.begin() + status_line_len - 1;
- HttpUtil::HeadersIterator headers(line_end + 1, raw_headers_.end(),
- std::string(1, '\0'));
+ HttpUtil::HeadersIterator headers(
+ line_end + 1, raw_headers_.end(), std::string(1, '\0'));
while (headers.GetNext()) {
AddHeader(headers.name_begin(),
headers.name_end(),
@@ -483,8 +467,8 @@ void HttpResponseHeaders::GetNormalizedHeaders(std::string* output) const {
iter = headers_map.find(lower_name);
if (iter == headers_map.end()) {
- iter = headers_map.insert(
- HeadersMap::value_type(lower_name, headers.size())).first;
+ iter = headers_map.insert(HeadersMap::value_type(lower_name,
+ headers.size())).first;
headers.push_back(name + ": ");
} else {
headers[iter->second].append(", ");
@@ -608,7 +592,9 @@ bool HttpResponseHeaders::HasHeaderValue(const base::StringPiece& name,
std::string temp;
while (EnumerateHeader(&iter, name, &temp)) {
if (value.size() == temp.size() &&
- std::equal(temp.begin(), temp.end(), value.begin(),
+ std::equal(temp.begin(),
+ temp.end(),
+ value.begin(),
base::CaseInsensitiveCompare<char>()))
return true;
}
@@ -655,7 +641,7 @@ HttpVersion HttpResponseHeaders::ParseVersion(
return HttpVersion();
}
- ++p; // from / to first digit.
+ ++p; // from / to first digit.
++dot; // from . to second digit.
if (!(*p >= '0' && *p <= '9' && *dot >= '0' && *dot <= '9')) {
@@ -750,7 +736,9 @@ size_t HttpResponseHeaders::FindHeader(size_t from,
const std::string::const_iterator& name_begin = parsed_[i].name_begin;
const std::string::const_iterator& name_end = parsed_[i].name_end;
if (static_cast<size_t>(name_end - name_begin) == search.size() &&
- std::equal(name_begin, name_end, search.begin(),
+ std::equal(name_begin,
+ name_end,
+ search.begin(),
base::CaseInsensitiveCompare<char>()))
return i;
}
@@ -806,7 +794,7 @@ void HttpResponseHeaders::AddNonCacheableHeaders(HeaderSet* result) const {
continue;
}
// if it doesn't end with a quote, then treat as malformed
- if (value[value.size()-1] != '\"')
+ if (value[value.size() - 1] != '\"')
continue;
// process the value as a comma-separated list of items. Each
@@ -919,11 +907,8 @@ bool HttpResponseHeaders::IsRedirect(std::string* location) const {
bool HttpResponseHeaders::IsRedirectResponseCode(int response_code) {
// Users probably want to see 300 (multiple choice) pages, so we don't count
// them as redirects that need to be followed.
- return (response_code == 301 ||
- response_code == 302 ||
- response_code == 303 ||
- response_code == 307 ||
- response_code == 308);
+ return (response_code == 301 || response_code == 302 ||
+ response_code == 303 || response_code == 307 || response_code == 308);
}
// From RFC 2616 section 13.2.4:
@@ -938,8 +923,7 @@ bool HttpResponseHeaders::IsRedirectResponseCode(int response_code) {
bool HttpResponseHeaders::RequiresValidation(const Time& request_time,
const Time& response_time,
const Time& current_time) const {
- TimeDelta lifetime =
- GetFreshnessLifetime(response_time);
+ TimeDelta lifetime = GetFreshnessLifetime(response_time);
if (lifetime == TimeDelta())
return true;
@@ -975,7 +959,7 @@ TimeDelta HttpResponseHeaders::GetFreshnessLifetime(
HasHeaderValue("cache-control", "no-store") ||
HasHeaderValue("pragma", "no-cache") ||
HasHeaderValue("vary", "*")) // see RFC 2616 section 13.6
- return TimeDelta(); // not fresh
+ return TimeDelta(); // not fresh
// NOTE: "Cache-Control: max-age" overrides Expires, so we only check the
// Expires header after checking for max-age in GetFreshnessLifetime. This
@@ -1106,13 +1090,12 @@ bool HttpResponseHeaders::GetMaxAgeValue(TimeDelta* result) const {
void* iter = NULL;
while (EnumerateHeader(&iter, name, &value)) {
if (value.size() > kMaxAgePrefixLen) {
- if (LowerCaseEqualsASCII(value.begin(),
- value.begin() + kMaxAgePrefixLen,
- kMaxAgePrefix)) {
+ if (LowerCaseEqualsASCII(
+ value.begin(), value.begin() + kMaxAgePrefixLen, kMaxAgePrefix)) {
int64 seconds;
- base::StringToInt64(StringPiece(value.begin() + kMaxAgePrefixLen,
- value.end()),
- &seconds);
+ base::StringToInt64(
+ StringPiece(value.begin() + kMaxAgePrefixLen, value.end()),
+ &seconds);
*result = TimeDelta::FromSeconds(seconds);
return true;
}
@@ -1196,10 +1179,8 @@ bool HttpResponseHeaders::HasStrongValidators() const {
EnumerateHeader(NULL, "Last-Modified", &last_modified_header);
std::string date_header;
EnumerateHeader(NULL, "Date", &date_header);
- return HttpUtil::HasStrongValidators(GetHttpVersion(),
- etag_header,
- last_modified_header,
- date_header);
+ return HttpUtil::HasStrongValidators(
+ GetHttpVersion(), etag_header, last_modified_header, date_header);
}
// From RFC 2616:
@@ -1258,9 +1239,8 @@ bool HttpResponseHeaders::GetContentRange(int64* first_byte_position,
std::string::const_iterator content_range_spec_end =
content_range_spec.begin() + space_position;
HttpUtil::TrimLWS(&content_range_spec_begin, &content_range_spec_end);
- if (!LowerCaseEqualsASCII(content_range_spec_begin,
- content_range_spec_end,
- "bytes")) {
+ if (!LowerCaseEqualsASCII(
+ content_range_spec_begin, content_range_spec_end, "bytes")) {
return false;
}
@@ -1289,9 +1269,9 @@ bool HttpResponseHeaders::GetContentRange(int64* first_byte_position,
byte_range_resp_spec.begin() + minus_position;
HttpUtil::TrimLWS(&first_byte_pos_begin, &first_byte_pos_end);
- bool ok = base::StringToInt64(StringPiece(first_byte_pos_begin,
- first_byte_pos_end),
- first_byte_position);
+ bool ok = base::StringToInt64(
+ StringPiece(first_byte_pos_begin, first_byte_pos_end),
+ first_byte_position);
// Obtain last-byte-pos.
std::string::const_iterator last_byte_pos_begin =
@@ -1300,9 +1280,9 @@ bool HttpResponseHeaders::GetContentRange(int64* first_byte_position,
byte_range_resp_spec.end();
HttpUtil::TrimLWS(&last_byte_pos_begin, &last_byte_pos_end);
- ok &= base::StringToInt64(StringPiece(last_byte_pos_begin,
- last_byte_pos_end),
- last_byte_position);
+ ok &= base::StringToInt64(
+ StringPiece(last_byte_pos_begin, last_byte_pos_end),
+ last_byte_position);
if (!ok) {
*first_byte_position = *last_byte_position = -1;
return false;
@@ -1319,15 +1299,14 @@ bool HttpResponseHeaders::GetContentRange(int64* first_byte_position,
// If instance-length == "*".
std::string::const_iterator instance_length_begin =
content_range_spec.begin() + slash_position + 1;
- std::string::const_iterator instance_length_end =
- content_range_spec.end();
+ std::string::const_iterator instance_length_end = content_range_spec.end();
HttpUtil::TrimLWS(&instance_length_begin, &instance_length_end);
if (LowerCaseEqualsASCII(instance_length_begin, instance_length_end, "*")) {
return false;
- } else if (!base::StringToInt64(StringPiece(instance_length_begin,
- instance_length_end),
- instance_length)) {
+ } else if (!base::StringToInt64(
+ StringPiece(instance_length_begin, instance_length_end),
+ instance_length)) {
*instance_length = -1;
return false;
}
@@ -1351,9 +1330,8 @@ base::Value* HttpResponseHeaders::NetLogCallback(
std::string value;
while (EnumerateHeaderLines(&iterator, &name, &value)) {
std::string log_value = ElideHeaderValueForNetLog(log_level, name, value);
- headers->Append(
- new base::StringValue(
- base::StringPrintf("%s: %s", name.c_str(), log_value.c_str())));
+ headers->Append(new base::StringValue(
+ base::StringPrintf("%s: %s", name.c_str(), log_value.c_str())));
}
dict->Set("headers", headers);
return dict;
@@ -1368,8 +1346,7 @@ bool HttpResponseHeaders::FromNetLogParam(
const base::DictionaryValue* dict = NULL;
const base::ListValue* header_list = NULL;
- if (!event_param ||
- !event_param->GetAsDictionary(&dict) ||
+ if (!event_param || !event_param->GetAsDictionary(&dict) ||
!dict->GetList("headers", &header_list)) {
return false;
}
@@ -1393,7 +1370,7 @@ bool HttpResponseHeaders::FromNetLogParam(
bool HttpResponseHeaders::IsChunkEncoded() const {
// Ignore spurious chunked responses from HTTP/1.0 servers and proxies.
return GetHttpVersion() >= HttpVersion(1, 1) &&
- HasHeaderValue("Transfer-Encoding", "chunked");
+ HasHeaderValue("Transfer-Encoding", "chunked");
}
#if defined(SPDY_PROXY_AUTH_ORIGIN)
@@ -1412,7 +1389,8 @@ bool HttpResponseHeaders::GetDataReductionProxyBypassDuration(
int64 seconds;
if (!base::StringToInt64(
StringPiece(value.begin() + action_prefix.size(), value.end()),
- &seconds) || seconds < 0) {
+ &seconds) ||
+ seconds < 0) {
continue; // In case there is a well formed instruction.
}
*duration = TimeDelta::FromSeconds(seconds);
@@ -1438,15 +1416,15 @@ bool HttpResponseHeaders::GetDataReductionProxyInfo(
// 'block' takes precedence over 'bypass', so look for it first.
// TODO(bengr): Reduce checks for 'block' and 'bypass' to a single loop.
- if (GetDataReductionProxyBypassDuration(
- "block=", &proxy_info->bypass_duration)) {
+ if (GetDataReductionProxyBypassDuration("block=",
+ &proxy_info->bypass_duration)) {
proxy_info->bypass_all = true;
return true;
}
// Next, look for 'bypass'.
- if (GetDataReductionProxyBypassDuration(
- "bypass=", &proxy_info->bypass_duration)) {
+ if (GetDataReductionProxyBypassDuration("bypass=",
+ &proxy_info->bypass_duration)) {
return true;
}
return false;