Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "chrome/browser/safe_browsing/protocol_manager.h" | 5 #include "chrome/browser/safe_browsing/protocol_manager.h" |
| 6 | 6 |
| 7 #include <utility> | 7 #include <utility> |
| 8 | 8 |
| 9 #include "base/base64.h" | 9 #include "base/base64.h" |
| 10 #include "base/environment.h" | 10 #include "base/environment.h" |
| (...skipping 223 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 234 const std::string get_hash = FormatGetHash(prefixes); | 234 const std::string get_hash = FormatGetHash(prefixes); |
| 235 | 235 |
| 236 fetcher->SetLoadFlags(net::LOAD_DISABLE_CACHE); | 236 fetcher->SetLoadFlags(net::LOAD_DISABLE_CACHE); |
| 237 fetcher->SetRequestContext(request_context_getter_.get()); | 237 fetcher->SetRequestContext(request_context_getter_.get()); |
| 238 fetcher->SetUploadData("text/plain", get_hash); | 238 fetcher->SetUploadData("text/plain", get_hash); |
| 239 fetcher->Start(); | 239 fetcher->Start(); |
| 240 } | 240 } |
| 241 | 241 |
| 242 std::string SafeBrowsingProtocolManager::GetV4HashRequest( | 242 std::string SafeBrowsingProtocolManager::GetV4HashRequest( |
| 243 const std::vector<SBPrefix>& prefixes, | 243 const std::vector<SBPrefix>& prefixes, |
| 244 const std::vector<PlatformType>& platforms, | |
| 244 ThreatType threat_type) { | 245 ThreatType threat_type) { |
| 245 // Build the request. Client info and client states are not added to the | 246 // Build the request. Client info and client states are not added to the |
| 246 // request protocol buffer. Client info is passed as params in the url. | 247 // request protocol buffer. Client info is passed as params in the url. |
| 247 FindFullHashesRequest req; | 248 FindFullHashesRequest req; |
| 248 ThreatInfo* info = req.mutable_threat_info(); | 249 ThreatInfo* info = req.mutable_threat_info(); |
| 249 info->add_threat_types(threat_type); | 250 info->add_threat_types(threat_type); |
| 250 info->add_platform_types(CHROME_PLATFORM); | |
| 251 info->add_threat_entry_types(URL_EXPRESSION); | 251 info->add_threat_entry_types(URL_EXPRESSION); |
| 252 for (const PlatformType p : platforms) { | |
| 253 info->add_platform_types(p); | |
| 254 } | |
| 252 for (const SBPrefix& prefix : prefixes) { | 255 for (const SBPrefix& prefix : prefixes) { |
| 253 std::string hash(reinterpret_cast<const char*>(&prefix), sizeof(SBPrefix)); | 256 std::string hash(reinterpret_cast<const char*>(&prefix), sizeof(SBPrefix)); |
| 254 info->add_threat_entries()->set_hash(hash); | 257 info->add_threat_entries()->set_hash(hash); |
| 255 } | 258 } |
| 256 | 259 |
| 257 // Serialize and Base64 encode. | 260 // Serialize and Base64 encode. |
| 258 std::string req_data, req_base64; | 261 std::string req_data, req_base64; |
| 259 req.SerializeToString(&req_data); | 262 req.SerializeToString(&req_data); |
| 260 base::Base64Encode(req_data, &req_base64); | 263 base::Base64Encode(req_data, &req_base64); |
| 261 | 264 |
| 262 return req_base64; | 265 return req_base64; |
| 263 } | 266 } |
| 264 | 267 |
| 268 bool SafeBrowsingProtocolManager::ParseV4HashResponse( | |
| 269 const std::string& data, | |
| 270 std::vector<SBFullHashResult>* full_hashes, | |
| 271 base::TimeDelta* negative_cache_duration) { | |
| 272 FindFullHashesResponse response; | |
| 273 | |
| 274 if (!response.ParseFromString(data)) { | |
| 275 // TODO(kcarattini): Add UMA. | |
| 276 return false; | |
| 277 } | |
| 278 | |
| 279 if (response.has_negative_cache_duration()) { | |
| 280 // Seconds resolution is good enough so we ignore the nanos field. | |
| 281 *negative_cache_duration = base::TimeDelta::FromSeconds( | |
| 282 response.negative_cache_duration().seconds()); | |
| 283 } | |
| 284 | |
| 285 // Loop over the threat matches and fill in full_hashes. | |
| 286 for (const ThreatMatch& match : response.matches()) { | |
| 287 // Make sure the platform and threat entry type match. | |
| 288 if (!(match.has_threat_entry_type() && | |
| 289 match.threat_entry_type() == URL_EXPRESSION && | |
| 290 match.has_threat())) { | |
| 291 continue; | |
| 292 } | |
| 293 | |
| 294 // Fill in the full hash. | |
| 295 SBFullHashResult result; | |
| 296 result.hash = StringToSBFullHash(match.threat().hash()); | |
| 297 | |
| 298 if (match.has_cache_duration()) { | |
| 299 // Seconds resolution is good enough so we ignore the nanos field. | |
| 300 result.cache_duration = base::TimeDelta::FromSeconds( | |
| 301 match.cache_duration().seconds()); | |
| 302 } | |
| 303 | |
| 304 // Different threat types will handle the metadata differently. | |
| 305 if (match.has_threat_type() && match.threat_type() == API_ABUSE && | |
| 306 match.has_platform_type() && | |
| 307 match.platform_type() == CHROME_PLATFORM && | |
| 308 match.has_threat_entry_metadata()) { | |
| 309 // For API Abuse, store a csv of the returned permissions. | |
| 310 for (const ThreatEntryMetadata::MetadataEntry& m : | |
| 311 match.threat_entry_metadata().entries()) { | |
| 312 if (m.key() == "permission") { | |
| 313 result.metadata += m.value() + ","; | |
| 314 } | |
| 315 } | |
| 316 } else { | |
| 317 // TODO(kcarattini): Add UMA for unexpected threat type match. | |
| 318 return false; | |
| 319 } | |
| 320 | |
| 321 full_hashes->push_back(result); | |
| 322 } | |
| 323 return true; | |
| 324 } | |
| 325 | |
| 265 void SafeBrowsingProtocolManager::GetV4FullHashes( | 326 void SafeBrowsingProtocolManager::GetV4FullHashes( |
| 266 const std::vector<SBPrefix>& prefixes, | 327 const std::vector<SBPrefix>& prefixes, |
| 328 const std::vector<PlatformType>& platforms, | |
| 267 ThreatType threat_type, | 329 ThreatType threat_type, |
| 268 FullHashCallback callback) { | 330 FullHashCallback callback) { |
| 269 DCHECK(CalledOnValidThread()); | 331 DCHECK(CalledOnValidThread()); |
| 270 // TODO(kcarattini): Implement backoff behavior. | 332 // TODO(kcarattini): Implement backoff behavior. |
| 271 | 333 |
| 272 std::string req_base64 = GetV4HashRequest(prefixes, threat_type); | 334 std::string req_base64 = GetV4HashRequest(prefixes, platforms, threat_type); |
| 273 GURL gethash_url = GetV4HashUrl(req_base64); | 335 GURL gethash_url = GetV4HashUrl(req_base64); |
| 274 | 336 |
| 275 net::URLFetcher* fetcher = | 337 net::URLFetcher* fetcher = |
| 276 net::URLFetcher::Create(url_fetcher_id_++, gethash_url, | 338 net::URLFetcher::Create(url_fetcher_id_++, gethash_url, |
| 277 net::URLFetcher::GET, this) | 339 net::URLFetcher::GET, this) |
| 278 .release(); | 340 .release(); |
| 279 // TODO(kcarattini): Implement a new response processor. | |
| 280 v4_hash_requests_[fetcher] = FullHashDetails(callback, | 341 v4_hash_requests_[fetcher] = FullHashDetails(callback, |
| 281 false /* is_download */); | 342 false /* is_download */); |
| 282 | 343 |
| 283 fetcher->SetLoadFlags(net::LOAD_DISABLE_CACHE); | 344 fetcher->SetLoadFlags(net::LOAD_DISABLE_CACHE); |
| 284 fetcher->SetRequestContext(request_context_getter_.get()); | 345 fetcher->SetRequestContext(request_context_getter_.get()); |
| 285 fetcher->Start(); | 346 fetcher->Start(); |
| 286 } | 347 } |
| 287 | 348 |
| 288 void SafeBrowsingProtocolManager::GetFullHashesWithApis( | 349 void SafeBrowsingProtocolManager::GetFullHashesWithApis( |
| 289 const std::vector<SBPrefix>& prefixes, | 350 const std::vector<SBPrefix>& prefixes, |
| 290 FullHashCallback callback) { | 351 FullHashCallback callback) { |
| 291 GetV4FullHashes(prefixes, API_ABUSE, callback); | 352 std::vector<PlatformType> platform = {CHROME_PLATFORM}; |
|
vakh (old account. dont use)
2016/01/12 23:09:49
platforms?
kcarattini
2016/01/12 23:59:47
I think I'm going to leave this singular to indicate [remainder of comment truncated in extraction — presumably: that only a single platform is passed here].
| |
| 353 GetV4FullHashes(prefixes, platform, API_ABUSE, callback); | |
| 292 } | 354 } |
| 293 | 355 |
| 294 void SafeBrowsingProtocolManager::GetNextUpdate() { | 356 void SafeBrowsingProtocolManager::GetNextUpdate() { |
| 295 DCHECK(CalledOnValidThread()); | 357 DCHECK(CalledOnValidThread()); |
| 296 if (request_.get() || request_type_ != NO_REQUEST) | 358 if (request_.get() || request_type_ != NO_REQUEST) |
| 297 return; | 359 return; |
| 298 | 360 |
| 299 IssueUpdateRequest(); | 361 IssueUpdateRequest(); |
| 300 } | 362 } |
| 301 | 363 |
| 302 // net::URLFetcherDelegate implementation ---------------------------------- | 364 // net::URLFetcherDelegate implementation ---------------------------------- |
| 303 | 365 |
| 304 // All SafeBrowsing request responses are handled here. | 366 // All SafeBrowsing request responses are handled here. |
| 305 // TODO(paulg): Clarify with the SafeBrowsing team whether a failed parse of a | 367 // TODO(paulg): Clarify with the SafeBrowsing team whether a failed parse of a |
| 306 // chunk should retry the download and parse of that chunk (and | 368 // chunk should retry the download and parse of that chunk (and |
| 307 // what back off / how many times to try), and if that effects the | 369 // what back off / how many times to try), and if that effects the |
| 308 // update back off. For now, a failed parse of the chunk means we | 370 // update back off. For now, a failed parse of the chunk means we |
| 309 // drop it. This isn't so bad because the next UPDATE_REQUEST we | 371 // drop it. This isn't so bad because the next UPDATE_REQUEST we |
| 310 // do will report all the chunks we have. If that chunk is still | 372 // do will report all the chunks we have. If that chunk is still |
| 311 // required, the SafeBrowsing servers will tell us to get it again. | 373 // required, the SafeBrowsing servers will tell us to get it again. |
| 312 void SafeBrowsingProtocolManager::OnURLFetchComplete( | 374 void SafeBrowsingProtocolManager::OnURLFetchComplete( |
| 313 const net::URLFetcher* source) { | 375 const net::URLFetcher* source) { |
| 314 DCHECK(CalledOnValidThread()); | 376 DCHECK(CalledOnValidThread()); |
| 315 scoped_ptr<const net::URLFetcher> fetcher; | 377 scoped_ptr<const net::URLFetcher> fetcher; |
| 316 | 378 |
| 317 HashRequests::iterator it = hash_requests_.find(source); | 379 HashRequests::iterator it = hash_requests_.find(source); |
| 380 HashRequests::iterator v4_it = v4_hash_requests_.find(source); | |
| 318 int response_code = source->GetResponseCode(); | 381 int response_code = source->GetResponseCode(); |
| 319 net::URLRequestStatus status = source->GetStatus(); | 382 net::URLRequestStatus status = source->GetStatus(); |
| 320 RecordHttpResponseOrErrorCode(kUmaHashResponseMetricName, status, | 383 RecordHttpResponseOrErrorCode(kUmaHashResponseMetricName, status, |
| 321 response_code); | 384 response_code); |
| 322 if (it != hash_requests_.end()) { | 385 if (it != hash_requests_.end()) { |
| 323 // GetHash response. | 386 // GetHash response. |
| 324 fetcher.reset(it->first); | |
| 325 const FullHashDetails& details = it->second; | 387 const FullHashDetails& details = it->second; |
| 326 std::vector<SBFullHashResult> full_hashes; | 388 std::vector<SBFullHashResult> full_hashes; |
| 327 base::TimeDelta cache_lifetime; | 389 base::TimeDelta cache_lifetime; |
| 328 if (status.is_success() && (response_code == net::HTTP_OK || | 390 if (status.is_success() && (response_code == net::HTTP_OK || |
| 329 response_code == net::HTTP_NO_CONTENT)) { | 391 response_code == net::HTTP_NO_CONTENT)) { |
| 330 // For tracking our GetHash false positive (net::HTTP_NO_CONTENT) rate, | 392 // For tracking our GetHash false positive (net::HTTP_NO_CONTENT) rate, |
| 331 // compared to real (net::HTTP_OK) responses. | 393 // compared to real (net::HTTP_OK) responses. |
| 332 if (response_code == net::HTTP_OK) | 394 if (response_code == net::HTTP_OK) |
| 333 RecordGetHashResult(details.is_download, GET_HASH_STATUS_200); | 395 RecordGetHashResult(details.is_download, GET_HASH_STATUS_200); |
| 334 else | 396 else |
| (...skipping 22 matching lines...) Expand all Loading... | |
| 357 << " failed with error: " << response_code; | 419 << " failed with error: " << response_code; |
| 358 } | 420 } |
| 359 } | 421 } |
| 360 | 422 |
| 361 // Invoke the callback with full_hashes, even if there was a parse error or | 423 // Invoke the callback with full_hashes, even if there was a parse error or |
| 362 // an error response code (in which case full_hashes will be empty). The | 424 // an error response code (in which case full_hashes will be empty). The |
| 363 // caller can't be blocked indefinitely. | 425 // caller can't be blocked indefinitely. |
| 364 details.callback.Run(full_hashes, cache_lifetime); | 426 details.callback.Run(full_hashes, cache_lifetime); |
| 365 | 427 |
| 366 hash_requests_.erase(it); | 428 hash_requests_.erase(it); |
| 429 } else if (v4_it != v4_hash_requests_.end()) { | |
| 430 // V4 FindFullHashes response. | |
| 431 const FullHashDetails& details = v4_it->second; | |
| 432 std::vector<SBFullHashResult> full_hashes; | |
| 433 base::TimeDelta negative_cache_duration; | |
| 434 if (status.is_success() && response_code == net::HTTP_OK) { | |
| 435 // TODO(kcarattini): Add UMA reporting. | |
| 436 // TODO(kcarattini): Implement backoff and minimum waiting duration | |
| 437 // compliance. | |
| 438 std::string data; | |
| 439 source->GetResponseAsString(&data); | |
| 440 if (!ParseV4HashResponse(data, &full_hashes, &negative_cache_duration)) { | |
| 441 full_hashes.clear(); | |
| 442 // TODO(kcarattini): Add UMA reporting. | |
| 443 } | |
| 444 } else { | |
| 445 // TODO(kcarattini): Handle error by setting backoff interval. | |
| 446 // TODO(kcarattini): Add UMA reporting. | |
| 447 DVLOG(1) << "SafeBrowsing GetEncodedFullHashes request for: " << | |
| 448 source->GetURL() << " failed with error: " << status.error() << | |
| 449 " and response code: " << response_code; | |
| 450 } | |
| 451 | |
| 452 // Invoke the callback with full_hashes, even if there was a parse error or | |
| 453 // an error response code (in which case full_hashes will be empty). The | |
| 454 // caller can't be blocked indefinitely. | |
| 455 details.callback.Run(full_hashes, negative_cache_duration); | |
| 456 | |
| 457 v4_hash_requests_.erase(v4_it); | |
| 367 } else { | 458 } else { |
| 368 // Update or chunk response. | 459 // Update or chunk response. |
| 369 fetcher.reset(request_.release()); | 460 fetcher.reset(request_.release()); |
| 370 | 461 |
| 371 if (request_type_ == UPDATE_REQUEST || | 462 if (request_type_ == UPDATE_REQUEST || |
| 372 request_type_ == BACKUP_UPDATE_REQUEST) { | 463 request_type_ == BACKUP_UPDATE_REQUEST) { |
| 373 if (!fetcher.get()) { | 464 if (!fetcher.get()) { |
| 374 // We've timed out waiting for an update response, so we've cancelled | 465 // We've timed out waiting for an update response, so we've cancelled |
| 375 // the update request and scheduled a new one. Ignore this response. | 466 // the update request and scheduled a new one. Ignore this response. |
| 376 return; | 467 return; |
| (...skipping 486 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 863 SafeBrowsingProtocolManager::FullHashDetails::FullHashDetails( | 954 SafeBrowsingProtocolManager::FullHashDetails::FullHashDetails( |
| 864 FullHashCallback callback, | 955 FullHashCallback callback, |
| 865 bool is_download) | 956 bool is_download) |
| 866 : callback(callback), is_download(is_download) {} | 957 : callback(callback), is_download(is_download) {} |
| 867 | 958 |
| 868 SafeBrowsingProtocolManager::FullHashDetails::~FullHashDetails() {} | 959 SafeBrowsingProtocolManager::FullHashDetails::~FullHashDetails() {} |
| 869 | 960 |
| 870 SafeBrowsingProtocolManagerDelegate::~SafeBrowsingProtocolManagerDelegate() {} | 961 SafeBrowsingProtocolManagerDelegate::~SafeBrowsingProtocolManagerDelegate() {} |
| 871 | 962 |
| 872 } // namespace safe_browsing | 963 } // namespace safe_browsing |
| OLD | NEW |