OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "chrome/browser/safe_browsing/protocol_manager.h" | 5 #include "chrome/browser/safe_browsing/protocol_manager.h" |
6 | 6 |
7 #include <utility> | 7 #include <utility> |
8 | 8 |
9 #include "base/base64.h" | 9 #include "base/base64.h" |
10 #include "base/environment.h" | 10 #include "base/environment.h" |
(...skipping 224 matching lines...) |
235 const std::string get_hash = FormatGetHash(prefixes); | 235 const std::string get_hash = FormatGetHash(prefixes); |
236 | 236 |
237 fetcher->SetLoadFlags(net::LOAD_DISABLE_CACHE); | 237 fetcher->SetLoadFlags(net::LOAD_DISABLE_CACHE); |
238 fetcher->SetRequestContext(request_context_getter_.get()); | 238 fetcher->SetRequestContext(request_context_getter_.get()); |
239 fetcher->SetUploadData("text/plain", get_hash); | 239 fetcher->SetUploadData("text/plain", get_hash); |
240 fetcher->Start(); | 240 fetcher->Start(); |
241 } | 241 } |
242 | 242 |
243 std::string SafeBrowsingProtocolManager::GetV4HashRequest( | 243 std::string SafeBrowsingProtocolManager::GetV4HashRequest( |
244 const std::vector<SBPrefix>& prefixes, | 244 const std::vector<SBPrefix>& prefixes, |
| 245 const std::vector<PlatformType>& platforms, |
245 ThreatType threat_type) { | 246 ThreatType threat_type) { |
246 // Build the request. Client info and client states are not added to the | 247 // Build the request. Client info and client states are not added to the |
247 // request protocol buffer. Client info is passed as params in the url. | 248 // request protocol buffer. Client info is passed as params in the url. |
248 FindFullHashesRequest req; | 249 FindFullHashesRequest req; |
249 ThreatInfo* info = req.mutable_threat_info(); | 250 ThreatInfo* info = req.mutable_threat_info(); |
250 info->add_threat_types(threat_type); | 251 info->add_threat_types(threat_type); |
251 info->add_platform_types(CHROME_PLATFORM); | |
252 info->add_threat_entry_types(URL_EXPRESSION); | 252 info->add_threat_entry_types(URL_EXPRESSION); |
| 253 for (const PlatformType p : platforms) { |
| 254 info->add_platform_types(p); |
| 255 } |
253 for (const SBPrefix& prefix : prefixes) { | 256 for (const SBPrefix& prefix : prefixes) { |
254 std::string hash(reinterpret_cast<const char*>(&prefix), sizeof(SBPrefix)); | 257 std::string hash(reinterpret_cast<const char*>(&prefix), sizeof(SBPrefix)); |
255 info->add_threat_entries()->set_hash(hash); | 258 info->add_threat_entries()->set_hash(hash); |
256 } | 259 } |
257 | 260 |
258 // Serialize and Base64 encode. | 261 // Serialize and Base64 encode. |
259 std::string req_data, req_base64; | 262 std::string req_data, req_base64; |
260 req.SerializeToString(&req_data); | 263 req.SerializeToString(&req_data); |
261 base::Base64Encode(req_data, &req_base64); | 264 base::Base64Encode(req_data, &req_base64); |
262 | 265 |
263 return req_base64; | 266 return req_base64; |
264 } | 267 } |
265 | 268 |
| 269 bool SafeBrowsingProtocolManager::ParseV4HashResponse( |
| 270 const std::string& data, |
| 271 std::vector<SBFullHashResult>* full_hashes, |
| 272 base::TimeDelta* negative_cache_duration) { |
| 273 FindFullHashesResponse response; |
| 274 |
| 275 if (!response.ParseFromString(data)) { |
| 276 // TODO(kcarattini): Add UMA. |
| 277 return false; |
| 278 } |
| 279 |
| 280 if (response.has_negative_cache_duration()) { |
| 281 // Seconds resolution is good enough so we ignore the nanos field. |
| 282 *negative_cache_duration = base::TimeDelta::FromSeconds( |
| 283 response.negative_cache_duration().seconds()); |
| 284 } |
| 285 |
| 286 // Loop over the threat matches and fill in full_hashes. |
| 287 for (const ThreatMatch& match : response.matches()) { |
| 288 // Make sure the threat entry type is URL_EXPRESSION and a threat is set. |
| 289 if (!(match.has_threat_entry_type() && |
| 290 match.threat_entry_type() == URL_EXPRESSION && |
| 291 match.has_threat())) { |
| 292 continue; |
| 293 } |
| 294 |
| 295 // Fill in the full hash. |
| 296 SBFullHashResult result; |
| 297 result.hash = StringToSBFullHash(match.threat().hash()); |
| 298 |
| 299 if (match.has_cache_duration()) { |
| 300 // Seconds resolution is good enough so we ignore the nanos field. |
| 301 result.cache_duration = base::TimeDelta::FromSeconds( |
| 302 match.cache_duration().seconds()); |
| 303 } |
| 304 |
| 305 // Different threat types will handle the metadata differently. |
| 306 if (match.has_threat_type() && match.threat_type() == API_ABUSE && |
| 307 match.has_platform_type() && |
| 308 match.platform_type() == CHROME_PLATFORM && |
| 309 match.has_threat_entry_metadata()) { |
| 310 // For API Abuse, store a csv of the returned permissions. |
| 311 for (const ThreatEntryMetadata::MetadataEntry& m : |
| 312 match.threat_entry_metadata().entries()) { |
| 313 if (m.key() == "permission") { |
| 314 result.metadata += m.value() + ","; |
| 315 } |
| 316 } |
| 317 } else { |
| 318 // TODO(kcarattini): Add UMA for unexpected threat type match. |
| 319 return false; |
| 320 } |
| 321 |
| 322 full_hashes->push_back(result); |
| 323 } |
| 324 return true; |
| 325 } |
| 326 |
266 void SafeBrowsingProtocolManager::GetV4FullHashes( | 327 void SafeBrowsingProtocolManager::GetV4FullHashes( |
267 const std::vector<SBPrefix>& prefixes, | 328 const std::vector<SBPrefix>& prefixes, |
| 329 const std::vector<PlatformType>& platforms, |
268 ThreatType threat_type, | 330 ThreatType threat_type, |
269 FullHashCallback callback) { | 331 FullHashCallback callback) { |
270 DCHECK(CalledOnValidThread()); | 332 DCHECK(CalledOnValidThread()); |
271 // TODO(kcarattini): Implement backoff behavior. | 333 // TODO(kcarattini): Implement backoff behavior. |
272 | 334 |
273 std::string req_base64 = GetV4HashRequest(prefixes, threat_type); | 335 std::string req_base64 = GetV4HashRequest(prefixes, platforms, threat_type); |
274 GURL gethash_url = GetV4HashUrl(req_base64); | 336 GURL gethash_url = GetV4HashUrl(req_base64); |
275 | 337 |
276 net::URLFetcher* fetcher = | 338 net::URLFetcher* fetcher = |
277 net::URLFetcher::Create(url_fetcher_id_++, gethash_url, | 339 net::URLFetcher::Create(url_fetcher_id_++, gethash_url, |
278 net::URLFetcher::GET, this) | 340 net::URLFetcher::GET, this) |
279 .release(); | 341 .release(); |
280 // TODO(kcarattini): Implement a new response processor. | |
281 v4_hash_requests_[fetcher] = FullHashDetails(callback, | 342 v4_hash_requests_[fetcher] = FullHashDetails(callback, |
282 false /* is_download */); | 343 false /* is_download */); |
283 | 344 |
284 fetcher->SetLoadFlags(net::LOAD_DISABLE_CACHE); | 345 fetcher->SetLoadFlags(net::LOAD_DISABLE_CACHE); |
285 fetcher->SetRequestContext(request_context_getter_.get()); | 346 fetcher->SetRequestContext(request_context_getter_.get()); |
286 fetcher->Start(); | 347 fetcher->Start(); |
287 } | 348 } |
288 | 349 |
289 void SafeBrowsingProtocolManager::GetFullHashesWithApis( | 350 void SafeBrowsingProtocolManager::GetFullHashesWithApis( |
290 const std::vector<SBPrefix>& prefixes, | 351 const std::vector<SBPrefix>& prefixes, |
291 FullHashCallback callback) { | 352 FullHashCallback callback) { |
292 GetV4FullHashes(prefixes, API_ABUSE, callback); | 353 std::vector<PlatformType> platform = {CHROME_PLATFORM}; |
| 354 GetV4FullHashes(prefixes, platform, API_ABUSE, callback); |
293 } | 355 } |
294 | 356 |
295 void SafeBrowsingProtocolManager::GetNextUpdate() { | 357 void SafeBrowsingProtocolManager::GetNextUpdate() { |
296 DCHECK(CalledOnValidThread()); | 358 DCHECK(CalledOnValidThread()); |
297 if (request_.get() || request_type_ != NO_REQUEST) | 359 if (request_.get() || request_type_ != NO_REQUEST) |
298 return; | 360 return; |
299 | 361 |
300 IssueUpdateRequest(); | 362 IssueUpdateRequest(); |
301 } | 363 } |
302 | 364 |
303 // net::URLFetcherDelegate implementation ---------------------------------- | 365 // net::URLFetcherDelegate implementation ---------------------------------- |
304 | 366 |
305 // All SafeBrowsing request responses are handled here. | 367 // All SafeBrowsing request responses are handled here. |
306 // TODO(paulg): Clarify with the SafeBrowsing team whether a failed parse of a | 368 // TODO(paulg): Clarify with the SafeBrowsing team whether a failed parse of a |
307 // chunk should retry the download and parse of that chunk (and | 369 // chunk should retry the download and parse of that chunk (and |
308 // what back off / how many times to try), and if that affects the | 370 // what back off / how many times to try), and if that affects the |
309 // update back off. For now, a failed parse of the chunk means we | 371 // update back off. For now, a failed parse of the chunk means we |
310 // drop it. This isn't so bad because the next UPDATE_REQUEST we | 372 // drop it. This isn't so bad because the next UPDATE_REQUEST we |
311 // do will report all the chunks we have. If that chunk is still | 373 // do will report all the chunks we have. If that chunk is still |
312 // required, the SafeBrowsing servers will tell us to get it again. | 374 // required, the SafeBrowsing servers will tell us to get it again. |
313 void SafeBrowsingProtocolManager::OnURLFetchComplete( | 375 void SafeBrowsingProtocolManager::OnURLFetchComplete( |
314 const net::URLFetcher* source) { | 376 const net::URLFetcher* source) { |
315 DCHECK(CalledOnValidThread()); | 377 DCHECK(CalledOnValidThread()); |
316 scoped_ptr<const net::URLFetcher> fetcher; | 378 scoped_ptr<const net::URLFetcher> fetcher; |
317 | 379 |
318 HashRequests::iterator it = hash_requests_.find(source); | 380 HashRequests::iterator it = hash_requests_.find(source); |
| 381 HashRequests::iterator v4_it = v4_hash_requests_.find(source); |
319 int response_code = source->GetResponseCode(); | 382 int response_code = source->GetResponseCode(); |
320 net::URLRequestStatus status = source->GetStatus(); | 383 net::URLRequestStatus status = source->GetStatus(); |
321 | 384 |
322 if (it != hash_requests_.end()) { | 385 if (it != hash_requests_.end()) { |
323 // GetHash response. | 386 // GetHash response. |
324 RecordHttpResponseOrErrorCode(kGetHashUmaResponseMetricName, status, | 387 RecordHttpResponseOrErrorCode(kGetHashUmaResponseMetricName, status, |
325 response_code); | 388 response_code); |
326 fetcher.reset(it->first); | |
327 const FullHashDetails& details = it->second; | 389 const FullHashDetails& details = it->second; |
328 std::vector<SBFullHashResult> full_hashes; | 390 std::vector<SBFullHashResult> full_hashes; |
329 base::TimeDelta cache_lifetime; | 391 base::TimeDelta cache_lifetime; |
330 if (status.is_success() && (response_code == net::HTTP_OK || | 392 if (status.is_success() && (response_code == net::HTTP_OK || |
331 response_code == net::HTTP_NO_CONTENT)) { | 393 response_code == net::HTTP_NO_CONTENT)) { |
332 // For tracking our GetHash false positive (net::HTTP_NO_CONTENT) rate, | 394 // For tracking our GetHash false positive (net::HTTP_NO_CONTENT) rate, |
333 // compared to real (net::HTTP_OK) responses. | 395 // compared to real (net::HTTP_OK) responses. |
334 if (response_code == net::HTTP_OK) | 396 if (response_code == net::HTTP_OK) |
335 RecordGetHashResult(details.is_download, GET_HASH_STATUS_200); | 397 RecordGetHashResult(details.is_download, GET_HASH_STATUS_200); |
336 else | 398 else |
(...skipping 22 matching lines...) |
359 << " failed with error: " << response_code; | 421 << " failed with error: " << response_code; |
360 } | 422 } |
361 } | 423 } |
362 | 424 |
363 // Invoke the callback with full_hashes, even if there was a parse error or | 425 // Invoke the callback with full_hashes, even if there was a parse error or |
364 // an error response code (in which case full_hashes will be empty). The | 426 // an error response code (in which case full_hashes will be empty). The |
365 // caller can't be blocked indefinitely. | 427 // caller can't be blocked indefinitely. |
366 details.callback.Run(full_hashes, cache_lifetime); | 428 details.callback.Run(full_hashes, cache_lifetime); |
367 | 429 |
368 hash_requests_.erase(it); | 430 hash_requests_.erase(it); |
| 431 } else if (v4_it != v4_hash_requests_.end()) { |
| 432 // V4 FindFullHashes response. |
| 433 const FullHashDetails& details = v4_it->second; |
| 434 std::vector<SBFullHashResult> full_hashes; |
| 435 base::TimeDelta negative_cache_duration; |
| 436 if (status.is_success() && response_code == net::HTTP_OK) { |
| 437 // TODO(kcarattini): Add UMA reporting. |
| 438 // TODO(kcarattini): Implement backoff and minimum waiting duration |
| 439 // compliance. |
| 440 std::string data; |
| 441 source->GetResponseAsString(&data); |
| 442 if (!ParseV4HashResponse(data, &full_hashes, &negative_cache_duration)) { |
| 443 full_hashes.clear(); |
| 444 // TODO(kcarattini): Add UMA reporting. |
| 445 } |
| 446 } else { |
| 447 // TODO(kcarattini): Handle error by setting backoff interval. |
| 448 // TODO(kcarattini): Add UMA reporting. |
| 449 DVLOG(1) << "SafeBrowsing GetEncodedFullHashes request for: " << |
| 450 source->GetURL() << " failed with error: " << status.error() << |
| 451 " and response code: " << response_code; |
| 452 } |
| 453 |
| 454 // Invoke the callback with full_hashes, even if there was a parse error or |
| 455 // an error response code (in which case full_hashes will be empty). The |
| 456 // caller can't be blocked indefinitely. |
| 457 details.callback.Run(full_hashes, negative_cache_duration); |
| 458 |
| 459 v4_hash_requests_.erase(v4_it); |
369 } else { | 460 } else { |
370 // Update or chunk response. | 461 // Update or chunk response. |
371 RecordHttpResponseOrErrorCode(kGetChunkUmaResponseMetricName, status, | 462 RecordHttpResponseOrErrorCode(kGetChunkUmaResponseMetricName, status, |
372 response_code); | 463 response_code); |
373 fetcher.reset(request_.release()); | 464 fetcher.reset(request_.release()); |
374 | 465 |
375 if (request_type_ == UPDATE_REQUEST || | 466 if (request_type_ == UPDATE_REQUEST || |
376 request_type_ == BACKUP_UPDATE_REQUEST) { | 467 request_type_ == BACKUP_UPDATE_REQUEST) { |
377 if (!fetcher.get()) { | 468 if (!fetcher.get()) { |
378 // We've timed out waiting for an update response, so we've cancelled | 469 // We've timed out waiting for an update response, so we've cancelled |
(...skipping 488 matching lines...) |
867 SafeBrowsingProtocolManager::FullHashDetails::FullHashDetails( | 958 SafeBrowsingProtocolManager::FullHashDetails::FullHashDetails( |
868 FullHashCallback callback, | 959 FullHashCallback callback, |
869 bool is_download) | 960 bool is_download) |
870 : callback(callback), is_download(is_download) {} | 961 : callback(callback), is_download(is_download) {} |
871 | 962 |
872 SafeBrowsingProtocolManager::FullHashDetails::~FullHashDetails() {} | 963 SafeBrowsingProtocolManager::FullHashDetails::~FullHashDetails() {} |
873 | 964 |
874 SafeBrowsingProtocolManagerDelegate::~SafeBrowsingProtocolManagerDelegate() {} | 965 SafeBrowsingProtocolManagerDelegate::~SafeBrowsingProtocolManagerDelegate() {} |
875 | 966 |
876 } // namespace safe_browsing | 967 } // namespace safe_browsing |
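
For context, a minimal caller-side sketch (illustrative only, not part of this CL) of how the API-abuse lookup added above might be exercised. It assumes FullHashCallback takes (const std::vector<SBFullHashResult>&, const base::TimeDelta&), consistent with the details.callback.Run(full_hashes, negative_cache_duration) call in OnURLFetchComplete(); the function names below are hypothetical.

// Illustrative sketch only; not part of the CL above. Assumes the includes
// already present in protocol_manager.cc plus base/bind.h.
void OnApiAbuseFullHashes(const std::vector<SBFullHashResult>& full_hashes,
                          const base::TimeDelta& negative_cache_duration) {
  for (const SBFullHashResult& result : full_hashes) {
    // For API_ABUSE matches, ParseV4HashResponse() stores a comma-separated
    // list of permissions in result.metadata.
    DVLOG(1) << "API abuse match, permissions: " << result.metadata;
  }
}

void LookUpApiAbusePrefixes(SafeBrowsingProtocolManager* protocol_manager,
                            const std::vector<SBPrefix>& prefixes) {
  protocol_manager->GetFullHashesWithApis(prefixes,
                                          base::Bind(&OnApiAbuseFullHashes));
}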