Chromium Code Reviews| Index: extensions/browser/api/web_request/web_request_permissions.cc |
| diff --git a/extensions/browser/api/web_request/web_request_permissions.cc b/extensions/browser/api/web_request/web_request_permissions.cc |
| index 8b2f2f49742858767d6cbab0465b34f81dd181df..e9d19d90b9862065a0ff14938ba760efeee97271 100644 |
| --- a/extensions/browser/api/web_request/web_request_permissions.cc |
| +++ b/extensions/browser/api/web_request/web_request_permissions.cc |
| @@ -8,6 +8,7 @@ |
| #include "base/strings/string_util.h" |
| #include "base/strings/stringprintf.h" |
| #include "chromeos/login/login_state.h" |
| +#include "content/public/browser/child_process_security_policy.h" |
| #include "content/public/browser/resource_request_info.h" |
| #include "extensions/browser/extension_navigation_ui_data.h" |
| #include "extensions/browser/guest_view/web_view/web_view_renderer_state.h" |
| @@ -42,11 +43,13 @@ bool g_allow_all_extension_locations_in_public_session = false; |
| // Returns true if the URL is sensitive and requests to this URL must not be |
| // modified/canceled by extensions, e.g. because it is targeted to the webstore |
| // to check for updates, extension blacklisting, etc. |
| -bool IsSensitiveURL(const GURL& url) { |
| +bool IsSensitiveURL(const GURL& url, bool is_request_from_renderer) { |
| // TODO(battre) Merge this, CanExtensionAccessURL and |
| // PermissionsData::CanAccessPage into one function. |
| bool sensitive_chrome_url = false; |
| - const base::StringPiece& host = url.host_piece(); |
| + base::StringPiece host = url.host_piece(); |
| + while (host.ends_with(".")) |
| + host.remove_suffix(1u); |
| const char kGoogleCom[] = "google.com"; |
| const char kClient[] = "clients"; |
| if (url.DomainIs(kGoogleCom)) { |
| @@ -54,23 +57,32 @@ bool IsSensitiveURL(const GURL& url) { |
| // This protects requests to several internal services such as sync, |
| // extension update pings, captive portal detection, fraudulent certificate |
| // reporting, autofill and others. |
| - if (base::StartsWith(host, kClient, base::CompareCase::SENSITIVE)) { |
| + // |
| + // These URLs are only protected for requests from the browser, not |
| + // for requests from renderers, because clients*.google.com is also |
| + // used by websites. |
| + base::StringPiece::size_type pos = host.rfind(kClient); |
|
Devlin
2017/05/04 15:33:29
Is this change to protect subdomains of clients*.g
Devlin
2017/05/04 15:33:29
nit: given the area of code this is called in and
battre
2017/05/04 15:53:46
Sure, will do this if Matt is ok with the general
battre
2017/05/04 15:53:46
Yes, this is to generalize the code. A couple of v
battre
2017/05/10 09:18:38
Done.
|
| + if (!is_request_from_renderer && pos != base::StringPiece::npos) { |
|
mmenke
2017/05/04 15:54:41
So we're only filtering out sensitive requests not
Devlin
2017/05/04 17:31:25
I'm not comfortable with that (yet). We have a *t
mmenke
2017/05/04 17:33:16
So what if those thumbnails are from a clients.goo
Devlin
2017/05/04 18:06:00
For want of a better solution, we'll protect them.
mmenke
2017/05/05 14:35:14
Could we instead have the initiators tag sensitive
Devlin
2017/05/05 16:16:03
Yes - that's what I was mentioning about relying o
mmenke
2017/05/05 16:18:03
Sorry, completely missed that.
battre
2017/05/10 09:18:37
Acknowledged.
|
| bool match = true; |
| - for (base::StringPiece::const_iterator |
| - i = host.begin() + strlen(kClient), |
| - end = host.end() - (strlen(kGoogleCom) + 1); |
| - i != end; ++i) { |
| - if (!isdigit(*i)) { |
| - match = false; |
| - break; |
| + if (pos > 0 && host[pos - 1] != '.') { |
| + match = false; |
| + } else { |
| + for (base::StringPiece::const_iterator |
|
Devlin
2017/05/04 15:33:29
I think this loop is right, but it's kind of a sha
battre
2017/05/10 09:18:37
Leaving this as is. I did not find anything better
|
| + i = host.begin() + pos + strlen(kClient), |
| + end = host.end() - (strlen(kGoogleCom) + 1); |
| + i != end; ++i) { |
| + if (!isdigit(*i)) { |
| + match = false; |
| + break; |
| + } |
| } |
| } |
| sensitive_chrome_url = sensitive_chrome_url || match; |
| } |
| - // This protects requests to safe browsing, link doctor, and possibly |
| - // others. |
| + |
| + // Safebrowsing and Chrome Webstore URLs are also protected for requests |
| + // from renderers. |
| sensitive_chrome_url = sensitive_chrome_url || |
| - url.DomainIs("clients.google.com") || |
| url.DomainIs("sb-ssl.google.com") || |
| (url.DomainIs("chrome.google.com") && |
| base::StartsWith(url.path_piece(), "/webstore", |
| @@ -85,7 +97,7 @@ bool WebRequestPermissions::HideRequest( |
| const extensions::InfoMap* extension_info_map, |
| const net::URLRequest* request, |
| extensions::ExtensionNavigationUIData* navigation_ui_data) { |
| - // Hide requests from the Chrome WebStore App or signin process. |
| + // Hide requests from the Chrome WebStore App, signin process and WebUI. |
| const ResourceRequestInfo* info = ResourceRequestInfo::ForRequest(request); |
| if (info) { |
| int process_id = info->GetChildID(); |
| @@ -100,10 +112,20 @@ bool WebRequestPermissions::HideRequest( |
| process_id)) { |
| return true; |
| } |
| + |
| + // Always hide requests from WebUI. This is ok based on the assumption |
| + // that WebUI and regular sites never share the same renderer process |
| + // and the assumption that WebUI deserves protection from extensions. |
| + if (content::ChildProcessSecurityPolicy::GetInstance()->HasWebUIBindings( |
| + process_id)) { |
|
Devlin
2017/05/04 15:33:29
I'd actually prefer we only do this in the case of
battre
2017/05/04 15:53:46
Sure, will do this if Matt is ok with the general
mmenke
2017/05/04 15:56:33
I'm fine with that.
Devlin
2017/05/04 17:31:25
We could just pass a bool is_webui_process to IsSe
battre
2017/05/10 09:18:37
Done.
|
| + return true; |
| + } |
| } |
| const GURL& url = request->url(); |
| - return IsSensitiveURL(url) || !HasWebRequestScheme(url); |
| + bool is_request_from_renderer = info != nullptr; |
| + return IsSensitiveURL(url, is_request_from_renderer) || |
| + !HasWebRequestScheme(url); |
| } |
| // static |