| OLD | NEW |
| 1 // Copyright 2013 The Chromium Authors. All rights reserved. | 1 // Copyright 2013 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "extensions/common/csp_validator.h" | 5 #include "extensions/common/csp_validator.h" |
| 6 | 6 |
| 7 #include <vector> | 7 #include <vector> |
| 8 | 8 |
| 9 #include "base/strings/string_split.h" | 9 #include "base/strings/string_split.h" |
| 10 #include "base/strings/string_tokenizer.h" | 10 #include "base/strings/string_tokenizer.h" |
| (...skipping 24 matching lines...) | (...skipping 24 matching lines...) |
| 35 | 35 |
| 36 const char* directive_name; | 36 const char* directive_name; |
| 37 bool seen_in_policy; | 37 bool seen_in_policy; |
| 38 bool is_secure; | 38 bool is_secure; |
| 39 }; | 39 }; |
| 40 | 40 |
| 41 bool HasOnlySecureTokens(base::StringTokenizer& tokenizer, | 41 bool HasOnlySecureTokens(base::StringTokenizer& tokenizer, |
| 42 Manifest::Type type) { | 42 Manifest::Type type) { |
| 43 while (tokenizer.GetNext()) { | 43 while (tokenizer.GetNext()) { |
| 44 std::string source = tokenizer.token(); | 44 std::string source = tokenizer.token(); |
| 45 StringToLowerASCII(&source); | 45 base::StringToLowerASCII(&source); |
| 46 | 46 |
| 47 // Don't allow whitelisting of all hosts. This boils down to: | 47 // Don't allow whitelisting of all hosts. This boils down to: |
| 48 // 1. Maximum of 2 '*' characters. | 48 // 1. Maximum of 2 '*' characters. |
| 49 // 2. Each '*' is either followed by a '.' or preceded by a ':' | 49 // 2. Each '*' is either followed by a '.' or preceded by a ':' |
| 50 int wildcards = 0; | 50 int wildcards = 0; |
| 51 size_t length = source.length(); | 51 size_t length = source.length(); |
| 52 for (size_t i = 0; i < length; ++i) { | 52 for (size_t i = 0; i < length; ++i) { |
| 53 if (source[i] == L'*') { | 53 if (source[i] == L'*') { |
| 54 wildcards++; | 54 wildcards++; |
| 55 if (wildcards > 2) | 55 if (wildcards > 2) |
| (...skipping 73 matching lines...) | (...skipping 73 matching lines...) |
| 129 DirectiveStatus script_src_status(kScriptSrc); | 129 DirectiveStatus script_src_status(kScriptSrc); |
| 130 DirectiveStatus object_src_status(kObjectSrc); | 130 DirectiveStatus object_src_status(kObjectSrc); |
| 131 | 131 |
| 132 for (size_t i = 0; i < directives.size(); ++i) { | 132 for (size_t i = 0; i < directives.size(); ++i) { |
| 133 std::string& input = directives[i]; | 133 std::string& input = directives[i]; |
| 134 base::StringTokenizer tokenizer(input, " \t\r\n"); | 134 base::StringTokenizer tokenizer(input, " \t\r\n"); |
| 135 if (!tokenizer.GetNext()) | 135 if (!tokenizer.GetNext()) |
| 136 continue; | 136 continue; |
| 137 | 137 |
| 138 std::string directive_name = tokenizer.token(); | 138 std::string directive_name = tokenizer.token(); |
| 139 StringToLowerASCII(&directive_name); | 139 base::StringToLowerASCII(&directive_name); |
| 140 | 140 |
| 141 if (UpdateStatus(directive_name, tokenizer, &default_src_status, type)) | 141 if (UpdateStatus(directive_name, tokenizer, &default_src_status, type)) |
| 142 continue; | 142 continue; |
| 143 if (UpdateStatus(directive_name, tokenizer, &script_src_status, type)) | 143 if (UpdateStatus(directive_name, tokenizer, &script_src_status, type)) |
| 144 continue; | 144 continue; |
| 145 if (UpdateStatus(directive_name, tokenizer, &object_src_status, type)) | 145 if (UpdateStatus(directive_name, tokenizer, &object_src_status, type)) |
| 146 continue; | 146 continue; |
| 147 } | 147 } |
| 148 | 148 |
| 149 if (script_src_status.seen_in_policy && !script_src_status.is_secure) | 149 if (script_src_status.seen_in_policy && !script_src_status.is_secure) |
| (...skipping 19 matching lines...) | (...skipping 19 matching lines...) |
| 169 | 169 |
| 170 bool seen_sandbox = false; | 170 bool seen_sandbox = false; |
| 171 | 171 |
| 172 for (size_t i = 0; i < directives.size(); ++i) { | 172 for (size_t i = 0; i < directives.size(); ++i) { |
| 173 std::string& input = directives[i]; | 173 std::string& input = directives[i]; |
| 174 base::StringTokenizer tokenizer(input, " \t\r\n"); | 174 base::StringTokenizer tokenizer(input, " \t\r\n"); |
| 175 if (!tokenizer.GetNext()) | 175 if (!tokenizer.GetNext()) |
| 176 continue; | 176 continue; |
| 177 | 177 |
| 178 std::string directive_name = tokenizer.token(); | 178 std::string directive_name = tokenizer.token(); |
| 179 StringToLowerASCII(&directive_name); | 179 base::StringToLowerASCII(&directive_name); |
| 180 | 180 |
| 181 if (directive_name != kSandboxDirectiveName) | 181 if (directive_name != kSandboxDirectiveName) |
| 182 continue; | 182 continue; |
| 183 | 183 |
| 184 seen_sandbox = true; | 184 seen_sandbox = true; |
| 185 | 185 |
| 186 while (tokenizer.GetNext()) { | 186 while (tokenizer.GetNext()) { |
| 187 std::string token = tokenizer.token(); | 187 std::string token = tokenizer.token(); |
| 188 StringToLowerASCII(&token); | 188 base::StringToLowerASCII(&token); |
| 189 | 189 |
| 190 // The same origin token negates the sandboxing. | 190 // The same origin token negates the sandboxing. |
| 191 if (token == kAllowSameOriginToken) | 191 if (token == kAllowSameOriginToken) |
| 192 return false; | 192 return false; |
| 193 | 193 |
| 194 // Platform apps don't allow navigation. | 194 // Platform apps don't allow navigation. |
| 195 if (type == Manifest::TYPE_PLATFORM_APP) { | 195 if (type == Manifest::TYPE_PLATFORM_APP) { |
| 196 if (token == kAllowTopNavigation) | 196 if (token == kAllowTopNavigation) |
| 197 return false; | 197 return false; |
| 198 } | 198 } |
| 199 } | 199 } |
| 200 } | 200 } |
| 201 | 201 |
| 202 return seen_sandbox; | 202 return seen_sandbox; |
| 203 } | 203 } |
| 204 | 204 |
| 205 } // namespace csp_validator | 205 } // namespace csp_validator |
| 206 | 206 |
| 207 } // namespace extensions | 207 } // namespace extensions |
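
The change itself only adds the `base::` namespace qualification to `StringToLowerASCII`; the surrounding validation logic is unchanged. As a reading aid, the sketch below restates the two rules visible in the unchanged context: the host-wildcard restriction from `HasOnlySecureTokens` (at most two `'*'` characters, each followed by `'.'` or preceded by `':'`) and the sandbox-token check at the end of the file. The helper names and the simplified token handling are illustrative assumptions, not the Chromium API; the real code works on a `base::StringTokenizer` and a `Manifest::Type`.

```cpp
// Standalone sketch of the two rules visible in this diff.
// Names are hypothetical; the real validator lives in
// extensions/common/csp_validator.cc.
#include <cstddef>
#include <iostream>
#include <sstream>
#include <string>

// Rule from HasOnlySecureTokens: a source must not whitelist all hosts,
// i.e. at most two '*' characters, each either followed by '.' or
// preceded by ':'.
bool WildcardUsageIsRestricted(const std::string& source) {
  int wildcards = 0;
  for (std::size_t i = 0; i < source.length(); ++i) {
    if (source[i] != '*')
      continue;
    if (++wildcards > 2)
      return false;
    const bool followed_by_dot =
        i + 1 < source.length() && source[i + 1] == '.';
    const bool preceded_by_colon = i > 0 && source[i - 1] == ':';
    if (!followed_by_dot && !preceded_by_colon)
      return false;
  }
  return true;
}

// Rule from the sandbox check: a "sandbox" directive only counts as
// sandboxed if its token list does not re-enable the origin
// ("allow-same-origin") or, for platform apps, top navigation.
bool SandboxTokensAreAcceptable(const std::string& tokens,
                                bool is_platform_app) {
  std::istringstream stream(tokens);
  std::string token;
  while (stream >> token) {
    if (token == "allow-same-origin")
      return false;
    if (is_platform_app && token == "allow-top-navigation")
      return false;
  }
  return true;
}

int main() {
  std::cout << WildcardUsageIsRestricted("https://*.example.com")       // 1
            << WildcardUsageIsRestricted("https://*")                   // 0
            << SandboxTokensAreAcceptable("allow-scripts", true)        // 1
            << SandboxTokensAreAcceptable("allow-same-origin", false)   // 0
            << "\n";
  return 0;
}
```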