OLD | NEW |
1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
2 # Copyright 2013 The Chromium Authors. All rights reserved. | 2 # Copyright 2013 The Chromium Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 | 5 |
6 # Run build_server so that files needed by tests are copied to the local | 6 # Run build_server so that files needed by tests are copied to the local |
7 # third_party directory. | 7 # third_party directory. |
8 import build_server | 8 import build_server |
9 build_server.main() | 9 build_server.main() |
10 | 10 |
11 import json | 11 import json |
12 import optparse | 12 import optparse |
13 import os | 13 import os |
14 import posixpath | 14 import posixpath |
15 import sys | 15 import sys |
16 import time | 16 import time |
17 import unittest | 17 import unittest |
18 | 18 |
19 from branch_utility import BranchUtility | 19 from branch_utility import BranchUtility |
20 from chroot_file_system import ChrootFileSystem | 20 from chroot_file_system import ChrootFileSystem |
21 from extensions_paths import CONTENT_PROVIDERS, EXTENSIONS, PUBLIC_TEMPLATES | 21 from extensions_paths import CONTENT_PROVIDERS, EXTENSIONS, PUBLIC_TEMPLATES |
22 from fake_fetchers import ConfigureFakeFetchers | 22 from fake_fetchers import ConfigureFakeFetchers |
23 from third_party.json_schema_compiler import json_parse | 23 from special_paths import SITE_VERIFICATION_FILE |
24 from handler import Handler | 24 from handler import Handler |
25 from link_error_detector import LinkErrorDetector, StringifyBrokenLinks | 25 from link_error_detector import LinkErrorDetector, StringifyBrokenLinks |
26 from local_file_system import LocalFileSystem | 26 from local_file_system import LocalFileSystem |
27 from local_renderer import LocalRenderer | 27 from local_renderer import LocalRenderer |
28 from path_util import AssertIsValid | 28 from path_util import AssertIsValid |
29 from servlet import Request | 29 from servlet import Request |
| 30 from third_party.json_schema_compiler import json_parse |
30 from test_util import ( | 31 from test_util import ( |
31 ChromiumPath, DisableLogging, EnableLogging, ReadFile, Server2Path) | 32 ChromiumPath, DisableLogging, EnableLogging, ReadFile, Server2Path) |
32 | 33 |
33 | 34 |
34 # Arguments set up if __main__ specifies them. | 35 # Arguments set up if __main__ specifies them. |
35 _EXPLICIT_TEST_FILES = None | 36 _EXPLICIT_TEST_FILES = None |
36 _REBASE = False | 37 _REBASE = False |
37 _VERBOSE = False | 38 _VERBOSE = False |
38 | 39 |
39 | 40 |
(...skipping 19 matching lines...) |
59 public_files = {} | 60 public_files = {} |
60 for root, dirs, files in os.walk(path, topdown=True): | 61 for root, dirs, files in os.walk(path, topdown=True): |
61 relative_root = root[len(path):].lstrip(os.path.sep) | 62 relative_root = root[len(path):].lstrip(os.path.sep) |
62 dirs[:] = _FilterHidden(dirs) | 63 dirs[:] = _FilterHidden(dirs) |
63 for filename in _FilterHidden(files): | 64 for filename in _FilterHidden(files): |
64 with open(os.path.join(root, filename), 'r') as f: | 65 with open(os.path.join(root, filename), 'r') as f: |
65 request_path = posixpath.join(prefix, relative_root, filename) | 66 request_path = posixpath.join(prefix, relative_root, filename) |
66 public_files[request_path] = f.read() | 67 public_files[request_path] = f.read() |
67 return public_files | 68 return public_files |
68 | 69 |
69 # Public file locations are defined in content_providers.json, sort of. Epic | 70 # Public file locations are defined in content_providers.json, sort of. Epic |
70 # hack to pull them out; list all the files from the directories that | 71 # hack to pull them out; list all the files from the directories that |
71 # Chromium content providers ask for. | 72 # Chromium content providers ask for. |
72 public_files = {} | 73 public_files = {} |
73 content_providers = json_parse.Parse(ReadFile(CONTENT_PROVIDERS)) | 74 content_providers = json_parse.Parse(ReadFile(CONTENT_PROVIDERS)) |
74 for content_provider in content_providers.itervalues(): | 75 for content_provider in content_providers.itervalues(): |
75 if 'chromium' in content_provider: | 76 if 'chromium' in content_provider: |
76 public_files.update(walk(content_provider['chromium']['dir'], | 77 public_files.update(walk(content_provider['chromium']['dir'], |
77 prefix=content_provider['serveFrom'])) | 78 prefix=content_provider['serveFrom'])) |
78 return public_files | 79 return public_files |
79 | 80 |
(...skipping 72 matching lines...) |
152 public_files = _GetPublicFiles() | 153 public_files = _GetPublicFiles() |
153 | 154 |
154 print('Rendering %s public files...' % len(public_files.keys())) | 155 print('Rendering %s public files...' % len(public_files.keys())) |
155 start_time = time.time() | 156 start_time = time.time() |
156 try: | 157 try: |
157 for path, content in public_files.iteritems(): | 158 for path, content in public_files.iteritems(): |
158 AssertIsValid(path) | 159 AssertIsValid(path) |
159 if path.endswith('redirects.json'): | 160 if path.endswith('redirects.json'): |
160 continue | 161 continue |
161 | 162 |
| 163 # The non-example html and md files are served without their file |
| 164 # extensions. |
| 165 path_without_ext, ext = posixpath.splitext(path) |
| 166 if (ext in ('.html', '.md') and |
| 167 '/examples/' not in path and |
| 168 path != SITE_VERIFICATION_FILE): |
| 169 path = path_without_ext |
| 170 |
162 def check_result(response): | 171 def check_result(response): |
163 self.assertEqual(200, response.status, | 172 self.assertEqual(200, response.status, |
164 'Got %s when rendering %s' % (response.status, path)) | 173 'Got %s when rendering %s' % (response.status, path)) |
| 174 |
165 # This is reaaaaally rough since usually these will be tiny templates | 175 # This is reaaaaally rough since usually these will be tiny templates |
166 # that render large files. At least it'll catch zero-length responses. | 176 # that render large files. At least it'll catch zero-length responses. |
167 self.assertTrue(len(response.content) >= len(content), | 177 self.assertTrue(len(response.content) >= len(content), |
168 'Rendered content length was %s vs template content length %s ' | 178 'Rendered content length was %s vs template content length %s ' |
169 'when rendering %s' % (len(response.content), len(content), path)) | 179 'when rendering %s' % (len(response.content), len(content), path)) |
170 | 180 |
171 check_result(Handler(Request.ForTest(path)).Get()) | 181 check_result(Handler(Request.ForTest(path)).Get()) |
172 | 182 |
173 if path.startswith(('apps/', 'extensions/')): | 183 if path.startswith(('apps/', 'extensions/')): |
174 # Make sure that leaving out the .html will temporarily redirect to | 184 # Make sure that adding the .html will temporarily redirect to |
175 # the path with the .html for APIs and articles. | 185 # the path without the .html for APIs and articles. |
176 if '/examples/' not in path: | 186 if '/examples/' not in path: |
177 base, _ = posixpath.splitext(path) | 187 redirect_response = Handler(Request.ForTest(path + '.html')).Get() |
178 self.assertEqual( | 188 self.assertEqual( |
179 ('/' + path, False), | 189 ('/' + path, False), redirect_response.GetRedirect(), |
180 Handler(Request.ForTest(base)).Get().GetRedirect(), | 190 '%s.html did not (temporarily) redirect to %s (status %s)' % |
181 '%s did not (temporarily) redirect to %s.html' % (path, path)) | 191 (path, path, redirect_response.status)) |
182 | 192 |
183 # Make sure including a channel will permanently redirect to the same | 193 # Make sure including a channel will permanently redirect to the same |
184 # path without a channel. | 194 # path without a channel. |
185 for channel in BranchUtility.GetAllChannelNames(): | 195 for channel in BranchUtility.GetAllChannelNames(): |
186 redirect_result = Handler( | 196 redirect_response = Handler( |
187 Request.ForTest(posixpath.join(channel, path))).Get() | 197 Request.ForTest(posixpath.join(channel, path))).Get() |
188 self.assertEqual( | 198 self.assertEqual( |
189 ('/' + path, True), | 199 ('/' + path, True), |
190 redirect_result.GetRedirect(), | 200 redirect_response.GetRedirect(), |
191 '%s did not redirect to strip channel %s' % (path, channel)) | 201 '%s/%s did not (permanently) redirect to %s (status %s)' % |
| 202 (channel, path, path, redirect_response.status)) |
192 | 203 |
193 # Samples are internationalized, test some locales. | 204 # Samples are internationalized, test some locales. |
194 if path.endswith('/samples.html'): | 205 if path.endswith('/samples'): |
195 for lang in ('en-US', 'es', 'ar'): | 206 for lang in ('en-US', 'es', 'ar'): |
196 check_result(Handler(Request.ForTest( | 207 check_result(Handler(Request.ForTest( |
197 path, | 208 path, |
198 headers={'Accept-Language': '%s;q=0.8' % lang})).Get()) | 209 headers={'Accept-Language': '%s;q=0.8' % lang})).Get()) |
199 finally: | 210 finally: |
200 print('Took %s seconds' % (time.time() - start_time)) | 211 print('Took %s seconds' % (time.time() - start_time)) |
201 | 212 |
202 #if _REBASE: | 213 #if _REBASE: |
203 # print('Rebasing broken links with %s newly broken and %s fixed links.' % | 214 # print('Rebasing broken links with %s newly broken and %s fixed links.' % |
204 # (len(newly_broken_links), len(fixed_links))) | 215 # (len(newly_broken_links), len(fixed_links))) |
(...skipping 33 matching lines...) |
238 self.assertEqual(200, response.status) | 249 self.assertEqual(200, response.status) |
239 self.assertTrue(response.content != '') | 250 self.assertTrue(response.content != '') |
240 finally: | 251 finally: |
241 print('Took %s seconds' % (time.time() - start_time)) | 252 print('Took %s seconds' % (time.time() - start_time)) |
242 | 253 |
243 # TODO(jshumway): Check page for broken links (currently prohibited by the | 254 # TODO(jshumway): Check page for broken links (currently prohibited by the |
244 # time it takes to render the pages). | 255 # time it takes to render the pages). |
245 | 256 |
246 @DisableLogging('warning') | 257 @DisableLogging('warning') |
247 def testFileNotFound(self): | 258 def testFileNotFound(self): |
248 response = Handler(Request.ForTest('/extensions/notfound.html')).Get() | 259 response = Handler(Request.ForTest('/extensions/notfound')).Get() |
249 self.assertEqual(404, response.status) | 260 self.assertEqual(404, response.status) |
250 | 261 |
| 262 def testSiteVerificationFile(self): |
| 263 response = Handler(Request.ForTest('/' + SITE_VERIFICATION_FILE)).Get() |
| 264 self.assertEqual(200, response.status) |
| 265 |
251 if __name__ == '__main__': | 266 if __name__ == '__main__': |
252 parser = optparse.OptionParser() | 267 parser = optparse.OptionParser() |
253 parser.add_option('-a', '--all', action='store_true', default=False, | 268 parser.add_option('-a', '--all', action='store_true', default=False, |
254 help='Render all pages, not just the one specified') | 269 help='Render all pages, not just the one specified') |
255 parser.add_option('-r', '--rebase', action='store_true', default=False, | 270 parser.add_option('-r', '--rebase', action='store_true', default=False, |
256 help='Rewrites the known_broken_links.json file with ' | 271 help='Rewrites the known_broken_links.json file with ' |
257 'the current set of broken links') | 272 'the current set of broken links') |
258 parser.add_option('-v', '--verbose', action='store_true', default=False, | 273 parser.add_option('-v', '--verbose', action='store_true', default=False, |
259 help='Show verbose output like currently broken links') | 274 help='Show verbose output like currently broken links') |
260 (opts, args) = parser.parse_args() | 275 (opts, args) = parser.parse_args() |
261 if not opts.all: | 276 if not opts.all: |
262 _EXPLICIT_TEST_FILES = args | 277 _EXPLICIT_TEST_FILES = args |
263 _REBASE = opts.rebase | 278 _REBASE = opts.rebase |
264 _VERBOSE = opts.verbose | 279 _VERBOSE = opts.verbose |
265 # Kill sys.argv because we have our own flags. | 280 # Kill sys.argv because we have our own flags. |
266 sys.argv = [sys.argv[0]] | 281 sys.argv = [sys.argv[0]] |
267 unittest.main() | 282 unittest.main() |