Chromium Code Reviews

Diff: chrome/common/extensions/docs/server2/integration_test.py

Issue 111643007: Docserver: Make broken link integration test warning more visible. (Closed) Base URL: http://git.chromium.org/chromium/src.git@master
Patch Set: created 6 years, 11 months ago
 #!/usr/bin/env python
 # Copyright 2013 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.

 # Run build_server so that files needed by tests are copied to the local
 # third_party directory.
 import build_server
 build_server.main()

(...skipping 66 matching lines...)
     #print("Checking for broken links...")
     #start_time = time.time()
     #link_error_detector = LinkErrorDetector(
     #  # TODO(kalman): Use of ChrootFileSystem here indicates a hack. Fix.
     #  ChrootFileSystem(LocalFileSystem.Create(), EXTENSIONS),
     #  lambda path: Handler(Request.ForTest(path)).Get(),
     #  'templates/public',
     #  ('extensions/index.html', 'apps/about_apps.html'))

     #broken_links = link_error_detector.GetBrokenLinks()
-    #if broken_links and _VERBOSE:
-    #  print('The broken links are:')
-    #  print(StringifyBrokenLinks(broken_links))
+    #if broken_links:
+    #  print('Found %d broken links.' % (
+    #    len(broken_links)))
+    #  if _VERBOSE:
+    #    print(StringifyBrokenLinks(broken_links))

     #broken_links_set = set(broken_links)

     #known_broken_links_path = os.path.join(
     #  sys.path[0], 'known_broken_links.json')
     #try:
     #  with open(known_broken_links_path, 'r') as f:
     #    # The JSON file converts tuples and sets into lists, and for this
     #    # set union/difference logic they need to be converted back.
     #    known_broken_links = set(tuple(item) for item in json.load(f))
     #except IOError:
     #  known_broken_links = set()

     #newly_broken_links = broken_links_set - known_broken_links
     #fixed_links = known_broken_links - broken_links_set

-    #if _REBASE:
-    #  print('Rebasing broken links with %s newly broken and %s fixed links.' %
-    #    (len(newly_broken_links), len(fixed_links)))
-    #  with open(known_broken_links_path, 'w') as f:
-    #    json.dump(broken_links, f,
-    #      indent=2, separators=(',', ': '), sort_keys=True)
-    #else:
-    #  if fixed_links or newly_broken_links:
-    #    print('Found %s broken links, and some have changed. '
-    #      'If this is acceptable or expected then run %s with the --rebase '
-    #      'option.' % (len(broken_links), os.path.split(__file__)[-1]))
-    #  elif broken_links:
-    #    print('Found %s broken links, but there were no changes.' %
-    #      len(broken_links))
-    #  if fixed_links:
-    #    print('%s broken links have been fixed:' % len(fixed_links))
-    #    print(StringifyBrokenLinks(fixed_links))
-    #  if newly_broken_links:
-    #    print('There are %s new broken links:' % len(newly_broken_links))
-    #    print(StringifyBrokenLinks(newly_broken_links))
-    #  self.fail('See logging for details.')
-
     #print('Took %s seconds.' % (time.time() - start_time))

     #print('Searching for orphaned pages...')
     #start_time = time.time()
     #orphaned_pages = link_error_detector.GetOrphanedPages()
     #if orphaned_pages:
     #  # TODO(jshumway): Test should fail when orphaned pages are detected.
-    #  print('Warning: Found %d orphaned pages:' % len(orphaned_pages))
+    #  print('Found %d orphaned pages:' % len(orphaned_pages))
     #  for page in orphaned_pages:
     #    print(page)
     #print('Took %s seconds.' % (time.time() - start_time))

     public_files = _GetPublicFiles()

     print('Rendering %s public files...' % len(public_files.keys()))
     start_time = time.time()
     try:
       for path, content in public_files.iteritems():
(...skipping 27 matching lines...)

         # Samples are internationalized, test some locales.
         if path.endswith('/samples.html'):
           for lang in ['en-US', 'es', 'ar']:
             check_result(Handler(Request.ForTest(
                 path,
                 headers={'Accept-Language': '%s;q=0.8' % lang})).Get())
     finally:
       print('Took %s seconds' % (time.time() - start_time))

+    #if _REBASE:
+    #  print('Rebasing broken links with %s newly broken and %s fixed links.' %
+    #    (len(newly_broken_links), len(fixed_links)))
+    #  with open(known_broken_links_path, 'w') as f:
+    #    json.dump(broken_links, f,
+    #      indent=2, separators=(',', ': '), sort_keys=True)
+    #else:
+    #  if fixed_links or newly_broken_links:
+    #    print('**********************************************\n'
+    #      'CHANGE DETECTED IN BROKEN LINKS WITHOUT REBASE\n'
+    #      '**********************************************')
+    #    print('Found %s broken links, and some have changed. '
+    #      'If this is acceptable or expected then run %s with the --rebase '
+    #      'option.' % (len(broken_links), os.path.split(__file__)[-1]))
+    #  elif broken_links:
+    #    print('%s existing broken links' % len(broken_links))
+    #  if fixed_links:
+    #    print('%s broken links have been fixed:' % len(fixed_links))
+    #    print(StringifyBrokenLinks(fixed_links))
+    #  if newly_broken_links:
+    #    print('There are %s new broken links:' % len(newly_broken_links))
+    #    print(StringifyBrokenLinks(newly_broken_links))
+    #  self.fail('See logging for details.')
+
   # TODO(kalman): Move this test elsewhere, it's not an integration test.
   # Perhaps like "presubmit_tests" or something.
   def testExplicitFiles(self):
     '''Tests just the files in _EXPLICIT_TEST_FILES.
     '''
     if _EXPLICIT_TEST_FILES is None:
       return
     for filename in _EXPLICIT_TEST_FILES:
       print('Rendering %s...' % filename)
       start_time = time.time()
(...skipping 22 matching lines...)
   parser.add_option('-v', '--verbose', action='store_true', default=False,
                     help='Show verbose output like currently broken links')
   (opts, args) = parser.parse_args()
   if not opts.all:
     _EXPLICIT_TEST_FILES = args
   _REBASE = opts.rebase
   _VERBOSE = opts.verbose
   # Kill sys.argv because we have our own flags.
   sys.argv = [sys.argv[0]]
   unittest.main()
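
The commented-out check above compares the links the detector reports against a checked-in baseline, known_broken_links.json, and either fails the test or, when --rebase is passed, rewrites the baseline. A minimal standalone sketch of that compare-and-rebase flow, using hypothetical helper names (load_known_broken, check_links) that are not part of the docserver code:

import json

def load_known_broken(path):
  # JSON round-trips tuples and sets as lists, so rebuild a set of tuples
  # before doing any set arithmetic (mirrors the comment in the test).
  try:
    with open(path, 'r') as f:
      return set(tuple(item) for item in json.load(f))
  except IOError:
    return set()

def check_links(broken_links, baseline_path, rebase=False, verbose=False):
  known = load_known_broken(baseline_path)
  current = set(broken_links)
  newly_broken = current - known
  fixed = known - current

  if rebase:
    # Overwrite the baseline with whatever is broken right now.
    with open(baseline_path, 'w') as f:
      json.dump(sorted(current), f, indent=2, separators=(',', ': '))
    return True

  if newly_broken or fixed:
    print('%d newly broken and %d fixed links; rerun with --rebase if this '
          'is expected.' % (len(newly_broken), len(fixed)))
    if verbose:
      for link in sorted(newly_broken):
        print(link)
    return False
  return True

Returning a bool keeps the sketch independent of unittest; the real test reports the change via print and fails with self.fail('See logging for details.').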
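The in-file comment about known_broken_links.json points at a general behavior of the json module: dump() writes tuples out as JSON arrays (and refuses raw sets, which must be converted to lists first), while load() hands everything back as lists, so code relying on set union/difference has to rebuild the original types. A small self-contained illustration; the sample link pairs are made up:

import json

links = {('apps/about_apps.html', 'missing.html'),
         ('extensions/index.html', 'gone.html')}

# A set is not directly JSON-serializable, so dump a sorted list of the tuples.
text = json.dumps(sorted(links))

# json.loads returns a list of lists; rebuild the set of tuples before
# comparing against freshly detected links.
restored = set(tuple(item) for item in json.loads(text))
assert restored == links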