OLD | NEW |
| (Empty) |
1 #!/usr/bin/python | |
2 | |
3 """ | |
4 Copyright 2013 Google Inc. | |
5 | |
6 Use of this source code is governed by a BSD-style license that can be | |
7 found in the LICENSE file. | |
8 | |
9 HTTP server for our HTML rebaseline viewer. | |
10 """ | |
11 | |
12 # System-level imports | |
13 import argparse | |
14 import BaseHTTPServer | |
15 import json | |
16 import logging | |
17 import os | |
18 import posixpath | |
19 import re | |
20 import shutil | |
21 import socket | |
22 import subprocess | |
23 import thread | |
24 import threading | |
25 import time | |
26 import urllib | |
27 import urlparse | |
28 | |
29 # Must fix up PYTHONPATH before importing from within Skia | |
30 import rs_fixpypath # pylint: disable=W0611 | |
31 | |
32 # Imports from within Skia | |
33 from py.utils import gs_utils | |
34 import buildbot_globals | |
35 import gm_json | |
36 | |
37 # Imports from local dir | |
38 # | |
39 # pylint: disable=C0301 | |
40 # Note: we import results under a different name, to avoid confusion with the | |
41 # Server.results() property. See discussion at | |
42 # https://codereview.chromium.org/195943004/diff/1/gm/rebaseline_server/server.py#newcode44 | |
43 # pylint: enable=C0301 | |
44 import compare_configs | |
45 import compare_rendered_pictures | |
46 import compare_to_expectations | |
47 import download_actuals | |
48 import imagediffdb | |
49 import imagepairset | |
50 import results as results_mod | |
51 import writable_expectations as writable_expectations_mod | |
52 | |
53 | |
# Splits a request path into (dispatcher_name, remainder),
# e.g. '/results/failures' -> ('results', 'failures').
PATHSPLIT_RE = re.compile('/([^/]+)/(.+)')

# A simple dictionary of file name extensions to MIME types. The empty string
# entry is used as the default when no extension was given or if the extension
# has no entry in this dictionary.
MIME_TYPE_MAP = {'': 'application/octet-stream',
                 'html': 'text/html',
                 'css': 'text/css',
                 'png': 'image/png',
                 'js': 'application/javascript',
                 'json': 'application/json'
                 }

# Keys that server.py uses to create the toplevel content header.
# NOTE: Keep these in sync with static/constants.js
KEY__EDITS__MODIFICATIONS = 'modifications'
KEY__EDITS__OLD_RESULTS_HASH = 'oldResultsHash'
KEY__EDITS__OLD_RESULTS_TYPE = 'oldResultsType'
KEY__LIVE_EDITS__MODIFICATIONS = 'modifications'
KEY__LIVE_EDITS__SET_A_DESCRIPTIONS = 'setA'
KEY__LIVE_EDITS__SET_B_DESCRIPTIONS = 'setB'

# Defaults re-exported from helper modules so callers (e.g. the command-line
# parser in main()) only need to import this module.
DEFAULT_ACTUALS_DIR = results_mod.DEFAULT_ACTUALS_DIR
DEFAULT_GM_SUMMARIES_BUCKET = download_actuals.GM_SUMMARIES_BUCKET
DEFAULT_JSON_FILENAME = download_actuals.DEFAULT_JSON_FILENAME
DEFAULT_PORT = 8888

# Directory containing this script, and (two levels up) the Skia checkout root.
PARENT_DIRECTORY = os.path.dirname(os.path.realpath(__file__))
TRUNK_DIRECTORY = os.path.dirname(os.path.dirname(PARENT_DIRECTORY))

# Directory, relative to PARENT_DIRECTORY, within which the server will serve
# out static files.
STATIC_CONTENTS_SUBDIR = 'static'
# All of the GENERATED_*_SUBDIRS are relative to STATIC_CONTENTS_SUBDIR
GENERATED_HTML_SUBDIR = 'generated-html'
GENERATED_IMAGES_SUBDIR = 'generated-images'
GENERATED_JSON_SUBDIR = 'generated-json'

# Directives associated with various HTTP GET requests.
GET__LIVE_RESULTS = 'live-results'
GET__PRECOMPUTED_RESULTS = 'results'
GET__PREFETCH_RESULTS = 'prefetch'
GET__STATIC_CONTENTS = 'static'

# Parameters we use within do_GET_live_results() and do_GET_prefetch_results()
LIVE_PARAM__DOWNLOAD_ONLY_DIFFERING = 'downloadOnlyDifferingImages'
LIVE_PARAM__SET_A_DIR = 'setADir'
LIVE_PARAM__SET_A_SECTION = 'setASection'
LIVE_PARAM__SET_B_DIR = 'setBDir'
LIVE_PARAM__SET_B_SECTION = 'setBSection'

# How often (in seconds) clients should reload while waiting for initial
# results to load.
RELOAD_INTERVAL_UNTIL_READY = 10

# Which precomputed GM summary types we generate and link to from the index.
_GM_SUMMARY_TYPES = [
    results_mod.KEY__HEADER__RESULTS_FAILURES,
    results_mod.KEY__HEADER__RESULTS_ALL,
]
# If --compare-configs is specified, compare these configs.
CONFIG_PAIRS_TO_COMPARE = [('8888', 'gpu')]

# SKP results that are available to compare.
#
# TODO(stephana): We don't actually want to maintain this list of platforms.
# We are just putting them in here for now, as "convenience" links for testing
# SKP diffs.
# Ultimately, we will depend on buildbot steps linking to their own diffs on
# the shared rebaseline_server instance.
_SKP_BASE_GS_URL = 'gs://' + buildbot_globals.Get('skp_summaries_bucket')
_SKP_BASE_REPO_URL = (
    compare_rendered_pictures.REPO_URL_PREFIX + posixpath.join(
        'expectations', 'skp'))
_SKP_PLATFORMS = [
    'Test-Mac10.8-MacMini4.1-GeForce320M-x86_64-Debug',
    'Test-Ubuntu12-ShuttleA-GTX660-x86-Release',
]

# HTTP header field names read when handling POST requests.
_HTTP_HEADER_CONTENT_LENGTH = 'Content-Length'
_HTTP_HEADER_CONTENT_TYPE = 'Content-Type'

# The singleton Server instance; HTTPRequestHandler methods reach it through
# this module-level variable. This gets filled in by main().
_SERVER = None
136 | |
137 | |
138 def _run_command(args, directory): | |
139 """Runs a command and returns stdout as a single string. | |
140 | |
141 Args: | |
142 args: the command to run, as a list of arguments | |
143 directory: directory within which to run the command | |
144 | |
145 Returns: stdout, as a string | |
146 | |
147 Raises an Exception if the command failed (exited with nonzero return code). | |
148 """ | |
149 logging.debug('_run_command: %s in directory %s' % (args, directory)) | |
150 proc = subprocess.Popen(args, cwd=directory, | |
151 stdout=subprocess.PIPE, | |
152 stderr=subprocess.PIPE) | |
153 (stdout, stderr) = proc.communicate() | |
154 if proc.returncode is not 0: | |
155 raise Exception('command "%s" failed in dir "%s": %s' % | |
156 (args, directory, stderr)) | |
157 return stdout | |
158 | |
159 | |
def _get_routable_ip_address():
  """Returns routable IP address of this host (the IP address of its network
  interface that would be used for most traffic, not its localhost
  interface). See http://stackoverflow.com/a/166589 """
  # "Connecting" a UDP socket sends no packets; it merely makes the OS choose
  # the outgoing interface, whose address we then read back.
  probe = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
  try:
    probe.connect(('8.8.8.8', 80))
    return probe.getsockname()[0]
  finally:
    probe.close()
169 | |
170 | |
def _create_index(file_path, config_pairs):
  """Creates an index file linking to all results available from this server.

  Prior to https://codereview.chromium.org/215503002 , we had a static
  index.html within our repo. But now that the results may or may not include
  config comparisons, index.html needs to be generated differently depending
  on which results are included.

  TODO(epoger): Instead of including raw HTML within the Python code,
  consider restoring the index.html file as a template and using django (or
  similar) to fill in dynamic content.

  Args:
    file_path: path on local disk to write index to; any directory components
        of this path that do not already exist will be created
    config_pairs: what pairs of configs (if any) we compare actual results of
  """
  dir_path = os.path.dirname(file_path)
  if not os.path.isdir(dir_path):
    os.makedirs(dir_path)
  with open(file_path, 'w') as file_handle:
    file_handle.write(
        '<!DOCTYPE html><html>'
        '<head><title>rebaseline_server</title></head>'
        '<body><ul>')

    # Links to the precomputed expectations-vs-actuals GM summaries.
    # NOTE(review): the nested <ul> is opened *after* the </li> above it
    # rather than inside it; browsers tolerate this but it is not
    # well-formed HTML.
    if _GM_SUMMARY_TYPES:
      file_handle.write('<li>GM Expectations vs Actuals</li><ul>')
      for summary_type in _GM_SUMMARY_TYPES:
        file_handle.write(
            '\n<li><a href="/{static_directive}/view.html#/view.html?'
            'resultsToLoad=/{results_directive}/{summary_type}">'
            '{summary_type}</a></li>'.format(
                results_directive=GET__PRECOMPUTED_RESULTS,
                static_directive=GET__STATIC_CONTENTS,
                summary_type=summary_type))
      file_handle.write('</ul>')

    # Links to config-vs-config comparison JSON files (written out by
    # Server.update_results()), if any comparisons were requested.
    if config_pairs:
      file_handle.write(
          '\n<li>Comparing configs within actual GM results</li><ul>')
      for config_pair in config_pairs:
        file_handle.write('<li>%s vs %s:' % config_pair)
        for summary_type in _GM_SUMMARY_TYPES:
          file_handle.write(
              ' <a href="/%s/view.html#/view.html?'
              'resultsToLoad=/%s/%s/%s-vs-%s_%s.json">%s</a>' % (
                  GET__STATIC_CONTENTS, GET__STATIC_CONTENTS,
                  GENERATED_JSON_SUBDIR, config_pair[0], config_pair[1],
                  summary_type, summary_type))
        file_handle.write('</li>')
      file_handle.write('</ul>')

    # "Convenience" links for live SKP comparisons on a hardcoded set of
    # platforms (see the TODO on _SKP_PLATFORMS).
    if _SKP_PLATFORMS:
      file_handle.write('\n<li>Rendered SKPs:<ul>')
      # One expected-vs-actual link per platform.
      for builder in _SKP_PLATFORMS:
        file_handle.write(
            '\n<li><a href="../live-view.html#live-view.html?%s">' %
            urllib.urlencode({
                LIVE_PARAM__SET_A_SECTION:
                    gm_json.JSONKEY_EXPECTEDRESULTS,
                LIVE_PARAM__SET_A_DIR:
                    posixpath.join(_SKP_BASE_REPO_URL, builder),
                LIVE_PARAM__SET_B_SECTION:
                    gm_json.JSONKEY_ACTUALRESULTS,
                LIVE_PARAM__SET_B_DIR:
                    posixpath.join(_SKP_BASE_GS_URL, builder),
            }))
        file_handle.write('expected vs actuals on %s</a></li>' % builder)
      # One cross-platform link: actuals on platform[0] vs platform[1].
      # NOTE(review): this assumes _SKP_PLATFORMS has at least two entries;
      # the [0]/[1] indexing breaks if the list shrinks.
      file_handle.write(
          '\n<li><a href="../live-view.html#live-view.html?%s">' %
          urllib.urlencode({
              LIVE_PARAM__SET_A_SECTION:
                  gm_json.JSONKEY_ACTUALRESULTS,
              LIVE_PARAM__SET_A_DIR:
                  posixpath.join(_SKP_BASE_GS_URL, _SKP_PLATFORMS[0]),
              LIVE_PARAM__SET_B_SECTION:
                  gm_json.JSONKEY_ACTUALRESULTS,
              LIVE_PARAM__SET_B_DIR:
                  posixpath.join(_SKP_BASE_GS_URL, _SKP_PLATFORMS[1]),
          }))
      file_handle.write('actuals on %s vs %s</a></li>' % (
          _SKP_PLATFORMS[0], _SKP_PLATFORMS[1]))
      file_handle.write('</li>')

    file_handle.write('\n</ul></body></html>')
257 | |
258 | |
class Server(object):
  """ HTTP server for our HTML rebaseline viewer. """

  def __init__(self,
               actuals_source,
               actuals_dir=DEFAULT_ACTUALS_DIR,
               json_filename=DEFAULT_JSON_FILENAME,
               port=DEFAULT_PORT, export=False, editable=True,
               reload_seconds=0, config_pairs=None, builder_regex_list=None,
               boto_file_path=None,
               imagediffdb_threads=imagediffdb.DEFAULT_NUM_WORKER_THREADS):
    """
    Args:
      actuals_source: actuals_source.get_builders() ->
          {builder:string -> [ bucket:string, path:string, generation:string ]}
          If None, don't fetch new actual-results files
          at all, just compare to whatever files are already in actuals_dir
      actuals_dir: directory under which we will check out the latest actual
          GM results
      json_filename: basename of the JSON summary file to load for each builder
      port: which TCP port to listen on for HTTP requests
      export: whether to allow HTTP clients on other hosts to access this server
      editable: whether HTTP clients are allowed to submit new GM baselines
          (SKP baseline modifications are performed using an entirely different
          mechanism, not affected by this parameter)
      reload_seconds: polling interval with which to check for new results;
          if 0, don't check for new results at all
      config_pairs: List of (string, string) tuples; for each tuple, compare
          actual results of these two configs. If None or empty,
          don't compare configs at all.
      builder_regex_list: List of regular expressions specifying which builders
          we will process. If None, process all builders.
      boto_file_path: Path to .boto file giving us credentials to access
          Google Storage buckets; if None, we will only be able to access
          public GS buckets.
      imagediffdb_threads: How many threads to spin up within imagediffdb.
    """
    self._actuals_source = actuals_source
    self._actuals_dir = actuals_dir
    self._json_filename = json_filename
    self._port = port
    self._export = export
    self._editable = editable
    self._reload_seconds = reload_seconds
    self._config_pairs = config_pairs or []
    self._builder_regex_list = builder_regex_list
    # Public flag, passed through to RenderedPicturesComparisons by the
    # request handler.  Always False here; NOTE(review): presumably a hook
    # for callers/tests to flip after construction -- confirm.
    self.truncate_results = False

    # Use the supplied credentials if we got any; otherwise fall back to
    # anonymous access (public GS buckets only).
    if boto_file_path:
      self._gs = gs_utils.GSUtils(boto_file_path=boto_file_path)
    else:
      self._gs = gs_utils.GSUtils()

    # Generate the index page up front, so it reflects which comparisons
    # (GM summaries, config pairs, SKP platforms) this server instance offers.
    _create_index(
        file_path=os.path.join(
            PARENT_DIRECTORY, STATIC_CONTENTS_SUBDIR, GENERATED_HTML_SUBDIR,
            "index.html"),
        config_pairs=config_pairs)

    # Reentrant lock that must be held whenever updating EITHER of:
    # 1. self._results
    # 2. the expected or actual results on local disk
    self.results_rlock = threading.RLock()

    # Create a single ImageDiffDB instance that is used by all our differs.
    self._image_diff_db = imagediffdb.ImageDiffDB(
        gs=self._gs,
        storage_root=os.path.join(
            PARENT_DIRECTORY, STATIC_CONTENTS_SUBDIR,
            GENERATED_IMAGES_SUBDIR),
        num_worker_threads=imagediffdb_threads)

    # This will be filled in by calls to update_results()
    self._results = None

  @property
  def results(self):
    """ Returns the most recently generated results, or None if we don't have
    any valid results (update_results() has not completed yet). """
    return self._results

  @property
  def image_diff_db(self):
    """ Returns reference to our ImageDiffDB object."""
    return self._image_diff_db

  @property
  def gs(self):
    """ Returns reference to our GSUtils object."""
    return self._gs

  @property
  def is_exported(self):
    """ Returns true iff HTTP clients on other hosts are allowed to access
    this server. """
    return self._export

  @property
  def is_editable(self):
    """ True iff HTTP clients are allowed to submit new GM baselines.

    TODO(epoger): This only pertains to GM baselines; SKP baselines are
    editable whenever expectations vs actuals are shown.
    Once we move the GM baselines to use the same code as the SKP baselines,
    we can delete this property.
    """
    return self._editable

  @property
  def reload_seconds(self):
    """ Returns the result reload period in seconds, or 0 if we don't reload
    results. """
    return self._reload_seconds

  def update_results(self, invalidate=False):
    """ Create or update self._results, based on the latest expectations and
    actuals.

    We hold self.results_rlock while we do this, to guarantee that no other
    thread attempts to update either self._results or the underlying files at
    the same time.

    Args:
      invalidate: if True, invalidate self._results immediately upon entry;
          otherwise, we will let readers see those results until we
          replace them
    """
    with self.results_rlock:
      if invalidate:
        self._results = None

      # Step 1 (optional): download fresh actual-result summaries.
      if self._actuals_source:
        logging.info(
            'Updating GM result summaries in %s from %s ...'
            % (self._actuals_dir, self._actuals_source.description()))

        # Clean out actuals_dir first, in case some builders have gone away
        # since we last ran.
        if os.path.isdir(self._actuals_dir):
          shutil.rmtree(self._actuals_dir)

        # Get the list of actuals we care about.
        all_actuals = self._actuals_source.get_builders()

        # Keep only builders matching at least one of the given regexes;
        # if no regexes were given, keep every builder.
        if self._builder_regex_list:
          matching_builders = []
          for builder in all_actuals:
            for regex in self._builder_regex_list:
              if re.match(regex, builder):
                matching_builders.append(builder)
                break  # go on to the next builder, no need to try more regexes
        else:
          matching_builders = all_actuals.keys()

        # Download the JSON file for each builder we care about.
        #
        # TODO(epoger): When this is a large number of builders, we would be
        # better off downloading them in parallel!
        for builder in matching_builders:
          self._gs.download_file(
              source_bucket=all_actuals[builder].bucket,
              source_path=all_actuals[builder].path,
              source_generation=all_actuals[builder].generation,
              dest_path=os.path.join(self._actuals_dir, builder,
                                     self._json_filename),
              create_subdirs_if_needed=True)

      # Step 2 (optional): refresh the expectations checkout.
      #
      # We only update the expectations dir if the server was run with a
      # nonzero --reload argument; otherwise, we expect the user to maintain
      # her own expectations as she sees fit.
      #
      # Because the Skia repo is hosted using git, and git does not
      # support updating a single directory tree, we have to update the entire
      # repo checkout.
      #
      # Because Skia uses depot_tools, we have to update using "gclient sync"
      # instead of raw git commands.
      #
      # TODO(epoger): Fetch latest expectations in some other way.
      # Eric points out that our official documentation recommends an
      # unmanaged Skia checkout, so "gclient sync" will not bring down updated
      # expectations from origin/master-- you'd have to do a "git pull" of
      # some sort instead.
      # However, the live rebaseline_server at
      # http://skia-tree-status.appspot.com/redirect/rebaseline-server (which
      # is probably the only user of the --reload flag!) uses a managed
      # checkout, so "gclient sync" works in that case.
      # Probably the best idea is to avoid all of this nonsense by fetching
      # updated expectations into a temp directory, and leaving the rest of
      # the checkout alone.  This could be done using "git show", or by
      # downloading individual expectation JSON files from
      # skia.googlesource.com .
      if self._reload_seconds:
        logging.info(
            'Updating expected GM results in %s by syncing Skia repo ...' %
            compare_to_expectations.DEFAULT_EXPECTATIONS_DIR)
        _run_command(['gclient', 'sync'], TRUNK_DIRECTORY)

      # Step 3: rebuild the expectations-vs-actuals comparison results.
      self._results = compare_to_expectations.ExpectationComparisons(
          image_diff_db=self._image_diff_db,
          actuals_root=self._actuals_dir,
          diff_base_url=posixpath.join(
              os.pardir, STATIC_CONTENTS_SUBDIR, GENERATED_IMAGES_SUBDIR),
          builder_regex_list=self._builder_regex_list)

      json_dir = os.path.join(
          PARENT_DIRECTORY, STATIC_CONTENTS_SUBDIR, GENERATED_JSON_SUBDIR)
      if not os.path.isdir(json_dir):
        os.makedirs(json_dir)

      # Step 4: write out config-vs-config comparison JSON files, one per
      # (config_pair, summary_type); the index page links to these.
      for config_pair in self._config_pairs:
        config_comparisons = compare_configs.ConfigComparisons(
            configs=config_pair,
            actuals_root=self._actuals_dir,
            generated_images_root=os.path.join(
                PARENT_DIRECTORY, STATIC_CONTENTS_SUBDIR,
                GENERATED_IMAGES_SUBDIR),
            diff_base_url=posixpath.join(
                os.pardir, GENERATED_IMAGES_SUBDIR),
            builder_regex_list=self._builder_regex_list)
        for summary_type in _GM_SUMMARY_TYPES:
          gm_json.WriteToFile(
              config_comparisons.get_packaged_results_of_type(
                  results_type=summary_type),
              os.path.join(
                  json_dir, '%s-vs-%s_%s.json' % (
                      config_pair[0], config_pair[1], summary_type)))

  def _result_loader(self, reload_seconds=0):
    """ Call self.update_results(), either once or periodically.

    Params:
      reload_seconds: integer; if nonzero, reload results at this interval
          (in which case, this method will never return!)
    """
    self.update_results()
    # NOTE(review): self._url is assigned in run() *after* the loader thread
    # is started, so in principle this read could race run()'s assignment --
    # confirm whether update_results() is always slow enough in practice.
    logging.info('Initial results loaded. Ready for requests on %s' % self._url)
    if reload_seconds:
      while True:
        time.sleep(reload_seconds)
        self.update_results()

  def run(self):
    """ Start the background result-loader thread, then serve HTTP requests
    forever (this method never returns). """
    arg_tuple = (self._reload_seconds,)  # start_new_thread needs a tuple,
                                         # even though it holds just one param
    thread.start_new_thread(self._result_loader, arg_tuple)

    # Bind to all interfaces when exporting; otherwise only to localhost.
    if self._export:
      server_address = ('', self._port)
      host = _get_routable_ip_address()
      if self._editable:
        logging.warning('Running with combination of "export" and "editable" '
                        'flags. Users on other machines will '
                        'be able to modify your GM expectations!')
    else:
      host = '127.0.0.1'
      server_address = (host, self._port)
    # pylint: disable=W0201
    # (self._url is deliberately created outside __init__, once the actual
    # bound port is known.)
    http_server = BaseHTTPServer.HTTPServer(server_address, HTTPRequestHandler)
    self._url = 'http://%s:%d' % (host, http_server.server_port)
    logging.info('Listening for requests on %s' % self._url)
    http_server.serve_forever()
521 | |
522 | |
523 class HTTPRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler): | |
524 """ HTTP request handlers for various types of queries this server knows | |
525 how to handle (static HTML and Javascript, expected/actual results, etc.) | |
526 """ | |
527 def do_GET(self): | |
528 """ | |
529 Handles all GET requests, forwarding them to the appropriate | |
530 do_GET_* dispatcher. | |
531 | |
532 If we see any Exceptions, return a 404. This fixes http://skbug.com/2147 | |
533 """ | |
534 try: | |
535 logging.debug('do_GET: path="%s"' % self.path) | |
536 if self.path == '' or self.path == '/' or self.path == '/index.html' : | |
537 self.redirect_to('/%s/%s/index.html' % ( | |
538 GET__STATIC_CONTENTS, GENERATED_HTML_SUBDIR)) | |
539 return | |
540 if self.path == '/favicon.ico' : | |
541 self.redirect_to('/%s/favicon.ico' % GET__STATIC_CONTENTS) | |
542 return | |
543 | |
544 # All requests must be of this form: | |
545 # /dispatcher/remainder | |
546 # where 'dispatcher' indicates which do_GET_* dispatcher to run | |
547 # and 'remainder' is the remaining path sent to that dispatcher. | |
548 (dispatcher_name, remainder) = PATHSPLIT_RE.match(self.path).groups() | |
549 dispatchers = { | |
550 GET__LIVE_RESULTS: self.do_GET_live_results, | |
551 GET__PRECOMPUTED_RESULTS: self.do_GET_precomputed_results, | |
552 GET__PREFETCH_RESULTS: self.do_GET_prefetch_results, | |
553 GET__STATIC_CONTENTS: self.do_GET_static, | |
554 } | |
555 dispatcher = dispatchers[dispatcher_name] | |
556 dispatcher(remainder) | |
557 except: | |
558 self.send_error(404) | |
559 raise | |
560 | |
561 def do_GET_precomputed_results(self, results_type): | |
562 """ Handle a GET request for part of the precomputed _SERVER.results object. | |
563 | |
564 Args: | |
565 results_type: string indicating which set of results to return; | |
566 must be one of the results_mod.RESULTS_* constants | |
567 """ | |
568 logging.debug('do_GET_precomputed_results: sending results of type "%s"' % | |
569 results_type) | |
570 # Since we must make multiple calls to the ExpectationComparisons object, | |
571 # grab a reference to it in case it is updated to point at a new | |
572 # ExpectationComparisons object within another thread. | |
573 # | |
574 # TODO(epoger): Rather than using a global variable for the handler | |
575 # to refer to the Server object, make Server a subclass of | |
576 # HTTPServer, and then it could be available to the handler via | |
577 # the handler's .server instance variable. | |
578 results_obj = _SERVER.results | |
579 if results_obj: | |
580 response_dict = results_obj.get_packaged_results_of_type( | |
581 results_type=results_type, reload_seconds=_SERVER.reload_seconds, | |
582 is_editable=_SERVER.is_editable, is_exported=_SERVER.is_exported) | |
583 else: | |
584 now = int(time.time()) | |
585 response_dict = { | |
586 imagepairset.KEY__ROOT__HEADER: { | |
587 results_mod.KEY__HEADER__SCHEMA_VERSION: ( | |
588 results_mod.VALUE__HEADER__SCHEMA_VERSION), | |
589 results_mod.KEY__HEADER__IS_STILL_LOADING: True, | |
590 results_mod.KEY__HEADER__TIME_UPDATED: now, | |
591 results_mod.KEY__HEADER__TIME_NEXT_UPDATE_AVAILABLE: ( | |
592 now + RELOAD_INTERVAL_UNTIL_READY), | |
593 }, | |
594 } | |
595 self.send_json_dict(response_dict) | |
596 | |
597 def _get_live_results_or_prefetch(self, url_remainder, prefetch_only=False): | |
598 """ Handle a GET request for live-generated image diff data. | |
599 | |
600 Args: | |
601 url_remainder: string indicating which image diffs to generate | |
602 prefetch_only: if True, the user isn't waiting around for results | |
603 """ | |
604 param_dict = urlparse.parse_qs(url_remainder) | |
605 download_all_images = ( | |
606 param_dict.get(LIVE_PARAM__DOWNLOAD_ONLY_DIFFERING, [''])[0].lower() | |
607 not in ['1', 'true']) | |
608 setA_dir = param_dict[LIVE_PARAM__SET_A_DIR][0] | |
609 setB_dir = param_dict[LIVE_PARAM__SET_B_DIR][0] | |
610 setA_section = self._validate_summary_section( | |
611 param_dict.get(LIVE_PARAM__SET_A_SECTION, [None])[0]) | |
612 setB_section = self._validate_summary_section( | |
613 param_dict.get(LIVE_PARAM__SET_B_SECTION, [None])[0]) | |
614 | |
615 # If the sets show expectations vs actuals, always show expectations on | |
616 # the left (setA). | |
617 if ((setA_section == gm_json.JSONKEY_ACTUALRESULTS) and | |
618 (setB_section == gm_json.JSONKEY_EXPECTEDRESULTS)): | |
619 setA_dir, setB_dir = setB_dir, setA_dir | |
620 setA_section, setB_section = setB_section, setA_section | |
621 | |
622 # Are we comparing some actuals against expectations stored in the repo? | |
623 # If so, we can allow the user to submit new baselines. | |
624 is_editable = ( | |
625 (setA_section == gm_json.JSONKEY_EXPECTEDRESULTS) and | |
626 (setA_dir.startswith(compare_rendered_pictures.REPO_URL_PREFIX)) and | |
627 (setB_section == gm_json.JSONKEY_ACTUALRESULTS)) | |
628 | |
629 results_obj = compare_rendered_pictures.RenderedPicturesComparisons( | |
630 setA_dir=setA_dir, setB_dir=setB_dir, | |
631 setA_section=setA_section, setB_section=setB_section, | |
632 image_diff_db=_SERVER.image_diff_db, | |
633 diff_base_url='/static/generated-images', | |
634 gs=_SERVER.gs, truncate_results=_SERVER.truncate_results, | |
635 prefetch_only=prefetch_only, download_all_images=download_all_images) | |
636 if prefetch_only: | |
637 self.send_response(200) | |
638 else: | |
639 self.send_json_dict(results_obj.get_packaged_results_of_type( | |
640 results_type=results_mod.KEY__HEADER__RESULTS_ALL, | |
641 is_editable=is_editable)) | |
642 | |
643 def do_GET_live_results(self, url_remainder): | |
644 """ Handle a GET request for live-generated image diff data. | |
645 | |
646 Args: | |
647 url_remainder: string indicating which image diffs to generate | |
648 """ | |
649 logging.debug('do_GET_live_results: url_remainder="%s"' % url_remainder) | |
650 self._get_live_results_or_prefetch( | |
651 url_remainder=url_remainder, prefetch_only=False) | |
652 | |
653 def do_GET_prefetch_results(self, url_remainder): | |
654 """ Prefetch image diff data for a future do_GET_live_results() call. | |
655 | |
656 Args: | |
657 url_remainder: string indicating which image diffs to generate | |
658 """ | |
659 logging.debug('do_GET_prefetch_results: url_remainder="%s"' % url_remainder) | |
660 self._get_live_results_or_prefetch( | |
661 url_remainder=url_remainder, prefetch_only=True) | |
662 | |
663 def do_GET_static(self, path): | |
664 """ Handle a GET request for a file under STATIC_CONTENTS_SUBDIR . | |
665 Only allow serving of files within STATIC_CONTENTS_SUBDIR that is a | |
666 filesystem sibling of this script. | |
667 | |
668 Args: | |
669 path: path to file (within STATIC_CONTENTS_SUBDIR) to retrieve | |
670 """ | |
671 # Strip arguments ('?resultsToLoad=all') from the path | |
672 path = urlparse.urlparse(path).path | |
673 | |
674 logging.debug('do_GET_static: sending file "%s"' % path) | |
675 static_dir = os.path.realpath(os.path.join( | |
676 PARENT_DIRECTORY, STATIC_CONTENTS_SUBDIR)) | |
677 full_path = os.path.realpath(os.path.join(static_dir, path)) | |
678 if full_path.startswith(static_dir): | |
679 self.send_file(full_path) | |
680 else: | |
681 logging.error( | |
682 'Attempted do_GET_static() of path [%s] outside of static dir [%s]' | |
683 % (full_path, static_dir)) | |
684 self.send_error(404) | |
685 | |
686 def do_POST(self): | |
687 """ Handles all POST requests, forwarding them to the appropriate | |
688 do_POST_* dispatcher. """ | |
689 # All requests must be of this form: | |
690 # /dispatcher | |
691 # where 'dispatcher' indicates which do_POST_* dispatcher to run. | |
692 logging.debug('do_POST: path="%s"' % self.path) | |
693 normpath = posixpath.normpath(self.path) | |
694 dispatchers = { | |
695 '/edits': self.do_POST_edits, | |
696 '/live-edits': self.do_POST_live_edits, | |
697 } | |
698 try: | |
699 dispatcher = dispatchers[normpath] | |
700 dispatcher() | |
701 except: | |
702 self.send_error(404) | |
703 raise | |
704 | |
  def do_POST_edits(self):
    """ Handle a POST request with modifications to GM expectations, in this
    format:

    {
      KEY__EDITS__OLD_RESULTS_TYPE: 'all',  # type of results that the client
                                            # loaded and then made
                                            # modifications to
      KEY__EDITS__OLD_RESULTS_HASH: 39850913,  # hash of results when the client
                                               # loaded them (ensures that the
                                               # client and server apply
                                               # modifications to the same base)
      KEY__EDITS__MODIFICATIONS: [
        # as needed by compare_to_expectations.edit_expectations()
        ...
      ],
    }

    Raises an Exception if there were any problems.
    """
    if not _SERVER.is_editable:
      raise Exception('this server is not running in --editable mode')

    # Only accept the exact content type the UI sends.
    content_type = self.headers[_HTTP_HEADER_CONTENT_TYPE]
    if content_type != 'application/json;charset=UTF-8':
      raise Exception('unsupported %s [%s]' % (
          _HTTP_HEADER_CONTENT_TYPE, content_type))

    content_length = int(self.headers[_HTTP_HEADER_CONTENT_LENGTH])
    json_data = self.rfile.read(content_length)
    data = json.loads(json_data)
    logging.debug('do_POST_edits: received new GM expectations data [%s]' %
                  data)

    # Update the results on disk with the information we received from the
    # client.
    # We must hold _SERVER.results_rlock while we do this, to guarantee that
    # no other thread updates expectations (from the Skia repo) while we are
    # updating them (using the info we received from the client).
    with _SERVER.results_rlock:
      oldResultsType = data[KEY__EDITS__OLD_RESULTS_TYPE]
      oldResults = _SERVER.results.get_results_of_type(oldResultsType)
      oldResultsHash = str(hash(repr(
          oldResults[imagepairset.KEY__ROOT__IMAGEPAIRS])))
      # Optimistic-concurrency check: the client echoes back the hash of the
      # results its edits were based on; if the server's results have moved
      # on since then, refuse rather than apply edits to a changed base.
      if oldResultsHash != data[KEY__EDITS__OLD_RESULTS_HASH]:
        raise Exception('results of type "%s" changed while the client was '
                        'making modifications. The client should reload the '
                        'results and submit the modifications again.' %
                        oldResultsType)
      _SERVER.results.edit_expectations(data[KEY__EDITS__MODIFICATIONS])

    # Read the updated results back from disk.
    # We can do this in a separate thread; we should return our success message
    # to the UI as soon as possible.
    thread.start_new_thread(_SERVER.update_results, (True,))
    self.send_response(200)
761 | |
  def do_POST_live_edits(self):
    """ Handle a POST request with modifications to SKP expectations, in this
    format:

    {
      KEY__LIVE_EDITS__SET_A_DESCRIPTIONS: {
        # setA descriptions from the original data
      },
      KEY__LIVE_EDITS__SET_B_DESCRIPTIONS: {
        # setB descriptions from the original data
      },
      KEY__LIVE_EDITS__MODIFICATIONS: [
        # as needed by writable_expectations.modify()
      ],
    }

    Raises an Exception if there were any problems.
    """
    # Note: unlike do_POST_edits(), there is no _SERVER.is_editable check
    # here; per Server.is_editable, SKP baselines use a separate mechanism.
    content_type = self.headers[_HTTP_HEADER_CONTENT_TYPE]
    if content_type != 'application/json;charset=UTF-8':
      raise Exception('unsupported %s [%s]' % (
          _HTTP_HEADER_CONTENT_TYPE, content_type))

    content_length = int(self.headers[_HTTP_HEADER_CONTENT_LENGTH])
    json_data = self.rfile.read(content_length)
    data = json.loads(json_data)
    logging.debug('do_POST_live_edits: received new GM expectations data [%s]' %
                  data)
    # Apply the modifications via a WritableExpectations context built from
    # the setA descriptions, then return the resulting diffs to the client
    # as plain text.
    with writable_expectations_mod.WritableExpectations(
        data[KEY__LIVE_EDITS__SET_A_DESCRIPTIONS]) as writable_expectations:
      writable_expectations.modify(data[KEY__LIVE_EDITS__MODIFICATIONS])
      diffs = writable_expectations.get_diffs()
      # TODO(stephana): Move to a simpler web framework so we don't have to
      # call these functions. See http://skbug.com/2856 ('rebaseline_server:
      # Refactor server to use a simple web framework')
      self.send_response(200)
      self.send_header('Content-type', 'text/plain')
      self.end_headers()
      self.wfile.write(diffs)
801 | |
802 def redirect_to(self, url): | |
803 """ Redirect the HTTP client to a different url. | |
804 | |
805 Args: | |
806 url: URL to redirect the HTTP client to | |
807 """ | |
808 self.send_response(301) | |
809 self.send_header('Location', url) | |
810 self.end_headers() | |
811 | |
812 def send_file(self, path): | |
813 """ Send the contents of the file at this path, with a mimetype based | |
814 on the filename extension. | |
815 | |
816 Args: | |
817 path: path of file whose contents to send to the HTTP client | |
818 """ | |
819 # Grab the extension if there is one | |
820 extension = os.path.splitext(path)[1] | |
821 if len(extension) >= 1: | |
822 extension = extension[1:] | |
823 | |
824 # Determine the MIME type of the file from its extension | |
825 mime_type = MIME_TYPE_MAP.get(extension, MIME_TYPE_MAP['']) | |
826 | |
827 # Open the file and send it over HTTP | |
828 if os.path.isfile(path): | |
829 with open(path, 'rb') as sending_file: | |
830 self.send_response(200) | |
831 self.send_header('Content-type', mime_type) | |
832 self.end_headers() | |
833 self.wfile.write(sending_file.read()) | |
834 else: | |
835 self.send_error(404) | |
836 | |
837 def send_json_dict(self, json_dict): | |
838 """ Send the contents of this dictionary in JSON format, with a JSON | |
839 mimetype. | |
840 | |
841 Args: | |
842 json_dict: dictionary to send | |
843 """ | |
844 self.send_response(200) | |
845 self.send_header('Content-type', 'application/json') | |
846 self.end_headers() | |
847 json.dump(json_dict, self.wfile) | |
848 | |
849 def _validate_summary_section(self, section_name): | |
850 """Validates the section we have been requested to read within JSON summary. | |
851 | |
852 Args: | |
853 section_name: which section of the JSON summary file has been requested | |
854 | |
855 Returns: the validated section name | |
856 | |
857 Raises: Exception if an invalid section_name was requested. | |
858 """ | |
859 if section_name not in compare_rendered_pictures.ALLOWED_SECTION_NAMES: | |
860 raise Exception('requested section name "%s" not in allowed list %s' % ( | |
861 section_name, compare_rendered_pictures.ALLOWED_SECTION_NAMES)) | |
862 return section_name | |
863 | |
864 | |
def main():
  """Parse command-line options, build the global Server, and run it forever.

  Sets the module-level _SERVER global so the HTTP request handlers can reach
  the single Server instance, then blocks inside _SERVER.run().
  """
  logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',
                      datefmt='%m/%d/%Y %H:%M:%S',
                      level=logging.INFO)
  parser = argparse.ArgumentParser()
  parser.add_argument('--actuals-dir',
                      help=('Directory into which we will check out the latest '
                            'actual GM results. If this directory does not '
                            'exist, it will be created. Defaults to %(default)s'),
                      default=DEFAULT_ACTUALS_DIR)
  parser.add_argument('--boto',
                      help=('Path to .boto file giving us credentials to access '
                            'Google Storage buckets. If not specified, we will '
                            'only be able to access public GS buckets (and thus '
                            'won\'t be able to download SKP images).'),
                      default='')
  # TODO(epoger): Before https://codereview.chromium.org/310093003 ,
  # when this tool downloaded the JSON summaries from skia-autogen,
  # it had an --actuals-revision the caller could specify to download
  # actual results as of a specific point in time. We should add similar
  # functionality when retrieving the summaries from Google Storage.
  parser.add_argument('--builders', metavar='BUILDER_REGEX', nargs='+',
                      help=('Only process builders matching these regular '
                            'expressions. If unspecified, process all '
                            'builders.'))
  parser.add_argument('--compare-configs', action='store_true',
                      help=('In addition to generating differences between '
                            'expectations and actuals, also generate '
                            'differences between these config pairs: '
                            + str(CONFIG_PAIRS_TO_COMPARE)))
  parser.add_argument('--editable', action='store_true',
                      help=('Allow HTTP clients to submit new GM baselines; '
                            'SKP baselines can be edited regardless of this '
                            'setting.'))
  parser.add_argument('--export', action='store_true',
                      help=('Instead of only allowing access from HTTP clients '
                            'on localhost, allow HTTP clients on other hosts '
                            'to access this server. WARNING: doing so will '
                            'allow users on other hosts to modify your '
                            'GM expectations, if combined with --editable.'))
  # NOTE: the adjacent string literals below previously lacked separating
  # spaces, so argparse showed "latest trybotruns ... issue.Overrides";
  # trailing spaces added so the rendered help text reads correctly.
  parser.add_argument('--rietveld-issue',
                      help=('Download json_filename files from latest trybot '
                            'runs on this codereview.chromium.org issue. '
                            'Overrides --gm-summaries-bucket.'))
  parser.add_argument('--gm-summaries-bucket',
                      help=('Google Cloud Storage bucket to download '
                            'JSON_FILENAME files from. '
                            'Defaults to %(default)s ; if set to '
                            'empty string, just compare to actual-results '
                            'already found in ACTUALS_DIR.'),
                      default=DEFAULT_GM_SUMMARIES_BUCKET)
  parser.add_argument('--json-filename',
                      help=('JSON summary filename to read for each builder; '
                            'defaults to %(default)s.'),
                      default=DEFAULT_JSON_FILENAME)
  parser.add_argument('--port', type=int,
                      help=('Which TCP port to listen on for HTTP requests; '
                            'defaults to %(default)s'),
                      default=DEFAULT_PORT)
  parser.add_argument('--reload', type=int,
                      help=('How often (a period in seconds) to update the '
                            'results. If specified, both expected and actual '
                            'results will be updated by running "gclient sync" '
                            'on your Skia checkout as a whole. '
                            'By default, we do not reload at all, and you '
                            'must restart the server to pick up new data.'),
                      default=0)
  parser.add_argument('--threads', type=int,
                      help=('How many parallel threads we use to download '
                            'images and generate diffs; defaults to '
                            '%(default)s'),
                      default=imagediffdb.DEFAULT_NUM_WORKER_THREADS)
  parser.add_argument('--truncate', action='store_true',
                      help=('FOR TESTING ONLY: truncate the set of images we '
                            'process, to speed up testing.'))
  args = parser.parse_args()

  # Only compute config-vs-config diffs if the caller asked for them.
  if args.compare_configs:
    config_pairs = CONFIG_PAIRS_TO_COMPARE
  else:
    config_pairs = None

  # A Rietveld issue, when given, takes precedence over the GS bucket.
  if args.rietveld_issue:
    actuals_source = download_actuals.RietveldIssueActuals(args.rietveld_issue,
                                                           args.json_filename)
  else:
    actuals_source = download_actuals.TipOfTreeActuals(args.gm_summaries_bucket,
                                                       args.json_filename)

  global _SERVER
  _SERVER = Server(actuals_source,
                   actuals_dir=args.actuals_dir,
                   json_filename=args.json_filename,
                   port=args.port, export=args.export, editable=args.editable,
                   reload_seconds=args.reload, config_pairs=config_pairs,
                   builder_regex_list=args.builders, boto_file_path=args.boto,
                   imagediffdb_threads=args.threads)
  if args.truncate:
    _SERVER.truncate_results = True
  _SERVER.run()
964 | |
965 | |
# Script entry point: run the rebaseline server when invoked directly.
if __name__ == '__main__':
  main()
OLD | NEW |