Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 # Copyright 2013 The Chromium Authors. All rights reserved. | 1 # Copyright 2013 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 import os | 5 import os |
| 6 | 6 |
| 7 from core import path_util | 7 from core import path_util |
| 8 from core import perf_benchmark | 8 from core import perf_benchmark |
| 9 | 9 |
| 10 from telemetry import benchmark | 10 from telemetry import benchmark |
| (...skipping 61 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 72 ps = story.StorySet(base_dir=os.getcwd() + os.sep, | 72 ps = story.StorySet(base_dir=os.getcwd() + os.sep, |
| 73 serving_dirs=serving_dirs) | 73 serving_dirs=serving_dirs) |
| 74 for url in page_urls: | 74 for url in page_urls: |
| 75 ps.AddStory(page_module.Page( | 75 ps.AddStory(page_module.Page( |
| 76 url, ps, ps.base_dir, | 76 url, ps, ps.base_dir, |
| 77 shared_page_state_class=shared_page_state_class)) | 77 shared_page_state_class=shared_page_state_class)) |
| 78 return ps | 78 return ps |
| 79 | 79 |
| 80 | 80 |
def _ComputeTraceEventsThreadTimeForBlinkPerf(
    model, renderer_thread, trace_events_to_measure):
  """Computes the CPU duration of each of |trace_events_to_measure| during
  a blink_perf test.

  Args:
    model: the TimelineModel containing the whole trace; events are iterated
      model-wide (not just on |renderer_thread|) so that duplicated events
      emitted on other threads can be deduplicated by uuid.
    renderer_thread: the renderer thread which runs the blink_perf test.
    trace_events_to_measure: a list of string names of trace events to measure
      CPU duration for.

  Returns:
    A dictionary in which each key is a trace event's name (from the
    |trace_events_to_measure| list), and each value is a list of numbers
    representing the total cpu time of that trace event in each blink_perf
    test run.
  """
  trace_cpu_time_metrics = {}

  # Collect the bounds of "blink_perf.runTest" events, sorted by start time
  # so events can be bucketed into test runs with a single forward scan.
  test_runs_bounds = []
  for event in renderer_thread.async_slices:
    if event.name == "blink_perf.runTest":
      test_runs_bounds.append(bounds.Bounds.CreateFromEvent(event))
  test_runs_bounds.sort(key=lambda b: b.min)

  for t in trace_events_to_measure:
    trace_cpu_time_metrics[t] = [0.0] * len(test_runs_bounds)

  for event_name in trace_events_to_measure:
    curr_test_runs_bound_index = 0
    seen_uuids = set()
    for event in model.IterAllEventsOfName(event_name):
      # Trace events can be duplicated in some cases. Filter out trace events
      # that have a duplicated uuid. (Events without a uuid are never
      # filtered; truthiness intentionally matches the original behavior.)
      event_uuid = event.args.get('uuid') if event.args else None
      if event_uuid:
        if event_uuid in seen_uuids:
          continue
        seen_uuids.add(event_uuid)
      # Advance to the first test-run bound that could contain this event.
      while (curr_test_runs_bound_index < len(test_runs_bounds) and
             event.start > test_runs_bounds[curr_test_runs_bound_index].max):
        curr_test_runs_bound_index += 1
      if curr_test_runs_bound_index >= len(test_runs_bounds):
        break
      curr_test_bound = test_runs_bounds[curr_test_runs_bound_index]
      intersect_wall_time = bounds.Bounds.GetOverlapBetweenBounds(
          curr_test_bound, bounds.Bounds.CreateFromEvent(event))
      if event.thread_duration and event.duration:
        # Scale the overlapping wall time by the event's cpu/wall ratio to
        # approximate the cpu time spent inside the test-run bound.
        intersect_cpu_time = (
            intersect_wall_time * event.thread_duration / event.duration)
      else:
        # No thread timing available; fall back to wall time.
        intersect_cpu_time = intersect_wall_time
      trace_cpu_time_metrics[event_name][curr_test_runs_bound_index] += (
          intersect_cpu_time)
  return trace_cpu_time_metrics
| 128 | 137 |
| 129 | 138 |
| 130 | |
| 131 class _BlinkPerfMeasurement(legacy_page_test.LegacyPageTest): | 139 class _BlinkPerfMeasurement(legacy_page_test.LegacyPageTest): |
| 132 """Tuns a blink performance test and reports the results.""" | 140 """Tuns a blink performance test and reports the results.""" |
| 133 | 141 |
| 134 def __init__(self): | 142 def __init__(self): |
| 135 super(_BlinkPerfMeasurement, self).__init__() | 143 super(_BlinkPerfMeasurement, self).__init__() |
| 136 with open(os.path.join(os.path.dirname(__file__), | 144 with open(os.path.join(os.path.dirname(__file__), |
| 137 'blink_perf.js'), 'r') as f: | 145 'blink_perf.js'), 'r') as f: |
| 138 self._blink_perf_js = f.read() | 146 self._blink_perf_js = f.read() |
| 139 | 147 |
| 140 def WillNavigateToPage(self, page, tab): | 148 def WillNavigateToPage(self, page, tab): |
| (...skipping 59 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 200 if tab.EvaluateJavaScript('testRunner.isWaitingForTracingStart'): | 208 if tab.EvaluateJavaScript('testRunner.isWaitingForTracingStart'): |
| 201 trace_data = self._ContinueTestRunWithTracing(tab) | 209 trace_data = self._ContinueTestRunWithTracing(tab) |
| 202 trace_value = trace.TraceValue(page, trace_data) | 210 trace_value = trace.TraceValue(page, trace_data) |
| 203 results.AddValue(trace_value) | 211 results.AddValue(trace_value) |
| 204 | 212 |
| 205 trace_events_to_measure = tab.EvaluateJavaScript( | 213 trace_events_to_measure = tab.EvaluateJavaScript( |
| 206 'window.testRunner.traceEventsToMeasure') | 214 'window.testRunner.traceEventsToMeasure') |
| 207 model = model_module.TimelineModel(trace_data) | 215 model = model_module.TimelineModel(trace_data) |
| 208 renderer_thread = model.GetRendererThreadFromTabId(tab.id) | 216 renderer_thread = model.GetRendererThreadFromTabId(tab.id) |
| 209 trace_cpu_time_metrics = _ComputeTraceEventsThreadTimeForBlinkPerf( | 217 trace_cpu_time_metrics = _ComputeTraceEventsThreadTimeForBlinkPerf( |
| 210 renderer_thread, trace_events_to_measure) | 218 model, renderer_thread, trace_events_to_measure) |
| 211 | 219 |
| 212 log = tab.EvaluateJavaScript('document.getElementById("log").innerHTML') | 220 log = tab.EvaluateJavaScript('document.getElementById("log").innerHTML') |
| 213 | 221 |
| 214 for line in log.splitlines(): | 222 for line in log.splitlines(): |
| 215 if line.startswith("FATAL: "): | 223 if line.startswith("FATAL: "): |
| 216 print line | 224 print line |
| 217 continue | 225 continue |
| 218 if not line.startswith('values '): | 226 if not line.startswith('values '): |
| 219 continue | 227 continue |
| 220 parts = line.split() | 228 parts = line.split() |
| (...skipping 173 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 394 | 402 |
| 395 | 403 |
@benchmark.Owner(emails=['hayato@chromium.org'])
class BlinkPerfShadowDOM(_BlinkPerfBenchmark):
  """Blink performance benchmark for the ShadowDOM test suite."""
  tag = 'shadow_dom'
  subdir = 'ShadowDOM'

  @classmethod
  def ShouldDisable(cls, possible_browser):  # http://crbug.com/702319
    device_name = possible_browser.platform.GetDeviceTypeName()
    return device_name == 'Nexus 5X'
| 404 | |
| OLD | NEW |