Index: tools/perf/benchmarks/dom_perf.py
diff --git a/tools/perf/benchmarks/dom_perf.py b/tools/perf/benchmarks/dom_perf.py
index b39c7a13c3eaab271e3a3a29a69bee8151cba70a..8fc94470579b0fd76a4344210c300fee355c2bf6 100644
--- a/tools/perf/benchmarks/dom_perf.py
+++ b/tools/perf/benchmarks/dom_perf.py
@@ -6,10 +6,10 @@ import json
 import math
 import os
+from measurements import PageTestMeasurement
 from telemetry import benchmark
 from telemetry.core import util
 from telemetry.page import page_set
-from telemetry.page import page_test
 from telemetry.value import merge_values
 from telemetry.value import scalar
@@ -37,7 +37,7 @@ SCORE_UNIT = 'score (bigger is better)'
 SCORE_TRACE_NAME = 'score'
-class _DomPerfMeasurement(page_test.PageTest):
+class _DomPerfMeasurement(PageTestMeasurement):
   def ValidateAndMeasurePage(self, page, tab, results):
     try:
       def _IsDone():
@@ -51,8 +51,11 @@ class _DomPerfMeasurement(page_test.PageTest):
         results.AddValue(scalar.ScalarValue(
             results.current_page, '%s.%s' % (suite['name'], SCORE_TRACE_NAME),
             SCORE_UNIT, suite['score'], important=False))
+
     finally:
       tab.EvaluateJavaScript('document.cookie = "__domperf_finished=0"')
+      super(_DomPerfMeasurement, self).ValidateAndMeasurePage(
+          page, tab, results)
   def DidRunTest(self, browser, results):
     # Now give the geometric mean as the total for the combined runs.