Index: content/test/gpu/gpu_tests/gpu_process.py
diff --git a/content/test/gpu/gpu_tests/gpu_process.py b/content/test/gpu/gpu_tests/gpu_process.py
index a27c701594c178945284891d18268a2c76b82880..4a54824f6253c9e3a00362468f8921f9320ea53d 100644
--- a/content/test/gpu/gpu_tests/gpu_process.py
+++ b/content/test/gpu/gpu_tests/gpu_process.py
@@ -6,6 +6,7 @@ from gpu_tests import gpu_test_base
 import page_sets
 
 from telemetry.page import page_test
+from telemetry.story import story_set as story_set_module
 
 test_harness_script = r"""
   var domAutomationController = {};
@@ -51,3 +52,39 @@ class GpuProcess(gpu_test_base.TestBase):
     for page in story_set:
       page.script_to_evaluate_on_commit = test_harness_script
     return story_set
+
+class NoGpuProcessValidator(gpu_test_base.ValidatorBase):
+  def __init__(self):
+    super(NoGpuProcessValidator, self).__init__(
+        needs_browser_restart_after_each_page=True)
+
+  def ValidateAndMeasurePage(self, page, tab, results):
+    has_gpu_process_js = 'chrome.gpuBenchmarking.hasGpuProcess()'
+    has_gpu_process = tab.EvaluateJavaScript(has_gpu_process_js)
+    if has_gpu_process:
+      raise page_test.Failure('GPU process detected')
+
+class NoGpuProcess(gpu_test_base.TestBase):
+  """Tests that accelerated content does not trigger the creation of a GPU
+  process if the card is blacklisted."""
+  test = NoGpuProcessValidator
+
+  @classmethod
+  def Name(cls):
+    return 'no_gpu_process'
+
+  def _CreateExpectations(self):
+    return expectations.GpuProcessExpectations()
+
+  def CreateStorySet(self, options):
+    options.AppendExtraBrowserArgs('--gpu-driver-vendor=Mesa')
+    options.AppendExtraBrowserArgs('--gl-vendor-string=VMware')
+    options.AppendExtraBrowserArgs('--gl-renderer-string=softpipe')
+
+    story_set = story_set_module.StorySet()
+    story_set.AddStory(gpu_test_base.PageBase(
+        url='about:blank', name='GpuProcess.no_gpu_process',
+        page_set=story_set, expectations=self.GetExpectations()))
+    for page in story_set:
+      page.script_to_evaluate_on_commit = test_harness_script
+    return story_set
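
Note on the validator pattern used above: NoGpuProcessValidator evaluates chrome.gpuBenchmarking.hasGpuProcess() in the tab and raises page_test.Failure if a GPU process exists. For contrast, a validator asserting the opposite condition (a GPU process must be present) would follow the same shape. The sketch below is illustrative only; the class name and failure message are hypothetical and not part of this patch, and it uses only the APIs the patch itself exercises (gpu_test_base.ValidatorBase, tab.EvaluateJavaScript, page_test.Failure):

class GpuProcessPresentValidator(gpu_test_base.ValidatorBase):
  # Hypothetical inverse of NoGpuProcessValidator: fail the page when
  # no GPU process is found, instead of when one is found.
  def ValidateAndMeasurePage(self, page, tab, results):
    if not tab.EvaluateJavaScript('chrome.gpuBenchmarking.hasGpuProcess()'):
      raise page_test.Failure('No GPU process detected')

Two design details worth noting: the chrome.gpuBenchmarking JavaScript hook is only exposed when Chrome runs with --enable-gpu-benchmarking (presumably supplied by the GPU test harness elsewhere), and needs_browser_restart_after_each_page=True forces a fresh browser for each page, so a GPU process spawned by one page cannot leak into the next page's check.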