Chromium Code Reviews

Side by Side Diff: tools/android/loading/sandwich_task_builder.py

Issue 1872313002: sandwich: Implement SandwichTaskBuilder (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: mv sandwich_tasks.py -> sandwich_task_builder.py Created 4 years, 8 months ago
1 # Copyright 2016 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
4
5 import csv
6 import json
7 import os
8 import shutil
9
10 import chrome_cache
11 import common_util
12 import emulation
13 import sandwich_metrics
14 import sandwich_misc
15 from sandwich_runner import SandwichRunner
16 import task_manager
17
18
19 def NetworkSimulationTransformer(network_condition):
20 """Creates a function that accepts a SandwichRunner as a parameter and sets
21 network emulation options on it.
22
23 Args:
24 network_condition: The network condition to apply to the sandwich runner.
25
26 Returns:
27 A callback that transforms the given SandwichRunner accordingly.
28 """
29 assert network_condition in emulation.NETWORK_CONDITIONS
30 def Transformer(sandwich_runner):
31 assert isinstance(sandwich_runner, SandwichRunner)
32 sandwich_runner.network_condition = network_condition
33 return Transformer
34
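A minimal usage sketch of the transformer above, assuming 'Regular2G' is a valid key of emulation.NETWORK_CONDITIONS (the name is illustrative):

    # Sketch: bind a transformer to an assumed network condition name and
    # apply it to a runner; the closure only sets network_condition.
    transformer = NetworkSimulationTransformer('Regular2G')  # assumed key
    runner = SandwichRunner()
    transformer(runner)  # runner.network_condition is now 'Regular2G'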
35
36 class SandwichTaskBuilder(task_manager.Builder):
37 """A builder for a graph of tasks, each prepares or invokes a SandwichRunner.
38 """
39
40 def __init__(self, output_directory, job_path, url_repeat):
41 """Constructor.
42
43 Args:
44 output_directory: As in task_manager.Builder.__init__
45 job_path: Path of the sandwich's job.
46 url_repeat: Non-zero integer controlling how many times the URLs should be
47 repeated in the benchmarks.
48 """
49 task_manager.Builder.__init__(self, output_directory)
50 self._job_path = job_path
51 self._url_repeat = url_repeat
52 self._default_final_tasks = []
53
54 self._original_wpr_task = None
55 self._patched_wpr_task = None
56 self._reference_cache_task = None
57 self._subresources_for_urls_run_task = None
58 self._subresources_for_urls_task = None
59
60 @property
61 def default_final_tasks(self):
62 return self._default_final_tasks
63
64 def _CreateSandwichRunner(self):
65 """Create a runner for non benchmark purposes."""
66 runner = SandwichRunner()
67 runner.LoadJob(self._job_path)
68 return runner
69
70 def OverridePathToWprArchive(self, original_wpr_path):
71 """Sets the original WPR archive path's to be used.
72
73 Args:
74 original_wpr_path: Path of the original WPR archive to be used.
75 """
76 self._original_wpr_task = \
77 self.CreateStaticTask('common/webpages.wpr', original_wpr_path)
78
79 def PopulateWprRecordingTask(self):
80 """Records the original WPR archive."""
81 @self.RegisterTask('common/webpages.wpr')
82 def BuildOriginalWpr():
83 common_util.EnsureParentDirectoryExists(BuildOriginalWpr.path)
84 runner = self._CreateSandwichRunner()
85 runner.wpr_archive_path = BuildOriginalWpr.path
86 runner.wpr_record = True
87 runner.Run()
88
89 self._original_wpr_task = BuildOriginalWpr
90
91 def PopulateCommonPipelines(self):
92 """Creates necessary tasks to produce initial cache archive.
93
94 Also creates a task for producing a json file with a mapping of URLs to
95 subresources (urls-resources.json).
96
97 Here is the full dependency tree for the returned task:
98 common/cache-ref-validation.log
99 depends on: common/cache-ref.zip
100 depends on: common/webpages-patched.wpr
101 depends on: common/webpages.wpr
102 depends on: common/urls-resources.json
103 depends on: common/urls-resources-run/
104 depends on: common/webpages.wpr
105
106 Returns:
107 The last task of the pipeline.
108 """
109 @self.RegisterTask('common/webpages-patched.wpr', [self._original_wpr_task])
110 def BuildPatchedWpr():
111 common_util.EnsureParentDirectoryExists(BuildPatchedWpr.path)
112 shutil.copyfile(self._original_wpr_task.path, BuildPatchedWpr.path)
113 sandwich_misc.PatchWpr(BuildPatchedWpr.path)
114
115 @self.RegisterTask('common/cache-ref.zip', [BuildPatchedWpr])
116 def BuildReferenceCache():
117 runner = self._CreateSandwichRunner()
118 runner.wpr_archive_path = BuildPatchedWpr.path
119 runner.cache_archive_path = BuildReferenceCache.path
120 runner.cache_operation = 'save'
121 runner.Run()
122
123 @self.RegisterTask('common/subresources-for-urls-run/',
124 dependencies=[self._original_wpr_task])
125 def UrlsResourcesRun():
126 runner = self._CreateSandwichRunner()
127 runner.wpr_archive_path = self._original_wpr_task.path
128 runner.cache_operation = 'clear'
129 runner.trace_output_directory = UrlsResourcesRun.path
130 runner.Run()
131
132 @self.RegisterTask('common/subresources-for-urls.json', [UrlsResourcesRun])
133 def ListUrlsResources():
134 json_content = sandwich_misc.ListResourcesUrls(UrlsResourcesRun.path)
135 with open(ListUrlsResources.path, 'w') as output:
136 json.dump(json_content, output)
137
138 @self.RegisterTask('common/cache-ref-validation.log',
139 [BuildReferenceCache, ListUrlsResources])
140 def ValidateReferenceCache():
141 json_content = json.load(open(ListUrlsResources.path))
142 ref_urls = set()
143 for urls in json_content.values():
144 ref_urls.update(set(urls))
145 sandwich_misc.ValidateCacheArchiveContent(
146 ref_urls, BuildReferenceCache.path)
147
148 self._patched_wpr_task = BuildPatchedWpr
149 self._reference_cache_task = BuildReferenceCache
150 self._subresources_for_urls_run_task = UrlsResourcesRun
151 self._subresources_for_urls_task = ListUrlsResources
152
153 self._default_final_tasks.append(ValidateReferenceCache)
154 return ValidateReferenceCache
155
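For context, a minimal sketch of how the common pipeline might be wired up, with illustrative paths and values (not taken from the actual sandwich scripts):

    # Build the task graph up to the reference-cache validation task.
    builder = SandwichTaskBuilder(output_directory='/tmp/sandwich-output',  # illustrative
                                  job_path='/tmp/sandwich-job.json',        # illustrative
                                  url_repeat=10)
    builder.PopulateWprRecordingTask()   # or builder.OverridePathToWprArchive(wpr_path)
    validation_task = builder.PopulateCommonPipelines()
    # builder.default_final_tasks now ends with the validation task.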
156 def PopulateLoadBenchmark(self, subresource_discoverer,
157 runner_transformer_name='dummy',
158 runner_transformer=lambda arg: None):
159 """Populate the a benchmark's pipeline from it's setup tasks.
160
161 Args:
162 subresource_discoverer: Name of a subresource discoverer (must be one of
163 sandwich_misc.SUBRESOURCE_DISCOVERERS).
164 runner_transformer_name: Name of the runner transformer, used to generate
165 task names.
166 runner_transformer: A closure that is applied once to the SandwichRunner
167 before it is run.
168
169 Returns:
170 The last task_manager.Task of the pipeline.
171 """
172 assert subresource_discoverer in sandwich_misc.SUBRESOURCE_DISCOVERERS
173 assert 'shared' not in sandwich_misc.SUBRESOURCE_DISCOVERERS
174 shared_task_prefix = os.path.join('shared', subresource_discoverer)
175 task_prefix = os.path.join(runner_transformer_name,
pasko 2016/04/18 09:36:13 why os.path.join? I thought all tasks are immediat
gabadie 2016/04/19 17:39:49 With the growing number of benchmarks, I have added a
pasko 2016/04/20 18:57:22 Ack. SG, I have not developed my personal taste on
176 subresource_discoverer)
177
178 @self.RegisterTask(shared_task_prefix + '-setup.json', merge=True,
179 dependencies=[self._subresources_for_urls_task])
180 def SetupBenchmark():
181 trace_path = os.path.join(
182 self._subresources_for_urls_run_task.path, '0/trace.json')
183 whitelisted_urls = sandwich_misc.ExtractDiscoverableUrls(
184 trace_path, subresource_discoverer)
185
186 urls_resources = json.load(open(self._subresources_for_urls_task.path))
187 assert len(urls_resources) == 1, \
188 "This recipe is not ready for multiple urls."
189 url = urls_resources.keys()[0]
190 url_resources = urls_resources[url]
191 common_util.EnsureParentDirectoryExists(SetupBenchmark.path)
192 with open(SetupBenchmark.path, 'w') as output:
193 json.dump({
194 'cache_whitelist': [url for url in whitelisted_urls],
195 'url_resources': url_resources,
196 }, output)
197
198 @self.RegisterTask(shared_task_prefix + '-cache.zip', merge=True,
199 dependencies=[
200 SetupBenchmark, self._reference_cache_task])
201 def BuildBenchmarkCacheArchive():
202 setup = json.load(open(SetupBenchmark.path))
203 chrome_cache.ApplyUrlWhitelistToCacheArchive(
204 cache_archive_path=self._reference_cache_task.path,
205 whitelisted_urls=setup['cache_whitelist'],
206 output_cache_archive_path=BuildBenchmarkCacheArchive.path)
207
208 @self.RegisterTask(task_prefix + '-run/',
209 dependencies=[BuildBenchmarkCacheArchive])
210 def RunBenchmark():
211 runner = self._CreateSandwichRunner()
212 # runner.record_video = True
213 runner.job_repeat = self._url_repeat
214 runner_transformer(runner)
215 runner.wpr_archive_path = self._patched_wpr_task.path
216 runner.wpr_out_log_path = os.path.join(RunBenchmark.path, 'wpr.log')
217 runner.cache_archive_path = BuildBenchmarkCacheArchive.path
218 runner.cache_operation = 'push'
219 runner.trace_output_directory = RunBenchmark.path
220 runner.Run()
221
222 @self.RegisterTask(task_prefix + '-metrics.csv',
223 dependencies=[RunBenchmark])
224 def ExtractMetrics():
225 sandwich_misc.VerifyBenchmarkOutputDirectory(
226 SetupBenchmark.path, RunBenchmark.path)
227 trace_metrics_list = sandwich_metrics.PullMetricsFromOutputDirectory(
228 RunBenchmark.path)
229 trace_metrics_list.sort(key=lambda e: e['id'])
230 with open(ExtractMetrics.path, 'w') as csv_file:
231 writer = csv.DictWriter(csv_file,
232 fieldnames=sandwich_metrics.CSV_FIELD_NAMES)
233 writer.writeheader()
234 for trace_metrics in trace_metrics_list:
235 writer.writerow(trace_metrics)
236
237 self._default_final_tasks.append(ExtractMetrics)
238 return ExtractMetrics
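Continuing the sketch above, each benchmark variant is registered with a subresource discoverer and an optional runner transformer; its shared tasks go under 'shared/<discoverer>-...' and its per-variant tasks under '<condition>/<discoverer>-...', which is the layout discussed in the os.path.join comment thread. The condition and discoverer names below are illustrative assumptions:

    # Register setup/cache/run/metrics tasks for one benchmark variant.
    condition = 'Regular2G'  # assumed key of emulation.NETWORK_CONDITIONS
    discoverer = 'parser'    # assumed entry of sandwich_misc.SUBRESOURCE_DISCOVERERS
    metrics_task = builder.PopulateLoadBenchmark(
        subresource_discoverer=discoverer,
        runner_transformer_name=condition,
        runner_transformer=NetworkSimulationTransformer(condition))
    # builder.default_final_tasks now also contains the '-metrics.csv' task.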