| OLD | NEW |
| 1 #! /usr/bin/env python | 1 #! /usr/bin/env python |
| 2 # Copyright 2016 The Chromium Authors. All rights reserved. | 2 # Copyright 2016 The Chromium Authors. All rights reserved. |
| 3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
| 4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
| 5 | 5 |
| 6 """Instructs Chrome to load series of web pages and reports results. | 6 """Instructs Chrome to load series of web pages and reports results. |
| 7 | 7 |
| 8 When running Chrome is sandwiched between preprocessed disk caches and | 8 When running Chrome is sandwiched between preprocessed disk caches and |
| 9 WebPageReplay serving all connections. | 9 WebPageReplay serving all connections. |
| 10 | 10 |
| (...skipping 142 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 153 parser.add_argument('--job', required=True, | 153 parser.add_argument('--job', required=True, |
| 154 help='JSON file with job description.') | 154 help='JSON file with job description.') |
| 155 parser.add_argument('--output', required=True, | 155 parser.add_argument('--output', required=True, |
| 156 help='Name of output directory to create.') | 156 help='Name of output directory to create.') |
| 157 parser.add_argument('--repeat', default=1, type=int, | 157 parser.add_argument('--repeat', default=1, type=int, |
| 158 help='How many times to run the job') | 158 help='How many times to run the job') |
| 159 parser.add_argument('--save-cache', default=False, | 159 parser.add_argument('--save-cache', default=False, |
| 160 action='store_true', | 160 action='store_true', |
| 161 help='Clear HTTP cache before start, ' + | 161 help='Clear HTTP cache before start, ' + |
| 162 'save cache before exit.') | 162 'save cache before exit.') |
| 163 parser.add_argument('--wpr-archive', default=None, type=str, |
| 164 help='Web page replay archive to load job\'s urls from.') |
| 165 parser.add_argument('--wpr-record', default=False, action='store_true', |
| 166 help='Record web page replay archive.') |
| 163 args = parser.parse_args() | 167 args = parser.parse_args() |
| 164 | 168 |
| 165 try: | 169 try: |
| 166 os.makedirs(args.output) | 170 os.makedirs(args.output) |
| 167 except OSError: | 171 except OSError: |
| 168 logging.error('Cannot create directory for results: %s' % args.output) | 172 logging.error('Cannot create directory for results: %s' % args.output) |
| 169 raise | 173 raise |
| 170 | 174 |
| 171 job_urls = _ReadUrlsFromJobDescription(args.job) | 175 job_urls = _ReadUrlsFromJobDescription(args.job) |
| 172 device = device_utils.DeviceUtils.HealthyDevices()[0] | 176 device = device_utils.DeviceUtils.HealthyDevices()[0] |
| 173 | 177 |
| 174 pages_loaded = 0 | 178 with device_setup.WprHost(device, |
| 175 for iteration in xrange(args.repeat): | 179 args.wpr_archive, |
| 176 for url in job_urls: | 180 args.wpr_record) as additional_flags: |
| 177 with device_setup.DeviceConnection(device) as connection: | 181 pages_loaded = 0 |
| 178 if iteration == 0 and pages_loaded == 0 and args.save_cache: | 182 for iteration in xrange(args.repeat): |
| 179 connection.ClearCache() | 183 for url in job_urls: |
| 180 page_track.PageTrack(connection) | 184 with device_setup.DeviceConnection( |
| 181 tracing_track = tracing.TracingTrack(connection, | 185 device=device, |
| 182 categories='blink,cc,netlog,renderer.scheduler,toplevel,v8') | 186 additional_flags=additional_flags) as connection: |
| 183 connection.SetUpMonitoring() | 187 if iteration == 0 and pages_loaded == 0 and args.save_cache: |
| 184 connection.SendAndIgnoreResponse('Page.navigate', {'url': url}) | 188 connection.ClearCache() |
| 185 connection.StartMonitoring() | 189 page_track.PageTrack(connection) |
| 186 pages_loaded += 1 | 190 tracing_track = tracing.TracingTrack(connection, |
| 187 _SaveChromeTrace(tracing_track.ToJsonDict(), args.output, | 191 categories='blink,cc,netlog,renderer.scheduler,toplevel,v8') |
| 188 str(pages_loaded)) | 192 connection.SetUpMonitoring() |
| 193 connection.SendAndIgnoreResponse('Page.navigate', {'url': url}) |
| 194 connection.StartMonitoring() |
| 195 pages_loaded += 1 |
| 196 _SaveChromeTrace(tracing_track.ToJsonDict(), args.output, |
| 197 str(pages_loaded)) |
| 189 | 198 |
| 190 if args.save_cache: | 199 if args.save_cache: |
| 191 # Move Chrome to background to allow it to flush the index. | 200 # Move Chrome to background to allow it to flush the index. |
| 192 device.adb.Shell('am start com.google.android.launcher') | 201 device.adb.Shell('am start com.google.android.launcher') |
| 193 time.sleep(_TIME_TO_DEVICE_IDLE_SECONDS) | 202 time.sleep(_TIME_TO_DEVICE_IDLE_SECONDS) |
| 194 device.KillAll(_CHROME_PACKAGE, quiet=True) | 203 device.KillAll(_CHROME_PACKAGE, quiet=True) |
| 195 time.sleep(_TIME_TO_DEVICE_IDLE_SECONDS) | 204 time.sleep(_TIME_TO_DEVICE_IDLE_SECONDS) |
| 196 _SaveBrowserCache(device, args.output) | 205 _SaveBrowserCache(device, args.output) |
| 197 | 206 |
| 198 | 207 |
| 199 if __name__ == '__main__': | 208 if __name__ == '__main__': |
| 200 sys.exit(main()) | 209 sys.exit(main()) |
| OLD | NEW |