Index: tools/android/loading/run_sandwich.py
diff --git a/tools/android/loading/run_sandwich.py b/tools/android/loading/run_sandwich.py
index 242cfbf49aca709e1eb5aa11eb5b7b5c5165aea6..9274819a2c4823824ee590f27828e15d4f6ffc97 100755
--- a/tools/android/loading/run_sandwich.py
+++ b/tools/android/loading/run_sandwich.py
@@ -12,10 +12,12 @@ TODO(pasko): implement cache preparation and WPR.
 """
 import argparse
+from datetime import datetime
 import json
 import logging
 import os
 import shutil
+import subprocess
 import sys
 import tempfile
 import time
@@ -59,6 +61,9 @@ _CHROME_PACKAGE = (
 # operations, such as opening the launcher activity.
 _TIME_TO_DEVICE_IDLE_SECONDS = 2
+# Cache directory's path on the device.
+_REMOTE_CACHE_DIRECTORY = '/data/data/' + _CHROME_PACKAGE + '/cache/Cache'
+
 def _ReadUrlsFromJobDescription(job_name):
   """Retrieves the list of URLs associated with the job name."""
@@ -104,6 +109,17 @@ def _UpdateTimestampFromAdbStat(filename, stat):
   os.utime(filename, (stat.st_time, stat.st_time))
+def _AdbShell(adb, cmd):
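+  # Quote the argument list into a single command string for the device shell.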
+  adb.Shell(subprocess.list2cmdline(cmd))
+
+
+def _AdbUtime(adb, filename, timestamp):
+  """Adb equivalent of os.utime(filename, (timestamp, timestamp))
+  """
+  touch_stamp = datetime.fromtimestamp(timestamp).strftime('%Y%m%d.%H%M%S')
+  _AdbShell(adb, ['touch', '-t', touch_stamp, filename])
+
+
 def _PullBrowserCache(device):
   """Pulls the browser cache from the device and saves it locally.
@@ -114,14 +130,13 @@ def _PullBrowserCache(device):
     Temporary directory containing all the browser cache.
   """
   save_target = tempfile.mkdtemp(suffix='.cache')
-  cache_directory = '/data/data/' + _CHROME_PACKAGE + '/cache/Cache'
-  for filename, stat in device.adb.Ls(cache_directory):
+  for filename, stat in device.adb.Ls(_REMOTE_CACHE_DIRECTORY):
     if filename == '..':
       continue
     if filename == '.':
       cache_directory_stat = stat
       continue
-    original_file = os.path.join(cache_directory, filename)
+    original_file = os.path.join(_REMOTE_CACHE_DIRECTORY, filename)
     saved_file = os.path.join(save_target, filename)
     device.adb.Pull(original_file, saved_file)
     _UpdateTimestampFromAdbStat(saved_file, stat)
@@ -145,6 +160,35 @@ def _PullBrowserCache(device):
   return save_target
+def _PushBrowserCache(device, local_cache_path):
+  """Pushes the browser cache saved locally to the device.
+
+  Args:
+    device: Android device.
+    local_cache_path: Path of the local directory containing the cache.
+ """ |
+ # Clear previous cache. |
+ _AdbShell(device.adb, ['rm', '-rf', _REMOTE_CACHE_DIRECTORY]) |
+ _AdbShell(device.adb, ['mkdir', _REMOTE_CACHE_DIRECTORY]) |
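+  # The parent .../cache directory is assumed to already exist (Chrome creates
+  # it on first run), so a plain mkdir is enough.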
+
+  # Push cache content.
+  device.adb.Push(local_cache_path, _REMOTE_CACHE_DIRECTORY)
+
+  # Walk through the local cache to update mtime on the device.
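+  # (adb push is not relied upon to preserve timestamps, so they are mirrored
+  # explicitly from the local copy.)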
+  def MirrorMtime(local_path):
+    cache_relative_path = os.path.relpath(local_path, start=local_cache_path)
+    remote_path = os.path.join(_REMOTE_CACHE_DIRECTORY, cache_relative_path)
+    _AdbUtime(device.adb, remote_path, os.stat(local_path).st_mtime)
+
+  for local_directory_path, dirnames, filenames in os.walk(
+      local_cache_path, topdown=False):
+    for filename in filenames:
+      MirrorMtime(os.path.join(local_directory_path, filename))
+    for dirname in dirnames:
+      MirrorMtime(os.path.join(local_directory_path, dirname))
+  MirrorMtime(local_cache_path)
+
+
 def _ZipDirectoryContent(root_directory_path, archive_dest_path):
   """Zip a directory's content recursively with all the directories'
   timestamps preserved.
@@ -155,6 +199,10 @@ def _ZipDirectoryContent(root_directory_path, archive_dest_path):
   """
   with zipfile.ZipFile(archive_dest_path, 'w') as zip_output:
     timestamps = {}
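+    # Record the root directory's own timestamps under '.' so they can be
+    # restored as well when the archive is extracted.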
+    root_directory_stats = os.stat(root_directory_path)
+    timestamps['.'] = {
+        'atime': root_directory_stats.st_atime,
+        'mtime': root_directory_stats.st_mtime}
     for directory_path, dirnames, filenames in os.walk(root_directory_path):
       for dirname in dirnames:
         subdirectory_path = os.path.join(directory_path, dirname)
@@ -210,6 +258,24 @@ def _UnzipDirectoryContent(archive_path, directory_dest_path):
       os.utime(output_path, (stats['atime'], stats['mtime']))
+def _CleanPreviousTraces(output_directories_path):
+  """Cleans previous traces from the output directory.
+
+  Args:
+    output_directories_path: Path of the output directory to remove previous
+        traces from.
+ """ |
+ for dirname in os.listdir(output_directories_path): |
+ directory_path = os.path.join(output_directories_path, dirname) |
+ if not os.path.isdir(directory_path): |
+ continue |
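+    # Only per-page-load trace directories, which have purely numeric names,
+    # are removed.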
+    try:
+      int(dirname)
+    except ValueError:
+      continue
+    shutil.rmtree(directory_path)
+
+
 def main():
   logging.basicConfig(level=logging.INFO)
   devil_chromium.Initialize()
@@ -221,35 +287,50 @@ def main():
                       help='Name of output directory to create.')
   parser.add_argument('--repeat', default=1, type=int,
                       help='How many times to run the job')
-  parser.add_argument('--save-cache', default=False,
-                      action='store_true',
-                      help='Clear HTTP cache before start,' +
-                      'save cache before exit.')
+  parser.add_argument('--cache-op',
+                      choices=['clear', 'save', 'push'],
+                      default='clear',
+                      help='Cache operation to perform before launching '
+                           'Chrome (default: clear).')
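+  # Typical workflow: a first run with --cache-op=save produces
+  # <output>/cache.zip, which later runs can reuse with --cache-op=push.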
   parser.add_argument('--wpr-archive', default=None, type=str,
                       help='Web page replay archive to load job\'s urls from.')
   parser.add_argument('--wpr-record', default=False, action='store_true',
                       help='Record web page replay archive.')
   args = parser.parse_args()
-  try:
-    os.makedirs(args.output)
-  except OSError:
-    logging.error('Cannot create directory for results: %s' % args.output)
-    raise
+  if not os.path.isdir(args.output):
+    try:
+      os.makedirs(args.output)
+    except OSError:
+      logging.error('Cannot create directory for results: %s' % args.output)
+      raise
+  else:
+    _CleanPreviousTraces(args.output)
   job_urls = _ReadUrlsFromJobDescription(args.job)
   device = device_utils.DeviceUtils.HealthyDevices()[0]
+  local_cache_archive_path = os.path.join(args.output, 'cache.zip')
+  local_cache_directory_path = None
+
+  if args.cache_op == 'push':
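+    # cache.zip must have been produced by a previous run with --cache-op=save.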
+    assert os.path.isfile(local_cache_archive_path)
+    local_cache_directory_path = tempfile.mkdtemp(suffix='.cache')
+    _UnzipDirectoryContent(local_cache_archive_path, local_cache_directory_path)
   with device_setup.WprHost(device,
                             args.wpr_archive,
                             args.wpr_record) as additional_flags:
     pages_loaded = 0
-    for iteration in xrange(args.repeat):
+    for _ in xrange(args.repeat):
      for url in job_urls:
+        if args.cache_op == 'push':
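+          # Chrome must not be running while its cache directory is replaced.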
+          device.KillAll(_CHROME_PACKAGE, quiet=True)
+          _PushBrowserCache(device, local_cache_directory_path)
         with device_setup.DeviceConnection(
             device=device,
             additional_flags=additional_flags) as connection:
-          if iteration == 0 and pages_loaded == 0 and args.save_cache:
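+          # With 'save', the cache is cleared only before the very first page
+          # load; with 'clear', it is cleared before every load.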
+          if ((pages_loaded == 0 and args.cache_op == 'save') or
+              args.cache_op == 'clear'):
             connection.ClearCache()
           page_track.PageTrack(connection)
           tracing_track = tracing.TracingTrack(connection,
@@ -261,7 +342,10 @@ def main():
           _SaveChromeTrace(tracing_track.ToJsonDict(), args.output,
                            str(pages_loaded))
-  if args.save_cache:
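+  # Delete the temporary directory that held the unpacked cache (push mode).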
+  if local_cache_directory_path:
+    shutil.rmtree(local_cache_directory_path)
+
+  if args.cache_op == 'save':
     # Move Chrome to background to allow it to flush the index.
     device.adb.Shell('am start com.google.android.launcher')
     time.sleep(_TIME_TO_DEVICE_IDLE_SECONDS)
@@ -269,8 +353,7 @@ def main():
     time.sleep(_TIME_TO_DEVICE_IDLE_SECONDS)
     cache_directory_path = _PullBrowserCache(device)
-    _ZipDirectoryContent(cache_directory_path,
-                         os.path.join(args.output, 'cache.zip'))
+    _ZipDirectoryContent(cache_directory_path, local_cache_archive_path)
     shutil.rmtree(cache_directory_path)