Index: tools/resource_prefetch_predictor/prefetch_predictor_tool.py |
diff --git a/tools/resource_prefetch_predictor/prefetch_predictor_tool.py b/tools/resource_prefetch_predictor/prefetch_predictor_tool.py |
index 9680c657cf68394793183f2140f63a799e67b53b..436e0216e787ee7b8ebbb98a80b605b239b75777 100755 |
--- a/tools/resource_prefetch_predictor/prefetch_predictor_tool.py |
+++ b/tools/resource_prefetch_predictor/prefetch_predictor_tool.py |
@@ -12,16 +12,17 @@ adb pull \ |
""" |
import argparse |
+import datetime |
import sqlite3 |
import os |
- |
-from resource_prefetch_predictor_pb2 import (PrefetchData, ResourceData) |
+import sys |
class Entry(object): |
"""Represents an entry in the predictor database.""" |
def __init__( |
self, primary_key, proto_buffer): |
+ from resource_prefetch_predictor_pb2 import (PrefetchData, ResourceData) |
self.primary_key = primary_key |
self.prefetch_data = PrefetchData() |
self.prefetch_data.ParseFromString(proto_buffer) |
@@ -36,6 +37,7 @@ class Entry(object): |
Return: |
The resource score (int). |
""" |
+ from resource_prefetch_predictor_pb2 import (PrefetchData, ResourceData) |
priority_multiplier = 1 |
type_multiplier = 1 |
@@ -78,6 +80,32 @@ class Entry(object): |
continue |
self._PrettyPrintResource(resource) |
+def DumpOriginDatabaseRow(domain, primary_key, proto): |
+ from resource_prefetch_predictor_pb2 import OriginData |
+ entry = OriginData() |
+ entry.ParseFromString(proto) |
+ # For the offset, see kWindowsEpochDeltaMicroseconds in |
+ # base/time/time_posix.cc. |
+ last_visit_timestamp = int(entry.last_visit_time / 1e6 - 11644473600) |
+ formatted_last_visit_time = datetime.datetime.utcfromtimestamp( |
+ last_visit_timestamp).strftime('%Y-%m-%d %H:%M:%S') |
+ print '''host: %s |
+last_visit_time: %s |
+origins:''' % (entry.host, formatted_last_visit_time) |
+ for origin_stat in entry.origins: |
+ print ''' origin: %s |
+ number_of_hits: %d |
+ number_of_misses: %d |
+ consecutive_misses: %d |
+ average_position: %f |
+ always_access_network: %s |
+ accessed_network: %s |
+''' % (origin_stat.origin, origin_stat.number_of_hits, |
+ origin_stat.number_of_misses, origin_stat.consecutive_misses, |
+ origin_stat.average_position, origin_stat.always_access_network, |
+ origin_stat.accessed_network) |
+ |
+ |
# The version of python sqlite3 library we have in Ubuntu 14.04 LTS doesn't |
# support views but command line util does. |
# TODO(alexilin): get rid of this when python sqlite3 adds view support. |
@@ -88,22 +116,42 @@ def CreateCompatibleDatabaseCopy(filename): |
subprocess.call(['sqlite3', tmpfile, 'DROP VIEW MmapStatus']) |
return tmpfile |
-def DatabaseStats(filename, domain): |
+def DatabaseStats(filename, host): |
+ query_template = 'SELECT key, proto from %s' |
connection = sqlite3.connect(filename) |
c = connection.cursor() |
- query = ('SELECT key, proto FROM resource_prefetch_predictor_host') |
+ print 'HOST DATABASE' |
+ query = query_template % 'resource_prefetch_predictor_host' |
entries = [Entry.FromRow(row) for row in c.execute(query)] |
for x in entries: |
- if domain is None or x.primary_key == domain: |
+ if host is None or x.primary_key == host: |
x.PrettyPrintCandidates() |
+ print '\n\nORIGIN DATABASE' |
+ query = query_template % 'resource_prefetch_predictor_origin' |
+  rows = [row for row in c.execute(query) |
+          if host is None or row[0] == host] |
+ for row in rows: |
+ DumpOriginDatabaseRow(host, *row) |
+ |
+ |
+def _AddProtocolBuffersPath(build_dir): |
+ assert os.path.isdir(build_dir) |
+ proto_dir = os.path.join( |
+ build_dir, os.path.join('pyproto', 'chrome', 'browser', 'predictors')) |
+ sys.path.append(proto_dir) |
+ |
def main(): |
parser = argparse.ArgumentParser() |
parser.add_argument('-f', dest='database_filename', required=True, |
help='Path to the database') |
parser.add_argument('-d', dest='domain', default=None, help='Domain') |
+ parser.add_argument('--build-dir', dest='build_dir', required=True, |
+ help='Path to the build directory.') |
args = parser.parse_args() |
+ _AddProtocolBuffersPath(args.build_dir) |
+ |
try: |
database_copy = CreateCompatibleDatabaseCopy(args.database_filename) |
DatabaseStats(database_copy, args.domain) |