Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(19)

Side by Side Diff: tools/deep_memory_profiler/dmprof.py

Issue 15035009: Estimates a path in host from a corresponding path in Android device. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Created 7 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « no previous file | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 # Copyright (c) 2012 The Chromium Authors. All rights reserved. 1 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be 2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file. 3 # found in the LICENSE file.
4 4
5 """The deep heap profiler script for Chrome.""" 5 """The deep heap profiler script for Chrome."""
6 6
7 import copy 7 import copy
8 import datetime 8 import datetime
9 import json 9 import json
10 import logging 10 import logging
(...skipping 23 matching lines...) Expand all
34 34
35 BUCKET_ID = 5 35 BUCKET_ID = 5
36 VIRTUAL = 0 36 VIRTUAL = 0
37 COMMITTED = 1 37 COMMITTED = 1
38 ALLOC_COUNT = 2 38 ALLOC_COUNT = 2
39 FREE_COUNT = 3 39 FREE_COUNT = 3
40 NULL_REGEX = re.compile('') 40 NULL_REGEX = re.compile('')
41 41
42 LOGGER = logging.getLogger('dmprof') 42 LOGGER = logging.getLogger('dmprof')
43 POLICIES_JSON_PATH = os.path.join(BASE_PATH, 'policies.json') 43 POLICIES_JSON_PATH = os.path.join(BASE_PATH, 'policies.json')
44 CHROME_SRC_PATH = os.path.join(BASE_PATH, os.pardir, os.pardir)
44 45
45 46
46 # Heap Profile Dump versions 47 # Heap Profile Dump versions
47 48
48 # DUMP_DEEP_[1-4] are obsolete. 49 # DUMP_DEEP_[1-4] are obsolete.
49 # DUMP_DEEP_2+ distinguish mmap regions and malloc chunks. 50 # DUMP_DEEP_2+ distinguish mmap regions and malloc chunks.
50 # DUMP_DEEP_3+ don't include allocation functions in their stack dumps. 51 # DUMP_DEEP_3+ don't include allocation functions in their stack dumps.
51 # DUMP_DEEP_4+ support comments with '#' and global stats "nonprofiled-*". 52 # DUMP_DEEP_4+ support comments with '#' and global stats "nonprofiled-*".
52 # DUMP_DEEP_[1-2] should be processed by POLICY_DEEP_1. 53 # DUMP_DEEP_[1-2] should be processed by POLICY_DEEP_1.
53 # DUMP_DEEP_[3-4] should be processed by POLICY_DEEP_2 or POLICY_DEEP_3. 54 # DUMP_DEEP_[3-4] should be processed by POLICY_DEEP_2 or POLICY_DEEP_3.
(...skipping 989 matching lines...) Expand 10 before | Expand all | Expand 10 after
1043 1044
1044 def __getitem__(self, index): 1045 def __getitem__(self, index):
1045 return self._dump_list[index] 1046 return self._dump_list[index]
1046 1047
1047 1048
1048 class Command(object): 1049 class Command(object):
1049 """Subclasses are a subcommand for this executable. 1050 """Subclasses are a subcommand for this executable.
1050 1051
1051 See COMMANDS in main(). 1052 See COMMANDS in main().
1052 """ 1053 """
1054 _DEVICE_LIB_PATTERN = re.compile(r'(/data/app-lib/.*-[0-9])/.*')
1055
1053 def __init__(self, usage): 1056 def __init__(self, usage):
1054 self._parser = optparse.OptionParser(usage) 1057 self._parser = optparse.OptionParser(usage)
1055 1058
1056 @staticmethod 1059 @staticmethod
1057 def load_basic_files( 1060 def load_basic_files(
1058 dump_path, multiple, no_dump=False, fake_directories=None): 1061 dump_path, multiple, no_dump=False, fake_directories=None):
1059 prefix = Command._find_prefix(dump_path) 1062 prefix = Command._find_prefix(dump_path)
1060 symbol_data_sources = SymbolDataSources(prefix, fake_directories or {}) 1063
1064 # Auto-estimating the binary with symbol information located on the host
bulach 2013/05/14 14:44:44 nit, I think this would be clearer: # Translate th
Dai Mikurube (NOT FULLTIME) 2013/05/14 17:21:47 To make it clear that it's an estimation, 1) Rena
1065 # from the binary path on the Android device.
1066 if not fake_directories:
1067 fake_directories = {}
1068 lib_on_device = Command._suppose_fake_directories(prefix)
bulach 2013/05/14 14:44:44 nit: perhaps device_lib = Command._get_device_lib(
Dai Mikurube (NOT FULLTIME) 2013/05/14 17:21:47 Finally, it's _estimate_alternative_dirs.
1069 if lib_on_device:
1070 fake_directories[lib_on_device] = os.path.join(
1071 CHROME_SRC_PATH, 'out', 'Debug', 'lib')
1072 if fake_directories:
1073 for device, host in fake_directories.iteritems():
1074 LOGGER.info('Assuming %s on device as %s on host' % (device, host))
1075 symbol_data_sources = SymbolDataSources(prefix, fake_directories)
1061 symbol_data_sources.prepare() 1076 symbol_data_sources.prepare()
1062 bucket_set = BucketSet() 1077 bucket_set = BucketSet()
1063 bucket_set.load(prefix) 1078 bucket_set.load(prefix)
1064 if not no_dump: 1079 if not no_dump:
1065 if multiple: 1080 if multiple:
1066 dump_list = DumpList.load(Command._find_all_dumps(dump_path)) 1081 dump_list = DumpList.load(Command._find_all_dumps(dump_path))
1067 else: 1082 else:
1068 dump = Dump.load(dump_path) 1083 dump = Dump.load(dump_path)
1069 symbol_mapping_cache = SymbolMappingCache() 1084 symbol_mapping_cache = SymbolMappingCache()
1070 with open(prefix + '.cache.function', 'a+') as cache_f: 1085 with open(prefix + '.cache.function', 'a+') as cache_f:
(...skipping 14 matching lines...) Expand all
1085 elif multiple: 1100 elif multiple:
1086 return (bucket_set, dump_list) 1101 return (bucket_set, dump_list)
1087 else: 1102 else:
1088 return (bucket_set, dump) 1103 return (bucket_set, dump)
1089 1104
1090 @staticmethod 1105 @staticmethod
1091 def _find_prefix(path): 1106 def _find_prefix(path):
1092 return re.sub('\.[0-9][0-9][0-9][0-9]\.heap', '', path) 1107 return re.sub('\.[0-9][0-9][0-9][0-9]\.heap', '', path)
1093 1108
@staticmethod
def _suppose_fake_directories(prefix):
    """Guess the on-device app-lib directory from the recorded proc maps.

    Reads '<prefix>.maps', matches every mapped region's name against
    _DEVICE_LIB_PATTERN, and collects the candidate library directories.

    Returns:
        The single candidate directory path, or None when there are zero
        or multiple candidates (the guess would be ambiguous).
    """
    candidates = set()
    with open(prefix + '.maps') as maps_f:
        loaded_maps = proc_maps.ProcMaps.load(maps_f)
        for entry in loaded_maps:
            match = Command._DEVICE_LIB_PATTERN.match(entry.as_dict()['name'])
            if match:
                candidates.add(match.group(1))
    # Only trust an unambiguous, unique match.
    if len(candidates) == 1:
        return candidates.pop()
    return None
1124
1125 @staticmethod
1095 def _find_all_dumps(dump_path): 1126 def _find_all_dumps(dump_path):
1096 prefix = Command._find_prefix(dump_path) 1127 prefix = Command._find_prefix(dump_path)
1097 dump_path_list = [dump_path] 1128 dump_path_list = [dump_path]
1098 1129
1099 n = int(dump_path[len(dump_path) - 9 : len(dump_path) - 5]) 1130 n = int(dump_path[len(dump_path) - 9 : len(dump_path) - 5])
1100 n += 1 1131 n += 1
1101 while True: 1132 while True:
1102 p = '%s.%04d.heap' % (prefix, n) 1133 p = '%s.%04d.heap' % (prefix, n)
1103 if os.path.exists(p): 1134 if os.path.exists(p):
1104 dump_path_list.append(p) 1135 dump_path_list.append(p)
(...skipping 665 matching lines...) Expand 10 before | Expand all | Expand 10 after
1770 errorcode = COMMANDS[action]().do(sys.argv) 1801 errorcode = COMMANDS[action]().do(sys.argv)
1771 except ParsingException, e: 1802 except ParsingException, e:
1772 errorcode = 1 1803 errorcode = 1
1773 sys.stderr.write('Exit by parsing error: %s\n' % e) 1804 sys.stderr.write('Exit by parsing error: %s\n' % e)
1774 1805
1775 return errorcode 1806 return errorcode
1776 1807
1777 1808
1778 if __name__ == '__main__': 1809 if __name__ == '__main__':
1779 sys.exit(main()) 1810 sys.exit(main())
OLDNEW
« no previous file with comments | « no previous file | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698