OLD | NEW |
1 # Copyright 2013 The Chromium Authors. All rights reserved. | 1 # Copyright 2014 The Chromium Authors. All rights reserved. |
2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
4 | 4 |
5 import copy | 5 import copy |
6 import datetime | 6 import datetime |
7 import logging | 7 import logging |
8 import os | 8 import os |
9 import re | 9 import re |
10 import time | 10 import time |
11 | 11 |
| 12 from lib.dump import Dump |
12 from lib.exceptions import EmptyDumpException, InvalidDumpException | 13 from lib.exceptions import EmptyDumpException, InvalidDumpException |
13 from lib.exceptions import ObsoleteDumpVersionException, ParsingException | 14 from lib.exceptions import ObsoleteDumpVersionException, ParsingException |
14 from lib.pageframe import PageFrame | 15 from lib.pageframe import PageFrame |
15 from lib.range_dict import ExclusiveRangeDict | 16 from lib.range_dict import ExclusiveRangeDict |
16 from lib.symbol import procfs | 17 from lib.symbol import procfs |
17 | 18 |
18 | 19 |
19 LOGGER = logging.getLogger('dmprof') | 20 LOGGER = logging.getLogger('dmprof') |
20 VIRTUAL, COMMITTED, ALLOC_COUNT, FREE_COUNT, _AT, BUCKET_ID = range(6) | 21 VIRTUAL, COMMITTED, ALLOC_COUNT, FREE_COUNT, _AT, BUCKET_ID = range(6) |
21 | 22 |
(...skipping 15 matching lines...) |
37 | 38 |
38 # DUMP_DEEP_5 doesn't separate sections for malloc and mmap. | 39 # DUMP_DEEP_5 doesn't separate sections for malloc and mmap. |
39 # malloc and mmap are identified in bucket files. | 40 # malloc and mmap are identified in bucket files. |
40 # DUMP_DEEP_5 should be processed by POLICY_DEEP_4. | 41 # DUMP_DEEP_5 should be processed by POLICY_DEEP_4. |
41 DUMP_DEEP_5 = 'DUMP_DEEP_5' | 42 DUMP_DEEP_5 = 'DUMP_DEEP_5' |
42 | 43 |
43 # DUMP_DEEP_6 adds a mmap list to DUMP_DEEP_5. | 44 # DUMP_DEEP_6 adds a mmap list to DUMP_DEEP_5. |
44 DUMP_DEEP_6 = 'DUMP_DEEP_6' | 45 DUMP_DEEP_6 = 'DUMP_DEEP_6' |
45 | 46 |
46 | 47 |
47 class Dump(object): | 48 class DeepDump(Dump): |
48 """Represents a heap profile dump.""" | 49 """Represents a heap profile dump.""" |
49 | 50 |
50 _PATH_PATTERN = re.compile(r'^(.*)\.([0-9]+)\.([0-9]+)\.heap$') | 51 _PATH_PATTERN = re.compile(r'^(.*)\.([0-9]+)\.([0-9]+)\.heap$') |
51 | 52 |
52 _HOOK_PATTERN = re.compile( | 53 _HOOK_PATTERN = re.compile( |
53 r'^ ([ \(])([a-f0-9]+)([ \)])-([ \(])([a-f0-9]+)([ \)])\s+' | 54 r'^ ([ \(])([a-f0-9]+)([ \)])-([ \(])([a-f0-9]+)([ \)])\s+' |
54 r'(hooked|unhooked)\s+(.+)$', re.IGNORECASE) | 55 r'(hooked|unhooked)\s+(.+)$', re.IGNORECASE) |
55 | 56 |
56 _HOOKED_PATTERN = re.compile(r'(?P<TYPE>.+ )?(?P<COMMITTED>[0-9]+) / ' | 57 _HOOKED_PATTERN = re.compile(r'(?P<TYPE>.+ )?(?P<COMMITTED>[0-9]+) / ' |
57 '(?P<RESERVED>[0-9]+) @ (?P<BUCKETID>[0-9]+)') | 58 '(?P<RESERVED>[0-9]+) @ (?P<BUCKETID>[0-9]+)') |
58 _UNHOOKED_PATTERN = re.compile(r'(?P<TYPE>.+ )?(?P<COMMITTED>[0-9]+) / ' | 59 _UNHOOKED_PATTERN = re.compile(r'(?P<TYPE>.+ )?(?P<COMMITTED>[0-9]+) / ' |
59 '(?P<RESERVED>[0-9]+)') | 60 '(?P<RESERVED>[0-9]+)') |
60 | 61 |
61 _OLD_HOOKED_PATTERN = re.compile(r'(?P<TYPE>.+) @ (?P<BUCKETID>[0-9]+)') | 62 _OLD_HOOKED_PATTERN = re.compile(r'(?P<TYPE>.+) @ (?P<BUCKETID>[0-9]+)') |
62 _OLD_UNHOOKED_PATTERN = re.compile(r'(?P<TYPE>.+) (?P<COMMITTED>[0-9]+)') | 63 _OLD_UNHOOKED_PATTERN = re.compile(r'(?P<TYPE>.+) (?P<COMMITTED>[0-9]+)') |
63 | 64 |
64 _TIME_PATTERN_FORMAT = re.compile( | 65 _TIME_PATTERN_FORMAT = re.compile( |
65 r'^Time: ([0-9]+/[0-9]+/[0-9]+ [0-9]+:[0-9]+:[0-9]+)(\.[0-9]+)?') | 66 r'^Time: ([0-9]+/[0-9]+/[0-9]+ [0-9]+:[0-9]+:[0-9]+)(\.[0-9]+)?') |
66 _TIME_PATTERN_SECONDS = re.compile(r'^Time: ([0-9]+)$') | 67 _TIME_PATTERN_SECONDS = re.compile(r'^Time: ([0-9]+)$') |
67 | 68 |
68 def __init__(self, path, modified_time): | 69 def __init__(self, path, modified_time): |
| 70 super(DeepDump, self).__init__() |
69 self._path = path | 71 self._path = path |
70 matched = self._PATH_PATTERN.match(path) | 72 matched = self._PATH_PATTERN.match(path) |
71 self._pid = int(matched.group(2)) | 73 self._pid = int(matched.group(2)) |
72 self._count = int(matched.group(3)) | 74 self._count = int(matched.group(3)) |
73 self._time = modified_time | 75 self._time = modified_time |
74 self._map = {} | 76 self._map = {} |
75 self._procmaps = ExclusiveRangeDict(ProcMapsEntryAttribute) | 77 self._procmaps = ExclusiveRangeDict(ProcMapsEntryAttribute) |
76 self._stacktrace_lines = [] | 78 self._stacktrace_lines = [] |
77 self._global_stats = {} # used only in apply_policy | 79 self._global_stats = {} # used only in apply_policy |
78 | 80 |
(...skipping 321 matching lines...) |
400 True if the given stacktrace_line is valid. | 402 True if the given stacktrace_line is valid. |
401 """ | 403 """ |
402 words = stacktrace_line.split() | 404 words = stacktrace_line.split() |
403 if len(words) < BUCKET_ID + 1: | 405 if len(words) < BUCKET_ID + 1: |
404 return False | 406 return False |
405 if words[BUCKET_ID - 1] != '@': | 407 if words[BUCKET_ID - 1] != '@': |
406 return False | 408 return False |
407 return True | 409 return True |
408 | 410 |
409 | 411 |
410 class DumpList(object): | |
411 """Represents a sequence of heap profile dumps. | |
412 | |
413 Individual dumps are loaded into memory lazily as the sequence is accessed, | |
414 either while being iterated through or randomly accessed. Loaded dumps are | |
415 not cached, meaning a newly loaded Dump object is returned every time an | |
416 element in the list is accessed. | |
417 """ | |
418 | |
419 def __init__(self, dump_path_list): | |
420 self._dump_path_list = dump_path_list | |
421 | |
422 @staticmethod | |
423 def load(path_list): | |
424 return DumpList(path_list) | |
425 | |
426 def __len__(self): | |
427 return len(self._dump_path_list) | |
428 | |
429 def __iter__(self): | |
430 for dump in self._dump_path_list: | |
431 yield Dump.load(dump) | |
432 | |
433 def __getitem__(self, index): | |
434 return Dump.load(self._dump_path_list[index]) | |
435 | |
436 | |
437 class ProcMapsEntryAttribute(ExclusiveRangeDict.RangeAttribute): | 412 class ProcMapsEntryAttribute(ExclusiveRangeDict.RangeAttribute): |
438 """Represents an entry of /proc/maps in range_dict.ExclusiveRangeDict.""" | 413 """Represents an entry of /proc/maps in range_dict.ExclusiveRangeDict.""" |
439 _DUMMY_ENTRY = procfs.ProcMapsEntry( | 414 _DUMMY_ENTRY = procfs.ProcMapsEntry( |
440 0, # begin | 415 0, # begin |
441 0, # end | 416 0, # end |
442 '-', # readable | 417 '-', # readable |
443 '-', # writable | 418 '-', # writable |
444 '-', # executable | 419 '-', # executable |
445 '-', # private | 420 '-', # private |
446 0, # offset | 421 0, # offset |
(...skipping 34 matching lines...) |
481 Returns: | 456 Returns: |
482 A pair of the line number reached after skipping, and a boolean | 457 A pair of the line number reached after skipping, and a boolean |
483 which is True if a line for which skipping_condition is False | 458 which is True if a line for which skipping_condition is False |
484 was found. | 459 was found. |
485 """ | 460 """ |
486 while skipping_condition(index): | 461 while skipping_condition(index): |
487 index += 1 | 462 index += 1 |
488 if index >= max_index: | 463 if index >= max_index: |
489 return index, False | 464 return index, False |
490 return index, True | 465 return index, True |
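
For reference, a minimal standalone sketch of the skip loop shown just above. The real method's name and signature sit in the lines elided from this chunk, so skip_while and the sample data below are hypothetical:

def skip_while(index, max_index, skipping_condition):
  # Advance index while the condition holds; report whether a
  # non-matching line was found before reaching max_index.
  while skipping_condition(index):
    index += 1
    if index >= max_index:
      return index, False
  return index, True

lines = ['SECTION_A:', '  entry', '  entry', 'SECTION_B:']
index, found = skip_while(1, len(lines),
                          lambda i: lines[i].startswith('  '))
# index == 3 and found is True: lines[3] is the first non-indented line.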
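A similar sketch of how DeepDump's hook patterns decompose one dump line. The patterns are copied from the class above; the sample line and its numbers are fabricated to fit them, not taken from a real heap dump:

import re

_HOOK_PATTERN = re.compile(
    r'^ ([ \(])([a-f0-9]+)([ \)])-([ \(])([a-f0-9]+)([ \)])\s+'
    r'(hooked|unhooked)\s+(.+)$', re.IGNORECASE)
_HOOKED_PATTERN = re.compile(r'(?P<TYPE>.+ )?(?P<COMMITTED>[0-9]+) / '
                             '(?P<RESERVED>[0-9]+) @ (?P<BUCKETID>[0-9]+)')

# Fabricated example: a hooked mmap region with 8192 committed bytes out of
# 12288 reserved bytes, attributed to bucket 123.
line = ' (7f0000000000)-(7f0000002000) hooked mmap 8192 / 12288 @ 123'

hook = _HOOK_PATTERN.match(line)
assert hook.group(7) == 'hooked'
hooked = _HOOKED_PATTERN.match(hook.group(8))
assert hooked.group('TYPE') == 'mmap '
assert int(hooked.group('COMMITTED')) == 8192
assert int(hooked.group('RESERVED')) == 12288
assert int(hooked.group('BUCKETID')) == 123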