OLD | NEW |
1 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 # Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
4 | 4 |
5 """The deep heap profiler script for Chrome.""" | 5 """The deep heap profiler script for Chrome.""" |
6 | 6 |
7 import copy | 7 import copy |
8 import datetime | 8 import datetime |
9 import json | 9 import json |
10 import logging | 10 import logging |
(...skipping 359 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
370 len(self._symbol_mapping_caches[symbol_type])) | 370 len(self._symbol_mapping_caches[symbol_type])) |
371 except IOError as e: | 371 except IOError as e: |
372 LOGGER.info('The symbol cache file is invalid: %s' % e) | 372 LOGGER.info('The symbol cache file is invalid: %s' % e) |
373 | 373 |
374 | 374 |
class Rule(object):
  """Represents one matching rule in a policy file.

  A rule carries a component name, the allocator type it applies to, and a
  set of optional regular expressions.  A pattern that is not given is kept
  as None and treated as "matches anything" by the policy-matching code.
  """

  def __init__(self,
               name,
               allocator_type,
               stackfunction_pattern=None,
               stacksourcefile_pattern=None,
               typeinfo_pattern=None,
               mappedpathname_pattern=None,
               mappedpermission_pattern=None):
    self._name = name
    self._allocator_type = allocator_type
    self._stackfunction_pattern = self._compile_or_none(stackfunction_pattern)
    self._stacksourcefile_pattern = self._compile_or_none(
        stacksourcefile_pattern)
    self._typeinfo_pattern = self._compile_or_none(typeinfo_pattern)
    self._mappedpathname_pattern = self._compile_or_none(
        mappedpathname_pattern)
    self._mappedpermission_pattern = self._compile_or_none(
        mappedpermission_pattern)

  @staticmethod
  def _compile_or_none(pattern):
    """Compiles |pattern| anchored at the end of the string, or None.

    The trailing \\Z makes a rule match the whole value rather than just
    a prefix of it.
    """
    if not pattern:
      return None
    return re.compile(pattern + r'\Z')

  @property
  def name(self):
    return self._name

  @property
  def allocator_type(self):
    return self._allocator_type

  @property
  def stackfunction_pattern(self):
    return self._stackfunction_pattern

  @property
  def stacksourcefile_pattern(self):
    return self._stacksourcefile_pattern

  @property
  def typeinfo_pattern(self):
    return self._typeinfo_pattern

  @property
  def mappedpathname_pattern(self):
    return self._mappedpathname_pattern

  @property
  def mappedpermission_pattern(self):
    return self._mappedpermission_pattern
421 | 440 |
422 class Policy(object): | 441 class Policy(object): |
423 """Represents a policy, a content of a policy file.""" | 442 """Represents a policy, a content of a policy file.""" |
424 | 443 |
425 def __init__(self, rules, version, components): | 444 def __init__(self, rules, version, components): |
426 self._rules = rules | 445 self._rules = rules |
427 self._version = version | 446 self._version = version |
428 self._components = components | 447 self._components = components |
429 | 448 |
430 @property | 449 @property |
(...skipping 22 matching lines...) Expand all Loading... |
453 if bucket.component_cache: | 472 if bucket.component_cache: |
454 return bucket.component_cache | 473 return bucket.component_cache |
455 | 474 |
456 stackfunction = bucket.symbolized_joined_stackfunction | 475 stackfunction = bucket.symbolized_joined_stackfunction |
457 stacksourcefile = bucket.symbolized_joined_stacksourcefile | 476 stacksourcefile = bucket.symbolized_joined_stacksourcefile |
458 typeinfo = bucket.symbolized_typeinfo | 477 typeinfo = bucket.symbolized_typeinfo |
459 if typeinfo.startswith('0x'): | 478 if typeinfo.startswith('0x'): |
460 typeinfo = bucket.typeinfo_name | 479 typeinfo = bucket.typeinfo_name |
461 | 480 |
462 for rule in self._rules: | 481 for rule in self._rules: |
463 if (bucket.mmap == rule.mmap and | 482 if (bucket.allocator_type == rule.allocator_type and |
464 (not rule.stackfunction_pattern or | 483 (not rule.stackfunction_pattern or |
465 rule.stackfunction_pattern.match(stackfunction)) and | 484 rule.stackfunction_pattern.match(stackfunction)) and |
466 (not rule.stacksourcefile_pattern or | 485 (not rule.stacksourcefile_pattern or |
467 rule.stacksourcefile_pattern.match(stacksourcefile)) and | 486 rule.stacksourcefile_pattern.match(stacksourcefile)) and |
468 (not rule.typeinfo_pattern or rule.typeinfo_pattern.match(typeinfo))): | 487 (not rule.typeinfo_pattern or rule.typeinfo_pattern.match(typeinfo))): |
469 bucket.component_cache = rule.name | 488 bucket.component_cache = rule.name |
470 return rule.name | 489 return rule.name |
471 | 490 |
472 assert False | 491 assert False |
473 | 492 |
| 493 def find_unhooked(self, region): |
| 494 for rule in self._rules: |
| 495 if (region[0] == 'unhooked' and |
| 496 rule.allocator_type == 'unhooked' and |
| 497 (not rule.mappedpathname_pattern or |
| 498 rule.mappedpathname_pattern.match(region[1]['vma']['name'])) and |
| 499 (not rule.mappedpermission_pattern or |
| 500 rule.mappedpermission_pattern.match( |
| 501 region[1]['vma']['readable'] + |
| 502 region[1]['vma']['writable'] + |
| 503 region[1]['vma']['executable'] + |
| 504 region[1]['vma']['private']))): |
| 505 return rule.name |
| 506 |
| 507 assert False |
| 508 |
474 @staticmethod | 509 @staticmethod |
475 def load(filename, filetype): | 510 def load(filename, filetype): |
476 """Loads a policy file of |filename| in a |format|. | 511 """Loads a policy file of |filename| in a |format|. |
477 | 512 |
478 Args: | 513 Args: |
479 filename: A filename to be loaded. | 514 filename: A filename to be loaded. |
480 filetype: A string to specify a type of the file. Only 'json' is | 515 filetype: A string to specify a type of the file. Only 'json' is |
481 supported for now. | 516 supported for now. |
482 | 517 |
483 Returns: | 518 Returns: |
(...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
518 A loaded policy object. | 553 A loaded policy object. |
519 """ | 554 """ |
520 policy = json.load(policy_f) | 555 policy = json.load(policy_f) |
521 | 556 |
522 rules = [] | 557 rules = [] |
523 for rule in policy['rules']: | 558 for rule in policy['rules']: |
524 stackfunction = rule.get('stackfunction') or rule.get('stacktrace') | 559 stackfunction = rule.get('stackfunction') or rule.get('stacktrace') |
525 stacksourcefile = rule.get('stacksourcefile') | 560 stacksourcefile = rule.get('stacksourcefile') |
526 rules.append(Rule( | 561 rules.append(Rule( |
527 rule['name'], | 562 rule['name'], |
528 rule['allocator'] == 'mmap', | 563 rule['allocator'], # allocator_type |
529 stackfunction, | 564 stackfunction, |
530 stacksourcefile, | 565 stacksourcefile, |
531 rule['typeinfo'] if 'typeinfo' in rule else None)) | 566 rule['typeinfo'] if 'typeinfo' in rule else None, |
| 567 rule.get('mappedpathname'), |
| 568 rule.get('mappedpermission'))) |
532 | 569 |
533 return Policy(rules, policy['version'], policy['components']) | 570 return Policy(rules, policy['version'], policy['components']) |
534 | 571 |
535 | 572 |
536 class PolicySet(object): | 573 class PolicySet(object): |
537 """Represents a set of policies.""" | 574 """Represents a set of policies.""" |
538 | 575 |
539 def __init__(self, policy_directory): | 576 def __init__(self, policy_directory): |
540 self._policy_directory = policy_directory | 577 self._policy_directory = policy_directory |
541 | 578 |
(...skipping 49 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
591 LOGGER.info(' %s: %s' % (label, directory[label]['file'])) | 628 LOGGER.info(' %s: %s' % (label, directory[label]['file'])) |
592 loaded = Policy.load(directory[label]['file'], directory[label]['format']) | 629 loaded = Policy.load(directory[label]['file'], directory[label]['format']) |
593 if loaded: | 630 if loaded: |
594 policies[label] = loaded | 631 policies[label] = loaded |
595 return PolicySet(policies) | 632 return PolicySet(policies) |
596 | 633 |
597 | 634 |
598 class Bucket(object): | 635 class Bucket(object): |
599 """Represents a bucket, which is a unit of memory block classification.""" | 636 """Represents a bucket, which is a unit of memory block classification.""" |
600 | 637 |
  def __init__(self, stacktrace, allocator_type, typeinfo, typeinfo_name):
    """Initializes a bucket entry.

    Args:
        stacktrace: A list of integer frame addresses of the allocation
            site (parsed from hexadecimal words in the bucket file).
        allocator_type: A string token naming the allocator; it is the
            second word of a bucket-file line (e.g. 'malloc' or 'mmap').
        typeinfo: An address of the type information parsed from a
            't'-prefixed word — presumably None when absent; TODO confirm.
        typeinfo_name: A string name of the type from an 'n'-prefixed word.
    """
    self._stacktrace = stacktrace
    self._allocator_type = allocator_type
    self._typeinfo = typeinfo
    self._typeinfo_name = typeinfo_name

    # Symbolized fields start out as the raw (unsymbolized) values; the
    # 'joined' string forms start empty and are filled in when symbols are
    # resolved elsewhere.
    self._symbolized_stackfunction = stacktrace
    self._symbolized_joined_stackfunction = ''
    self._symbolized_stacksourcefile = stacktrace
    self._symbolized_joined_stacksourcefile = ''
    self._symbolized_typeinfo = typeinfo_name

    # Caches the component name matched by a policy; '' means "not cached".
    self.component_cache = ''
614 | 651 |
615 def __str__(self): | 652 def __str__(self): |
616 result = [] | 653 result = [] |
617 result.append('mmap' if self._mmap else 'malloc') | 654 result.append(self._allocator_type) |
618 if self._symbolized_typeinfo == 'no typeinfo': | 655 if self._symbolized_typeinfo == 'no typeinfo': |
619 result.append('tno_typeinfo') | 656 result.append('tno_typeinfo') |
620 else: | 657 else: |
621 result.append('t' + self._symbolized_typeinfo) | 658 result.append('t' + self._symbolized_typeinfo) |
622 result.append('n' + self._typeinfo_name) | 659 result.append('n' + self._typeinfo_name) |
623 result.extend(['%s(@%s)' % (function, sourcefile) | 660 result.extend(['%s(@%s)' % (function, sourcefile) |
624 for function, sourcefile | 661 for function, sourcefile |
625 in zip(self._symbolized_stackfunction, | 662 in zip(self._symbolized_stackfunction, |
626 self._symbolized_stacksourcefile)]) | 663 self._symbolized_stacksourcefile)]) |
627 return ' '.join(result) | 664 return ' '.join(result) |
(...skipping 24 matching lines...) Expand all Loading... |
652 self._symbolized_typeinfo = 'no typeinfo' | 689 self._symbolized_typeinfo = 'no typeinfo' |
653 | 690 |
654 def clear_component_cache(self): | 691 def clear_component_cache(self): |
655 self.component_cache = '' | 692 self.component_cache = '' |
656 | 693 |
657 @property | 694 @property |
658 def stacktrace(self): | 695 def stacktrace(self): |
659 return self._stacktrace | 696 return self._stacktrace |
660 | 697 |
661 @property | 698 @property |
662 def mmap(self): | 699 def allocator_type(self): |
663 return self._mmap | 700 return self._allocator_type |
664 | 701 |
665 @property | 702 @property |
666 def typeinfo(self): | 703 def typeinfo(self): |
667 return self._typeinfo | 704 return self._typeinfo |
668 | 705 |
669 @property | 706 @property |
670 def typeinfo_name(self): | 707 def typeinfo_name(self): |
671 return self._typeinfo_name | 708 return self._typeinfo_name |
672 | 709 |
673 @property | 710 @property |
(...skipping 59 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
733 self._typeinfo_addresses.add(typeinfo) | 770 self._typeinfo_addresses.add(typeinfo) |
734 elif word[0] == 'n': | 771 elif word[0] == 'n': |
735 typeinfo_name = word[1:] | 772 typeinfo_name = word[1:] |
736 else: | 773 else: |
737 stacktrace_begin = index | 774 stacktrace_begin = index |
738 break | 775 break |
739 stacktrace = [int(address, 16) for address in words[stacktrace_begin:]] | 776 stacktrace = [int(address, 16) for address in words[stacktrace_begin:]] |
740 for frame in stacktrace: | 777 for frame in stacktrace: |
741 self._code_addresses.add(frame) | 778 self._code_addresses.add(frame) |
742 self._buckets[int(words[0])] = Bucket( | 779 self._buckets[int(words[0])] = Bucket( |
743 stacktrace, words[1] == 'mmap', typeinfo, typeinfo_name) | 780 stacktrace, words[1], typeinfo, typeinfo_name) |
744 | 781 |
745 def __iter__(self): | 782 def __iter__(self): |
746 for bucket_id, bucket_content in self._buckets.iteritems(): | 783 for bucket_id, bucket_content in self._buckets.iteritems(): |
747 yield bucket_id, bucket_content | 784 yield bucket_id, bucket_content |
748 | 785 |
749 def __getitem__(self, bucket_id): | 786 def __getitem__(self, bucket_id): |
750 return self._buckets[bucket_id] | 787 return self._buckets[bucket_id] |
751 | 788 |
752 def get(self, bucket_id): | 789 def get(self, bucket_id): |
753 return self._buckets.get(bucket_id) | 790 return self._buckets.get(bucket_id) |
(...skipping 17 matching lines...) Expand all Loading... |
771 | 808 |
772 class Dump(object): | 809 class Dump(object): |
773 """Represents a heap profile dump.""" | 810 """Represents a heap profile dump.""" |
774 | 811 |
775 _PATH_PATTERN = re.compile(r'^(.*)\.([0-9]+)\.([0-9]+)\.heap$') | 812 _PATH_PATTERN = re.compile(r'^(.*)\.([0-9]+)\.([0-9]+)\.heap$') |
776 | 813 |
777 _HOOK_PATTERN = re.compile( | 814 _HOOK_PATTERN = re.compile( |
778 r'^ ([ \(])([a-f0-9]+)([ \)])-([ \(])([a-f0-9]+)([ \)])\s+' | 815 r'^ ([ \(])([a-f0-9]+)([ \)])-([ \(])([a-f0-9]+)([ \)])\s+' |
779 r'(hooked|unhooked)\s+(.+)$', re.IGNORECASE) | 816 r'(hooked|unhooked)\s+(.+)$', re.IGNORECASE) |
780 | 817 |
| 818 _HOOKED_PATTERN = re.compile(r'(?P<TYPE>.+ )?(?P<COMMITTED>[0-9]+) / ' |
| 819 '(?P<RESERVED>[0-9]+) @ (?P<BUCKETID>[0-9]+)') |
| 820 _UNHOOKED_PATTERN = re.compile(r'(?P<TYPE>.+ )?(?P<COMMITTED>[0-9]+) / ' |
| 821 '(?P<RESERVED>[0-9]+)') |
| 822 |
| 823 _OLD_HOOKED_PATTERN = re.compile(r'(?P<TYPE>.+) @ (?P<BUCKETID>[0-9]+)') |
| 824 _OLD_UNHOOKED_PATTERN = re.compile(r'(?P<TYPE>.+) (?P<COMMITTED>[0-9]+)') |
| 825 |
781 _TIME_PATTERN_FORMAT = re.compile( | 826 _TIME_PATTERN_FORMAT = re.compile( |
782 r'^Time: ([0-9]+/[0-9]+/[0-9]+ [0-9]+:[0-9]+:[0-9]+)(\.[0-9]+)?') | 827 r'^Time: ([0-9]+/[0-9]+/[0-9]+ [0-9]+:[0-9]+:[0-9]+)(\.[0-9]+)?') |
783 _TIME_PATTERN_SECONDS = re.compile(r'^Time: ([0-9]+)$') | 828 _TIME_PATTERN_SECONDS = re.compile(r'^Time: ([0-9]+)$') |
784 | 829 |
785 def __init__(self, path, modified_time): | 830 def __init__(self, path, modified_time): |
786 self._path = path | 831 self._path = path |
787 matched = self._PATH_PATTERN.match(path) | 832 matched = self._PATH_PATTERN.match(path) |
788 self._pid = int(matched.group(2)) | 833 self._pid = int(matched.group(2)) |
789 self._count = int(matched.group(3)) | 834 self._count = int(matched.group(3)) |
790 self._time = modified_time | 835 self._time = modified_time |
(...skipping 162 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
953 def _parse_mmap_list(self): | 998 def _parse_mmap_list(self): |
954 """Parses lines in self._lines as a mmap list.""" | 999 """Parses lines in self._lines as a mmap list.""" |
955 (ln, found) = skip_while( | 1000 (ln, found) = skip_while( |
956 0, len(self._lines), | 1001 0, len(self._lines), |
957 lambda n: self._lines[n] != 'MMAP_LIST:\n') | 1002 lambda n: self._lines[n] != 'MMAP_LIST:\n') |
958 if not found: | 1003 if not found: |
959 return {} | 1004 return {} |
960 | 1005 |
961 ln += 1 | 1006 ln += 1 |
962 self._map = {} | 1007 self._map = {} |
| 1008 current_vma = dict() |
963 while True: | 1009 while True: |
964 entry = proc_maps.ProcMaps.parse_line(self._lines[ln]) | 1010 entry = proc_maps.ProcMaps.parse_line(self._lines[ln]) |
965 if entry: | 1011 if entry: |
| 1012 current_vma = dict() |
966 for _, _, attr in self._procmaps.iter_range(entry.begin, entry.end): | 1013 for _, _, attr in self._procmaps.iter_range(entry.begin, entry.end): |
967 for key, value in entry.as_dict().iteritems(): | 1014 for key, value in entry.as_dict().iteritems(): |
968 attr[key] = value | 1015 attr[key] = value |
| 1016 current_vma[key] = value |
969 ln += 1 | 1017 ln += 1 |
970 continue | 1018 continue |
971 matched = self._HOOK_PATTERN.match(self._lines[ln]) | 1019 matched = self._HOOK_PATTERN.match(self._lines[ln]) |
972 if not matched: | 1020 if not matched: |
973 break | 1021 break |
974 # 2: starting address | 1022 # 2: starting address |
975 # 5: end address | 1023 # 5: end address |
976 # 7: hooked or unhooked | 1024 # 7: hooked or unhooked |
977 # 8: additional information | 1025 # 8: additional information |
| 1026 if matched.group(7) == 'hooked': |
| 1027 submatched = self._HOOKED_PATTERN.match(matched.group(8)) |
| 1028 if not submatched: |
| 1029 submatched = self._OLD_HOOKED_PATTERN.match(matched.group(8)) |
| 1030 elif matched.group(7) == 'unhooked': |
| 1031 submatched = self._UNHOOKED_PATTERN.match(matched.group(8)) |
| 1032 if not submatched: |
| 1033 submatched = self._OLD_UNHOOKED_PATTERN.match(matched.group(8)) |
| 1034 else: |
| 1035 assert matched.group(7) in ['hooked', 'unhooked'] |
| 1036 |
| 1037 submatched_dict = submatched.groupdict() |
| 1038 region_info = { 'vma': current_vma } |
| 1039 if 'TYPE' in submatched_dict: |
| 1040 region_info['type'] = submatched_dict['TYPE'].strip() |
| 1041 if 'COMMITTED' in submatched_dict: |
| 1042 region_info['committed'] = int(submatched_dict['COMMITTED']) |
| 1043 if 'RESERVED' in submatched_dict: |
| 1044 region_info['reserved'] = int(submatched_dict['RESERVED']) |
| 1045 if 'BUCKETID' in submatched_dict: |
| 1046 region_info['bucket_id'] = int(submatched_dict['BUCKETID']) |
| 1047 |
978 self._map[(int(matched.group(2), 16), | 1048 self._map[(int(matched.group(2), 16), |
979 int(matched.group(5), 16))] = (matched.group(7), | 1049 int(matched.group(5), 16))] = (matched.group(7), region_info) |
980 matched.group(8)) | |
981 ln += 1 | 1050 ln += 1 |
982 | 1051 |
983 def _extract_stacktrace_lines(self, line_number): | 1052 def _extract_stacktrace_lines(self, line_number): |
984 """Extracts the position of stacktrace lines. | 1053 """Extracts the position of stacktrace lines. |
985 | 1054 |
986 Valid stacktrace lines are stored into self._stacktrace_lines. | 1055 Valid stacktrace lines are stored into self._stacktrace_lines. |
987 | 1056 |
988 Args: | 1057 Args: |
989 line_number: A line number to start parsing in lines. | 1058 line_number: A line number to start parsing in lines. |
990 | 1059 |
(...skipping 292 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1283 first_dump_time: An integer representing time when the first dump is | 1352 first_dump_time: An integer representing time when the first dump is |
1284 dumped. | 1353 dumped. |
1285 | 1354 |
1286 Returns: | 1355 Returns: |
1287 A dict mapping components and their corresponding sizes. | 1356 A dict mapping components and their corresponding sizes. |
1288 """ | 1357 """ |
1289 LOGGER.info(' %s' % dump.path) | 1358 LOGGER.info(' %s' % dump.path) |
1290 sizes = dict((c, 0) for c in policy.components) | 1359 sizes = dict((c, 0) for c in policy.components) |
1291 | 1360 |
1292 PolicyCommands._accumulate(dump, policy, bucket_set, sizes) | 1361 PolicyCommands._accumulate(dump, policy, bucket_set, sizes) |
| 1362 PolicyCommands._accumulate_maps(dump, policy, sizes) |
1293 | 1363 |
1294 sizes['mmap-no-log'] = ( | 1364 sizes['mmap-no-log'] = ( |
1295 dump.global_stat('profiled-mmap_committed') - | 1365 dump.global_stat('profiled-mmap_committed') - |
1296 sizes['mmap-total-log']) | 1366 sizes['mmap-total-log']) |
1297 sizes['mmap-total-record'] = dump.global_stat('profiled-mmap_committed') | 1367 sizes['mmap-total-record'] = dump.global_stat('profiled-mmap_committed') |
1298 sizes['mmap-total-record-vm'] = dump.global_stat('profiled-mmap_virtual') | 1368 sizes['mmap-total-record-vm'] = dump.global_stat('profiled-mmap_virtual') |
1299 | 1369 |
1300 sizes['tc-no-log'] = ( | 1370 sizes['tc-no-log'] = ( |
1301 dump.global_stat('profiled-malloc_committed') - | 1371 dump.global_stat('profiled-malloc_committed') - |
1302 sizes['tc-total-log']) | 1372 sizes['tc-total-log']) |
1303 sizes['tc-total-record'] = dump.global_stat('profiled-malloc_committed') | 1373 sizes['tc-total-record'] = dump.global_stat('profiled-malloc_committed') |
1304 sizes['tc-unused'] = ( | 1374 sizes['tc-unused'] = ( |
1305 sizes['mmap-tcmalloc'] - | 1375 sizes['mmap-tcmalloc'] - |
1306 dump.global_stat('profiled-malloc_committed')) | 1376 dump.global_stat('profiled-malloc_committed')) |
| 1377 if sizes['tc-unused'] < 0: |
| 1378 LOGGER.warn(' Assuming tc-unused=0 as it is negative: %d (bytes)' % |
| 1379 sizes['tc-unused']) |
| 1380 sizes['tc-unused'] = 0 |
1307 sizes['tc-total'] = sizes['mmap-tcmalloc'] | 1381 sizes['tc-total'] = sizes['mmap-tcmalloc'] |
1308 | 1382 |
1309 for key, value in { | 1383 for key, value in { |
1310 'total': 'total_committed', | 1384 'total': 'total_committed', |
1311 'filemapped': 'file_committed', | 1385 'filemapped': 'file_committed', |
1312 'absent': 'absent_committed', | 1386 'absent': 'absent_committed', |
1313 'file-exec': 'file-exec_committed', | 1387 'file-exec': 'file-exec_committed', |
1314 'file-nonexec': 'file-nonexec_committed', | 1388 'file-nonexec': 'file-nonexec_committed', |
1315 'anonymous': 'anonymous_committed', | 1389 'anonymous': 'anonymous_committed', |
1316 'stack': 'stack_committed', | 1390 'stack': 'stack_committed', |
1317 'other': 'other_committed', | 1391 'other': 'other_committed', |
1318 'unhooked-absent': 'nonprofiled-absent_committed', | 1392 'unhooked-absent': 'nonprofiled-absent_committed', |
1319 'unhooked-anonymous': 'nonprofiled-anonymous_committed', | |
1320 'unhooked-file-exec': 'nonprofiled-file-exec_committed', | |
1321 'unhooked-file-nonexec': 'nonprofiled-file-nonexec_committed', | |
1322 'unhooked-stack': 'nonprofiled-stack_committed', | |
1323 'unhooked-other': 'nonprofiled-other_committed', | |
1324 'total-vm': 'total_virtual', | 1393 'total-vm': 'total_virtual', |
1325 'filemapped-vm': 'file_virtual', | 1394 'filemapped-vm': 'file_virtual', |
1326 'anonymous-vm': 'anonymous_virtual', | 1395 'anonymous-vm': 'anonymous_virtual', |
1327 'other-vm': 'other_virtual' }.iteritems(): | 1396 'other-vm': 'other_virtual' }.iteritems(): |
1328 if key in sizes: | 1397 if key in sizes: |
1329 sizes[key] = dump.global_stat(value) | 1398 sizes[key] = dump.global_stat(value) |
1330 | 1399 |
1331 if 'mustbezero' in sizes: | 1400 if 'mustbezero' in sizes: |
1332 removed_list = ( | 1401 removed_list = ( |
1333 'profiled-mmap_committed', | 1402 'profiled-mmap_committed', |
(...skipping 27 matching lines...) Expand all Loading... |
1361 component_match = policy.find(bucket) | 1430 component_match = policy.find(bucket) |
1362 sizes[component_match] += int(words[COMMITTED]) | 1431 sizes[component_match] += int(words[COMMITTED]) |
1363 | 1432 |
1364 if component_match.startswith('tc-'): | 1433 if component_match.startswith('tc-'): |
1365 sizes['tc-total-log'] += int(words[COMMITTED]) | 1434 sizes['tc-total-log'] += int(words[COMMITTED]) |
1366 elif component_match.startswith('mmap-'): | 1435 elif component_match.startswith('mmap-'): |
1367 sizes['mmap-total-log'] += int(words[COMMITTED]) | 1436 sizes['mmap-total-log'] += int(words[COMMITTED]) |
1368 else: | 1437 else: |
1369 sizes['other-total-log'] += int(words[COMMITTED]) | 1438 sizes['other-total-log'] += int(words[COMMITTED]) |
1370 | 1439 |
| 1440 @staticmethod |
| 1441 def _accumulate_maps(dump, policy, sizes): |
| 1442 for _, value in dump.iter_map: |
| 1443 if value[0] == 'unhooked': |
| 1444 component_match = policy.find_unhooked(value) |
| 1445 sizes[component_match] += int(value[1]['committed']) |
| 1446 |
1371 | 1447 |
1372 class CSVCommand(PolicyCommands): | 1448 class CSVCommand(PolicyCommands): |
1373 def __init__(self): | 1449 def __init__(self): |
1374 super(CSVCommand, self).__init__('csv') | 1450 super(CSVCommand, self).__init__('csv') |
1375 | 1451 |
1376 def do(self, sys_argv): | 1452 def do(self, sys_argv): |
1377 policy_set, dumps, bucket_set = self._set_up(sys_argv) | 1453 policy_set, dumps, bucket_set = self._set_up(sys_argv) |
1378 return CSVCommand._output(policy_set, dumps, bucket_set, sys.stdout) | 1454 return CSVCommand._output(policy_set, dumps, bucket_set, sys.stdout) |
1379 | 1455 |
1380 @staticmethod | 1456 @staticmethod |
(...skipping 126 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1507 max_dump_count_digit = len(str(max_dump_count)) | 1583 max_dump_count_digit = len(str(max_dump_count)) |
1508 for begin, end, attr in range_dict.iter_range(): | 1584 for begin, end, attr in range_dict.iter_range(): |
1509 out.write('%x-%x\n' % (begin, end)) | 1585 out.write('%x-%x\n' % (begin, end)) |
1510 if len(attr) < max_dump_count: | 1586 if len(attr) < max_dump_count: |
1511 attr[max_dump_count] = None | 1587 attr[max_dump_count] = None |
1512 for index, x in enumerate(attr[1:]): | 1588 for index, x in enumerate(attr[1:]): |
1513 out.write(' #%0*d: ' % (max_dump_count_digit, index + 1)) | 1589 out.write(' #%0*d: ' % (max_dump_count_digit, index + 1)) |
1514 if not x: | 1590 if not x: |
1515 out.write('None\n') | 1591 out.write('None\n') |
1516 elif x[0] == 'hooked': | 1592 elif x[0] == 'hooked': |
1517 attrs = x[1].split() | 1593 region_info = x[1] |
1518 assert len(attrs) == 3 | 1594 bucket_id = region_info['bucket_id'] |
1519 bucket_id = int(attrs[2]) | |
1520 bucket = bucket_set.get(bucket_id) | 1595 bucket = bucket_set.get(bucket_id) |
1521 component = policy.find(bucket) | 1596 component = policy.find(bucket) |
1522 out.write('hooked %s: %s @ %d\n' % (attrs[0], component, bucket_id)) | 1597 out.write('hooked %s: %s @ %d\n' % ( |
| 1598 region_info['type'] if 'type' in region_info else 'None', |
| 1599 component, bucket_id)) |
1523 else: | 1600 else: |
1524 attrs = x[1].split() | 1601 region_info = x[1] |
1525 size = int(attrs[1]) | 1602 size = region_info['committed'] |
1526 out.write('unhooked %s: %d bytes committed\n' % (attrs[0], size)) | 1603 out.write('unhooked %s: %d bytes committed\n' % ( |
| 1604 region_info['type'] if 'type' in region_info else 'None', size)) |
1527 | 1605 |
1528 | 1606 |
1529 class ExpandCommand(Command): | 1607 class ExpandCommand(Command): |
1530 def __init__(self): | 1608 def __init__(self): |
1531 super(ExpandCommand, self).__init__( | 1609 super(ExpandCommand, self).__init__( |
1532 'Usage: %prog expand <dump> <policy> <component> <depth>') | 1610 'Usage: %prog expand <dump> <policy> <component> <depth>') |
1533 | 1611 |
1534 def do(self, sys_argv): | 1612 def do(self, sys_argv): |
1535 _, args = self._parse_args(sys_argv, 4) | 1613 _, args = self._parse_args(sys_argv, 4) |
1536 dump_path = args[1] | 1614 dump_path = args[1] |
(...skipping 279 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1816 errorcode = COMMANDS[action]().do(sys.argv) | 1894 errorcode = COMMANDS[action]().do(sys.argv) |
1817 except ParsingException, e: | 1895 except ParsingException, e: |
1818 errorcode = 1 | 1896 errorcode = 1 |
1819 sys.stderr.write('Exit by parsing error: %s\n' % e) | 1897 sys.stderr.write('Exit by parsing error: %s\n' % e) |
1820 | 1898 |
1821 return errorcode | 1899 return errorcode |
1822 | 1900 |
1823 | 1901 |
1824 if __name__ == '__main__': | 1902 if __name__ == '__main__': |
1825 sys.exit(main()) | 1903 sys.exit(main()) |
OLD | NEW |