Chromium Code Reviews

Unified Diff: src/heap-profiler.cc

Issue 6685088: Merge isolates to bleeding_edge. (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 9 years, 9 months ago
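Summary for reviewers skimming this file: the change follows one mechanical pattern throughout. The process-wide HeapProfiler::singleton_ is replaced by a per-isolate instance reached through Isolate::Current()->heap_profiler(), and bare static calls (Heap::, GlobalHandles::) now go through the current isolate (or the HEAP shorthand). The toy program below is only an illustrative sketch of that shape; ToyIsolate and ToyProfiler are invented names for this sketch, not V8 classes.

// Toy model (not V8 code) of the singleton -> per-isolate refactoring.
#include <cassert>
#include <cstdio>

class ToyProfiler {
 public:
  void TakeSnapshot() { std::printf("snapshot #%d\n", next_uid_++); }
 private:
  int next_uid_ = 1;  // mirrors next_snapshot_uid_, now one counter per isolate
};

// Before the merge: one profiler for the whole process.
//   static ToyProfiler* singleton_;
//   static void Setup()    { if (!singleton_) singleton_ = new ToyProfiler(); }
//   static void TearDown() { delete singleton_; singleton_ = nullptr; }

// After the merge: the profiler hangs off an isolate object, so several
// isolates can profile independently.
class ToyIsolate {
 public:
  ToyProfiler* heap_profiler() { return heap_profiler_; }
  void set_heap_profiler(ToyProfiler* p) { heap_profiler_ = p; }

  void Setup() {
    if (heap_profiler() == nullptr) set_heap_profiler(new ToyProfiler());
  }
  void TearDown() {
    delete heap_profiler();
    set_heap_profiler(nullptr);
  }

 private:
  ToyProfiler* heap_profiler_ = nullptr;
};

int main() {
  ToyIsolate a, b;                    // two independent "isolates"
  a.Setup();
  b.Setup();
  a.heap_profiler()->TakeSnapshot();  // each isolate keeps its own uid counter
  b.heap_profiler()->TakeSnapshot();
  a.TearDown();
  b.TearDown();
}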
 // Copyright 2009-2010 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 54 matching lines...)
                              const JSObjectsCluster& cluster, int size);
 };


 JSObjectsCluster Clusterizer::Clusterize(HeapObject* obj, bool fine_grain) {
   if (obj->IsJSObject()) {
     JSObject* js_obj = JSObject::cast(obj);
     String* constructor = GetConstructorNameForHeapProfile(
         JSObject::cast(js_obj));
     // Differentiate Object and Array instances.
-    if (fine_grain && (constructor == Heap::Object_symbol() ||
-        constructor == Heap::Array_symbol())) {
+    if (fine_grain && (constructor == HEAP->Object_symbol() ||
+        constructor == HEAP->Array_symbol())) {
       return JSObjectsCluster(constructor, obj);
     } else {
       return JSObjectsCluster(constructor);
     }
   } else if (obj->IsString()) {
-    return JSObjectsCluster(Heap::String_symbol());
+    return JSObjectsCluster(HEAP->String_symbol());
   } else if (obj->IsJSGlobalPropertyCell()) {
     return JSObjectsCluster(JSObjectsCluster::GLOBAL_PROPERTY);
   } else if (obj->IsCode() || obj->IsSharedFunctionInfo() || obj->IsScript()) {
     return JSObjectsCluster(JSObjectsCluster::CODE);
   }
   return JSObjectsCluster();
 }


 void Clusterizer::InsertIntoTree(JSObjectsClusterTree* tree,
(...skipping 12 matching lines...)
   number_and_size.increment_number(1);
   number_and_size.increment_bytes(size);
   loc.set_value(number_and_size);
 }


 int Clusterizer::CalculateNetworkSize(JSObject* obj) {
   int size = obj->Size();
   // If 'properties' and 'elements' are non-empty (thus, non-shared),
   // take their size into account.
-  if (obj->properties() != Heap::empty_fixed_array()) {
+  if (obj->properties() != HEAP->empty_fixed_array()) {
     size += obj->properties()->Size();
   }
-  if (obj->elements() != Heap::empty_fixed_array()) {
+  if (obj->elements() != HEAP->empty_fixed_array()) {
     size += obj->elements()->Size();
   }
   // For functions, also account non-empty context and literals sizes.
   if (obj->IsJSFunction()) {
     JSFunction* f = JSFunction::cast(obj);
     if (f->unchecked_context()->IsContext()) {
       size += f->context()->Size();
     }
     if (f->literals()->length() != 0) {
       size += f->literals()->Size();
(...skipping 38 matching lines...)


 // A printer interface implementation for the Retainers profile.
 class RetainersPrinter : public RetainerHeapProfile::Printer {
  public:
   void PrintRetainers(const JSObjectsCluster& cluster,
                       const StringStream& retainers) {
     HeapStringAllocator allocator;
     StringStream stream(&allocator);
     cluster.Print(&stream);
-    LOG(HeapSampleJSRetainersEvent(
+    LOG(ISOLATE,
+        HeapSampleJSRetainersEvent(
         *(stream.ToCString()), *(retainers.ToCString())));
   }
 };


 // Visitor for printing a cluster tree.
 class ClusterTreePrinter BASE_EMBEDDED {
  public:
   explicit ClusterTreePrinter(StringStream* stream) : stream_(stream) {}
   void Call(const JSObjectsCluster& cluster,
(...skipping 120 matching lines...)
   if (eq.is_null()) return;
   JSObjectsRetainerTree::Locator loc;
   if (output_tree_.Insert(eq, &loc)) {
     loc.set_value(new JSObjectsClusterTree());
   }
   RetainersAggregator retainers_aggregator(coarser_, loc.value());
   tree->ForEach(&retainers_aggregator);
 }


-HeapProfiler* HeapProfiler::singleton_ = NULL;
-
 HeapProfiler::HeapProfiler()
     : snapshots_(new HeapSnapshotsCollection()),
       next_snapshot_uid_(1) {
 }


 HeapProfiler::~HeapProfiler() {
   delete snapshots_;
 }

 #endif // ENABLE_LOGGING_AND_PROFILING

 void HeapProfiler::Setup() {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  if (singleton_ == NULL) {
-    singleton_ = new HeapProfiler();
+  Isolate* isolate = Isolate::Current();
+  if (isolate->heap_profiler() == NULL) {
+    isolate->set_heap_profiler(new HeapProfiler());
   }
 #endif
 }


 void HeapProfiler::TearDown() {
 #ifdef ENABLE_LOGGING_AND_PROFILING
-  delete singleton_;
-  singleton_ = NULL;
+  Isolate* isolate = Isolate::Current();
+  delete isolate->heap_profiler();
+  isolate->set_heap_profiler(NULL);
 #endif
 }


 #ifdef ENABLE_LOGGING_AND_PROFILING

 HeapSnapshot* HeapProfiler::TakeSnapshot(const char* name,
                                          int type,
                                          v8::ActivityControl* control) {
-  ASSERT(singleton_ != NULL);
-  return singleton_->TakeSnapshotImpl(name, type, control);
+  ASSERT(Isolate::Current()->heap_profiler() != NULL);
+  return Isolate::Current()->heap_profiler()->TakeSnapshotImpl(name,
+                                                               type,
+                                                               control);
 }


 HeapSnapshot* HeapProfiler::TakeSnapshot(String* name,
                                          int type,
                                          v8::ActivityControl* control) {
-  ASSERT(singleton_ != NULL);
-  return singleton_->TakeSnapshotImpl(name, type, control);
+  ASSERT(Isolate::Current()->heap_profiler() != NULL);
+  return Isolate::Current()->heap_profiler()->TakeSnapshotImpl(name,
+                                                               type,
+                                                               control);
 }


 void HeapProfiler::DefineWrapperClass(
     uint16_t class_id, v8::HeapProfiler::WrapperInfoCallback callback) {
-  ASSERT(singleton_ != NULL);
   ASSERT(class_id != v8::HeapProfiler::kPersistentHandleNoClassId);
-  if (singleton_->wrapper_callbacks_.length() <= class_id) {
-    singleton_->wrapper_callbacks_.AddBlock(
-        NULL, class_id - singleton_->wrapper_callbacks_.length() + 1);
+  if (wrapper_callbacks_.length() <= class_id) {
+    wrapper_callbacks_.AddBlock(
+        NULL, class_id - wrapper_callbacks_.length() + 1);
   }
-  singleton_->wrapper_callbacks_[class_id] = callback;
+  wrapper_callbacks_[class_id] = callback;
 }


 v8::RetainedObjectInfo* HeapProfiler::ExecuteWrapperClassCallback(
     uint16_t class_id, Object** wrapper) {
-  ASSERT(singleton_ != NULL);
-  if (singleton_->wrapper_callbacks_.length() <= class_id) return NULL;
-  return singleton_->wrapper_callbacks_[class_id](
+  if (wrapper_callbacks_.length() <= class_id) return NULL;
+  return wrapper_callbacks_[class_id](
       class_id, Utils::ToLocal(Handle<Object>(wrapper)));
 }


 HeapSnapshot* HeapProfiler::TakeSnapshotImpl(const char* name,
                                              int type,
                                              v8::ActivityControl* control) {
   HeapSnapshot::Type s_type = static_cast<HeapSnapshot::Type>(type);
   HeapSnapshot* result =
       snapshots_->NewSnapshot(s_type, name, next_snapshot_uid_++);
   bool generation_completed = true;
   switch (s_type) {
     case HeapSnapshot::kFull: {
-      Heap::CollectAllGarbage(true);
+      HEAP->CollectAllGarbage(true);
       HeapSnapshotGenerator generator(result, control);
       generation_completed = generator.GenerateSnapshot();
       break;
     }
     case HeapSnapshot::kAggregated: {
-      Heap::CollectAllGarbage(true);
+      HEAP->CollectAllGarbage(true);
       AggregatedHeapSnapshot agg_snapshot;
       AggregatedHeapSnapshotGenerator generator(&agg_snapshot);
       generator.GenerateSnapshot();
       generator.FillHeapSnapshot(result);
       break;
     }
     default:
       UNREACHABLE();
   }
   if (!generation_completed) {
     delete result;
     result = NULL;
   }
   snapshots_->SnapshotGenerationFinished(result);
   return result;
 }


 HeapSnapshot* HeapProfiler::TakeSnapshotImpl(String* name,
                                              int type,
                                              v8::ActivityControl* control) {
   return TakeSnapshotImpl(snapshots_->names()->GetName(name), type, control);
 }


 int HeapProfiler::GetSnapshotsCount() {
-  ASSERT(singleton_ != NULL);
-  return singleton_->snapshots_->snapshots()->length();
+  HeapProfiler* profiler = Isolate::Current()->heap_profiler();
+  ASSERT(profiler != NULL);
+  return profiler->snapshots_->snapshots()->length();
 }


 HeapSnapshot* HeapProfiler::GetSnapshot(int index) {
-  ASSERT(singleton_ != NULL);
-  return singleton_->snapshots_->snapshots()->at(index);
+  HeapProfiler* profiler = Isolate::Current()->heap_profiler();
+  ASSERT(profiler != NULL);
+  return profiler->snapshots_->snapshots()->at(index);
 }


 HeapSnapshot* HeapProfiler::FindSnapshot(unsigned uid) {
-  ASSERT(singleton_ != NULL);
-  return singleton_->snapshots_->GetSnapshot(uid);
+  HeapProfiler* profiler = Isolate::Current()->heap_profiler();
+  ASSERT(profiler != NULL);
+  return profiler->snapshots_->GetSnapshot(uid);
 }


 void HeapProfiler::ObjectMoveEvent(Address from, Address to) {
-  ASSERT(singleton_ != NULL);
-  singleton_->snapshots_->ObjectMoveEvent(from, to);
+  snapshots_->ObjectMoveEvent(from, to);
 }


 const JSObjectsClusterTreeConfig::Key JSObjectsClusterTreeConfig::kNoKey;
 const JSObjectsClusterTreeConfig::Value JSObjectsClusterTreeConfig::kNoValue;


 ConstructorHeapProfile::ConstructorHeapProfile()
     : zscope_(DELETE_ON_EXIT) {
 }


 void ConstructorHeapProfile::Call(const JSObjectsCluster& cluster,
                                   const NumberAndSizeInfo& number_and_size) {
   HeapStringAllocator allocator;
   StringStream stream(&allocator);
   cluster.Print(&stream);
-  LOG(HeapSampleJSConstructorEvent(*(stream.ToCString()),
+  LOG(ISOLATE,
+      HeapSampleJSConstructorEvent(*(stream.ToCString()),
                                    number_and_size.number(),
                                    number_and_size.bytes()));
 }


 void ConstructorHeapProfile::CollectStats(HeapObject* obj) {
   Clusterizer::InsertIntoTree(&js_objects_info_tree_, obj, false);
 }


(...skipping 198 matching lines...)
 const JSObjectsRetainerTreeConfig::Key JSObjectsRetainerTreeConfig::kNoKey;
 const JSObjectsRetainerTreeConfig::Value JSObjectsRetainerTreeConfig::kNoValue =
     NULL;


 RetainerHeapProfile::RetainerHeapProfile()
     : zscope_(DELETE_ON_EXIT),
       aggregator_(NULL) {
   JSObjectsCluster roots(JSObjectsCluster::ROOTS);
   ReferencesExtractor extractor(roots, this);
-  Heap::IterateRoots(&extractor, VISIT_ONLY_STRONG);
+  HEAP->IterateRoots(&extractor, VISIT_ONLY_STRONG);
 }


 RetainerHeapProfile::~RetainerHeapProfile() {
   delete aggregator_;
 }


 void RetainerHeapProfile::StoreReference(const JSObjectsCluster& cluster,
                                          HeapObject* ref) {
(...skipping 49 matching lines...)
   DeleteArray(static_cast<Address*>(trace));
   object.Dispose();
 }


 static void PrintProducerStackTrace(Object* obj, void* trace) {
   if (!obj->IsJSObject()) return;
   String* constructor = GetConstructorNameForHeapProfile(JSObject::cast(obj));
   SmartPointer<char> s_name(
       constructor->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL));
-  LOG(HeapSampleJSProducerEvent(GetConstructorName(*s_name),
+  LOG(ISOLATE,
+      HeapSampleJSProducerEvent(GetConstructorName(*s_name),
                                 reinterpret_cast<Address*>(trace)));
 }


 void HeapProfiler::WriteSample() {
-  LOG(HeapSampleBeginEvent("Heap", "allocated"));
-  LOG(HeapSampleStats(
-      "Heap", "allocated", Heap::CommittedMemory(), Heap::SizeOfObjects()));
+  Isolate* isolate = Isolate::Current();
+  LOG(isolate, HeapSampleBeginEvent("Heap", "allocated"));
+  LOG(isolate,
+      HeapSampleStats(
+          "Heap", "allocated", HEAP->CommittedMemory(), HEAP->SizeOfObjects()));

   AggregatedHeapSnapshot snapshot;
   AggregatedHeapSnapshotGenerator generator(&snapshot);
   generator.GenerateSnapshot();

   HistogramInfo* info = snapshot.info();
   for (int i = FIRST_NONSTRING_TYPE;
        i <= AggregatedHeapSnapshotGenerator::kAllStringsType;
        ++i) {
     if (info[i].bytes() > 0) {
-      LOG(HeapSampleItemEvent(info[i].name(), info[i].number(),
+      LOG(isolate,
+          HeapSampleItemEvent(info[i].name(), info[i].number(),
                               info[i].bytes()));
     }
   }

   snapshot.js_cons_profile()->PrintStats();
   snapshot.js_retainer_profile()->PrintStats();

-  GlobalHandles::IterateWeakRoots(PrintProducerStackTrace,
+  isolate->global_handles()->IterateWeakRoots(PrintProducerStackTrace,
                                   StackWeakReferenceCallback);

-  LOG(HeapSampleEndEvent("Heap", "allocated"));
+  LOG(isolate, HeapSampleEndEvent("Heap", "allocated"));
 }


 AggregatedHeapSnapshot::AggregatedHeapSnapshot()
     : info_(NewArray<HistogramInfo>(
         AggregatedHeapSnapshotGenerator::kAllStringsType + 1)) {
 #define DEF_TYPE_NAME(name) info_[name].set_name(#name);
   INSTANCE_TYPE_LIST(DEF_TYPE_NAME);
 #undef DEF_TYPE_NAME
   info_[AggregatedHeapSnapshotGenerator::kAllStringsType].set_name(
(...skipping 313 matching lines...)
   agg_snapshot_->js_cons_profile()->ForEach(&alloc_cons_iter);
   entries_map.AllocateEntries();

   // Fill up references.
   IterateRetainers<AllocatingRetainersIterator>(&allocator, &entries_map);

   snapshot->SetDominatorsToSelf();
 }


-bool ProducerHeapProfile::can_log_ = false;
-
 void ProducerHeapProfile::Setup() {
   can_log_ = true;
 }

 void ProducerHeapProfile::DoRecordJSObjectAllocation(Object* obj) {
   ASSERT(FLAG_log_producers);
   if (!can_log_) return;
   int framesCount = 0;
   for (JavaScriptFrameIterator it; !it.done(); it.Advance()) {
     ++framesCount;
   }
   if (framesCount == 0) return;
   ++framesCount;  // Reserve place for the terminator item.
   Vector<Address> stack(NewArray<Address>(framesCount), framesCount);
   int i = 0;
   for (JavaScriptFrameIterator it; !it.done(); it.Advance()) {
     stack[i++] = it.frame()->pc();
   }
   stack[i] = NULL;
-  Handle<Object> handle = GlobalHandles::Create(obj);
-  GlobalHandles::MakeWeak(handle.location(),
+  Handle<Object> handle = isolate_->global_handles()->Create(obj);
+  isolate_->global_handles()->MakeWeak(handle.location(),
                           static_cast<void*>(stack.start()),
                           StackWeakReferenceCallback);
 }


 #endif // ENABLE_LOGGING_AND_PROFILING


 } } // namespace v8::internal
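A second recurring change in the hunks above is that the LOG macro now takes the target isolate as its first argument, LOG(isolate, Event(...)) instead of LOG(Event(...)), so events are routed to the logger of the isolate that produced them. The standalone sketch below only illustrates that routing idea; MiniIsolate, MiniLogger, and MINI_LOG are invented names, not the real V8 macro.

// Toy model (not V8 code) of passing the isolate explicitly to a log macro.
#include <cstdio>
#include <string>
#include <utility>

struct MiniLogger {
  explicit MiniLogger(std::string tag) : tag_(std::move(tag)) {}
  void Emit(const std::string& event) {
    std::printf("[%s] %s\n", tag_.c_str(), event.c_str());
  }
  std::string tag_;
};

struct MiniIsolate {
  explicit MiniIsolate(std::string name) : logger_(std::move(name)) {}
  MiniLogger* logger() { return &logger_; }
  MiniLogger logger_;
};

// Old style: the macro had to reach a process-global logger.
// New style: the isolate is named explicitly at every call site.
#define MINI_LOG(isolate, call) (isolate)->logger()->call

int main() {
  MiniIsolate a("isolate-A"), b("isolate-B");
  MINI_LOG(&a, Emit("HeapSampleBeginEvent Heap allocated"));  // goes to A's log
  MINI_LOG(&b, Emit("HeapSampleEndEvent Heap allocated"));    // goes to B's log
}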